Dataset columns, with types and the value ranges shown in the preview:

| Column | Type | Values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0 to 112 |
| license_type | string | 2 values |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 values |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 values |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 values |
| src_encoding | string | 26 values |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 values |
| content | string | length 3 to 10.2M |
| authors | sequence | length 1 |
| author_id | string | length 1 to 132 |
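A minimal sketch of loading the dataset and inspecting these columns with the Hugging Face `datasets` library. The repository id is not given in this excerpt, so `org/dataset-name` below is a placeholder; streaming mode avoids downloading the full dataset.

```python
from datasets import load_dataset

# "org/dataset-name" is a placeholder; substitute the actual repository id.
ds = load_dataset("org/dataset-name", split="train", streaming=True)

# Each row is a dict keyed by the column names in the table above.
row = next(iter(ds))
for key in ("repo_name", "path", "language", "license_type", "length_bytes"):
    print(key, "->", row[key])

# `content` holds the raw source file; preview the first 200 characters.
print(row["content"][:200])
```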
Example row 1:

| Field | Value |
|---|---|
| blob_id | e28006528c866157b5832c15de8f00c12995b330 |
| directory_id | 890c8b8e90e516a5a3880eca9b2d217662fe7d84 |
| path | /armulator/armv6/opcodes/abstract_opcodes/ldr_register_thumb.py |
| content_id | 51fbe8abfb33e0f75de30bf0387a9fba04663e45 |
| detected_licenses | ["MIT"] |
| license_type | permissive |
| repo_name | doronz88/armulator |
| snapshot_id | b864135996f876c7857b79a314d4aa06cc19c549 |
| revision_id | 0294feac2785c8947e5943ac0c34f941ee4b5fff |
| branch_name | refs/heads/master |
| visit_date | 2022-11-05T08:14:42.405335 |
| revision_date | 2020-06-18T23:53:17 |
| committer_date | 2020-06-18T23:53:17 |
| github_id | 273,363,061 |
| star_events_count | 2 |
| fork_events_count | 0 |
| gha_license_id | null |
| gha_event_created_at | 2020-06-18T23:51:03 |
| gha_created_at | 2020-06-18T23:51:02 |
| gha_language | null |
| src_encoding | UTF-8 |
| language | Python |
| is_vendor | false |
| is_generated | false |
| length_bytes | 1,698 |
| extension | py |

content:

```python
from armulator.armv6.shift import shift
from armulator.armv6.bits_ops import add
from bitstring import BitArray
from armulator.armv6.arm_exceptions import EndOfInstruction
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode


class LdrRegisterThumb(AbstractOpcode):
    def __init__(self, m, t, n, shift_t, shift_n):
        super(LdrRegisterThumb, self).__init__()
        self.m = m
        self.t = t
        self.n = n
        self.shift_t = shift_t
        self.shift_n = shift_n

    def execute(self, processor):
        if processor.condition_passed():
            try:
                processor.null_check_if_thumbee(self.n)
            except EndOfInstruction:
                pass
            else:
                offset = shift(processor.registers.get(self.m), self.shift_t,
                               self.shift_n, processor.registers.cpsr.get_c())
                offset_addr = add(processor.registers.get(self.n), offset, 32)
                address = offset_addr
                data = processor.mem_u_get(address, 4)
                if self.t == 15:
                    if address[30:32] == "0b00":
                        processor.load_write_pc(address)
                    else:
                        print "unpredictable"
                elif processor.unaligned_support() or address[30:32] == "0b00":
                    processor.registers.set(self.t, data)
                else:
                    processor.registers.set(self.t, BitArray(length=32))  # unknown

    def instruction_syndrome(self):
        if self.t == 15:
            return BitArray(length=9)
        else:
            return BitArray(bin="11000") + BitArray(uint=self.t, length=4)
```
Example row 2:

| Field | Value |
|---|---|
| blob_id | df4245bfa348671c4ff60bc41a2a9c17ff75d4f3 |
| directory_id | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 |
| path | /langs/0/bw0.py |
| content_id | d035dd859b4fa0474da462854623b73eb560a2cf |
| detected_licenses | [] |
| license_type | no_license |
| repo_name | G4te-Keep3r/HowdyHackers |
| snapshot_id | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 |
| revision_id | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 |
| branch_name | refs/heads/master |
| visit_date | 2020-08-01T12:08:10.782018 |
| revision_date | 2016-11-13T20:45:50 |
| committer_date | 2016-11-13T20:45:50 |
| github_id | 73,624,224 |
| star_events_count | 0 |
| fork_events_count | 1 |
| gha_license_id | null |
| gha_event_created_at | null |
| gha_created_at | null |
| gha_language | null |
| src_encoding | UTF-8 |
| language | Python |
| is_vendor | false |
| is_generated | false |
| length_bytes | 486 |
| extension | py |

content:

```python
import sys

def printFunction(lineRemaining):
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            print

def main(fileName):
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'bw0':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return

if __name__ == '__main__':
    main(sys.argv[1])
```
Example row 3:

| Field | Value |
|---|---|
| blob_id | 30ecef27252bd418cd76a0e181cac9ff80ecba28 |
| directory_id | 8eab8ab725c2132bb8d090cdb2d23a5f71945249 |
| path | /virt/Lib/site-packages/numpy/core/tests/test_ufunc.py |
| content_id | 852044d32fcc6d27012c0a43a2e814b976d7154c |
| detected_licenses | ["GPL-3.0-only", "BSD-3-Clause-Open-MPI", "GPL-3.0-or-later", "GCC-exception-3.1", "BSD-3-Clause", "MIT"] |
| license_type | permissive |
| repo_name | JoaoSevergnini/metalpy |
| snapshot_id | 6c88a413a82bc25edd9308b8490a76fae8dd76ca |
| revision_id | c2d0098a309b6ce8c756ff840bfb53fb291747b6 |
| branch_name | refs/heads/main |
| visit_date | 2023-04-18T17:25:26.474485 |
| revision_date | 2022-09-18T20:44:45 |
| committer_date | 2022-09-18T20:44:45 |
| github_id | 474,773,752 |
| star_events_count | 3 |
| fork_events_count | 1 |
| gha_license_id | MIT |
| gha_event_created_at | 2022-11-03T20:07:50 |
| gha_created_at | 2022-03-27T22:21:01 |
| gha_language | Python |
| src_encoding | UTF-8 |
| language | Python |
| is_vendor | false |
| is_generated | false |
| length_bytes | 108,151 |
| extension | py |
content (a copy of NumPy's test_ufunc.py; the preview is truncated, and only the opening of the 108,151-byte file is shown here):

```python
import warnings
import itertools
import sys

import pytest

import numpy as np
import numpy.core._umath_tests as umt
import numpy.linalg._umath_linalg as uml
import numpy.core._operand_flag_tests as opflag_tests
import numpy.core._rational_tests as _rational_tests
from numpy.testing import (
    assert_, assert_equal, assert_raises, assert_array_equal,
    assert_almost_equal, assert_array_almost_equal, assert_no_warnings,
    assert_allclose, HAS_REFCOUNT, suppress_warnings
    )
from numpy.testing._private.utils import requires_memory
from numpy.compat import pickle


UNARY_UFUNCS = [obj for obj in np.core.umath.__dict__.values()
                if isinstance(obj, np.ufunc)]
UNARY_OBJECT_UFUNCS = [uf for uf in UNARY_UFUNCS if "O->O" in uf.types]


class TestUfuncKwargs:
    def test_kwarg_exact(self):
        assert_raises(TypeError, np.add, 1, 2, castingx='safe')
        assert_raises(TypeError, np.add, 1, 2, dtypex=int)
        assert_raises(TypeError, np.add, 1, 2, extobjx=[4096])
        assert_raises(TypeError, np.add, 1, 2, outx=None)
        assert_raises(TypeError, np.add, 1, 2, sigx='ii->i')
        assert_raises(TypeError, np.add, 1, 2, signaturex='ii->i')
        assert_raises(TypeError, np.add, 1, 2, subokx=False)
        assert_raises(TypeError, np.add, 1, 2, wherex=[True])

    def test_sig_signature(self):
        assert_raises(TypeError, np.add, 1, 2, sig='ii->i',
                      signature='ii->i')

    def test_sig_dtype(self):
        assert_raises(TypeError, np.add, 1, 2, sig='ii->i',
                      dtype=int)
        assert_raises(TypeError, np.add, 1, 2, signature='ii->i',
                      dtype=int)

    def test_extobj_refcount(self):
        # Should not segfault with USE_DEBUG.
        assert_raises(TypeError, np.add, 1, 2, extobj=[4096], parrot=True)

# ... remainder of the file (TestUfuncGenericLoops, TestUfunc, etc.) truncated in the preview
```
(use scalars, which forces legacy # type resolution to kick in, which then fails): with assert_raises(TypeError): _rational_tests.test_add(a, np.uint16(2)) def test_operand_flags(self): a = np.arange(16, dtype='l').reshape(4, 4) b = np.arange(9, dtype='l').reshape(3, 3) opflag_tests.inplace_add(a[:-1, :-1], b) assert_equal(a, np.array([[0, 2, 4, 3], [7, 9, 11, 7], [14, 16, 18, 11], [12, 13, 14, 15]], dtype='l')) a = np.array(0) opflag_tests.inplace_add(a, 3) assert_equal(a, 3) opflag_tests.inplace_add(a, [3, 4]) assert_equal(a, 10) def test_struct_ufunc(self): import numpy.core._struct_ufunc_tests as struct_ufunc a = np.array([(1, 2, 3)], dtype='u8,u8,u8') b = np.array([(1, 2, 3)], dtype='u8,u8,u8') result = struct_ufunc.add_triplet(a, b) assert_equal(result, np.array([(2, 4, 6)], dtype='u8,u8,u8')) assert_raises(RuntimeError, struct_ufunc.register_fail) def test_custom_ufunc(self): a = np.array( [_rational_tests.rational(1, 2), _rational_tests.rational(1, 3), _rational_tests.rational(1, 4)], dtype=_rational_tests.rational) b = np.array( [_rational_tests.rational(1, 2), _rational_tests.rational(1, 3), _rational_tests.rational(1, 4)], dtype=_rational_tests.rational) result = _rational_tests.test_add_rationals(a, b) expected = np.array( [_rational_tests.rational(1), _rational_tests.rational(2, 3), _rational_tests.rational(1, 2)], dtype=_rational_tests.rational) assert_equal(result, expected) def test_custom_ufunc_forced_sig(self): # gh-9351 - looking for a non-first userloop would previously hang with assert_raises(TypeError): np.multiply(_rational_tests.rational(1), 1, signature=(_rational_tests.rational, int, None)) def test_custom_array_like(self): class MyThing: __array_priority__ = 1000 rmul_count = 0 getitem_count = 0 def __init__(self, shape): self.shape = shape def __len__(self): return self.shape[0] def __getitem__(self, i): MyThing.getitem_count += 1 if not isinstance(i, tuple): i = (i,) if len(i) > self.ndim: raise IndexError("boo") return MyThing(self.shape[len(i):]) def __rmul__(self, other): MyThing.rmul_count += 1 return self np.float64(5)*MyThing((3, 3)) assert_(MyThing.rmul_count == 1, MyThing.rmul_count) assert_(MyThing.getitem_count <= 2, MyThing.getitem_count) def test_inplace_fancy_indexing(self): a = np.arange(10) np.add.at(a, [2, 5, 2], 1) assert_equal(a, [0, 1, 4, 3, 4, 6, 6, 7, 8, 9]) a = np.arange(10) b = np.array([100, 100, 100]) np.add.at(a, [2, 5, 2], b) assert_equal(a, [0, 1, 202, 3, 4, 105, 6, 7, 8, 9]) a = np.arange(9).reshape(3, 3) b = np.array([[100, 100, 100], [200, 200, 200], [300, 300, 300]]) np.add.at(a, (slice(None), [1, 2, 1]), b) assert_equal(a, [[0, 201, 102], [3, 404, 205], [6, 607, 308]]) a = np.arange(27).reshape(3, 3, 3) b = np.array([100, 200, 300]) np.add.at(a, (slice(None), slice(None), [1, 2, 1]), b) assert_equal(a, [[[0, 401, 202], [3, 404, 205], [6, 407, 208]], [[9, 410, 211], [12, 413, 214], [15, 416, 217]], [[18, 419, 220], [21, 422, 223], [24, 425, 226]]]) a = np.arange(9).reshape(3, 3) b = np.array([[100, 100, 100], [200, 200, 200], [300, 300, 300]]) np.add.at(a, ([1, 2, 1], slice(None)), b) assert_equal(a, [[0, 1, 2], [403, 404, 405], [206, 207, 208]]) a = np.arange(27).reshape(3, 3, 3) b = np.array([100, 200, 300]) np.add.at(a, (slice(None), [1, 2, 1], slice(None)), b) assert_equal(a, [[[0, 1, 2], [203, 404, 605], [106, 207, 308]], [[9, 10, 11], [212, 413, 614], [115, 216, 317]], [[18, 19, 20], [221, 422, 623], [124, 225, 326]]]) a = np.arange(9).reshape(3, 3) b = np.array([100, 200, 300]) np.add.at(a, (0, [1, 2, 1]), b) 
assert_equal(a, [[0, 401, 202], [3, 4, 5], [6, 7, 8]]) a = np.arange(27).reshape(3, 3, 3) b = np.array([100, 200, 300]) np.add.at(a, ([1, 2, 1], 0, slice(None)), b) assert_equal(a, [[[0, 1, 2], [3, 4, 5], [6, 7, 8]], [[209, 410, 611], [12, 13, 14], [15, 16, 17]], [[118, 219, 320], [21, 22, 23], [24, 25, 26]]]) a = np.arange(27).reshape(3, 3, 3) b = np.array([100, 200, 300]) np.add.at(a, (slice(None), slice(None), slice(None)), b) assert_equal(a, [[[100, 201, 302], [103, 204, 305], [106, 207, 308]], [[109, 210, 311], [112, 213, 314], [115, 216, 317]], [[118, 219, 320], [121, 222, 323], [124, 225, 326]]]) a = np.arange(10) np.negative.at(a, [2, 5, 2]) assert_equal(a, [0, 1, 2, 3, 4, -5, 6, 7, 8, 9]) # Test 0-dim array a = np.array(0) np.add.at(a, (), 1) assert_equal(a, 1) assert_raises(IndexError, np.add.at, a, 0, 1) assert_raises(IndexError, np.add.at, a, [], 1) # Test mixed dtypes a = np.arange(10) np.power.at(a, [1, 2, 3, 2], 3.5) assert_equal(a, np.array([0, 1, 4414, 46, 4, 5, 6, 7, 8, 9])) # Test boolean indexing and boolean ufuncs a = np.arange(10) index = a % 2 == 0 np.equal.at(a, index, [0, 2, 4, 6, 8]) assert_equal(a, [1, 1, 1, 3, 1, 5, 1, 7, 1, 9]) # Test unary operator a = np.arange(10, dtype='u4') np.invert.at(a, [2, 5, 2]) assert_equal(a, [0, 1, 2, 3, 4, 5 ^ 0xffffffff, 6, 7, 8, 9]) # Test empty subspace orig = np.arange(4) a = orig[:, None][:, 0:0] np.add.at(a, [0, 1], 3) assert_array_equal(orig, np.arange(4)) # Test with swapped byte order index = np.array([1, 2, 1], np.dtype('i').newbyteorder()) values = np.array([1, 2, 3, 4], np.dtype('f').newbyteorder()) np.add.at(values, index, 3) assert_array_equal(values, [1, 8, 6, 4]) # Test exception thrown values = np.array(['a', 1], dtype=object) assert_raises(TypeError, np.add.at, values, [0, 1], 1) assert_array_equal(values, np.array(['a', 1], dtype=object)) # Test multiple output ufuncs raise error, gh-5665 assert_raises(ValueError, np.modf.at, np.arange(10), [1]) # Test maximum a = np.array([1, 2, 3]) np.maximum.at(a, [0], 0) assert_equal(np.array([1, 2, 3]), a) def test_at_not_none_signature(self): # Test ufuncs with non-trivial signature raise a TypeError a = np.ones((2, 2, 2)) b = np.ones((1, 2, 2)) assert_raises(TypeError, np.matmul.at, a, [0], b) a = np.array([[[1, 2], [3, 4]]]) assert_raises(TypeError, np.linalg._umath_linalg.det.at, a, [0]) def test_reduce_arguments(self): f = np.add.reduce d = np.ones((5,2), dtype=int) o = np.ones((2,), dtype=d.dtype) r = o * 5 assert_equal(f(d), r) # a, axis=0, dtype=None, out=None, keepdims=False assert_equal(f(d, axis=0), r) assert_equal(f(d, 0), r) assert_equal(f(d, 0, dtype=None), r) assert_equal(f(d, 0, dtype='i'), r) assert_equal(f(d, 0, 'i'), r) assert_equal(f(d, 0, None), r) assert_equal(f(d, 0, None, out=None), r) assert_equal(f(d, 0, None, out=o), r) assert_equal(f(d, 0, None, o), r) assert_equal(f(d, 0, None, None), r) assert_equal(f(d, 0, None, None, keepdims=False), r) assert_equal(f(d, 0, None, None, True), r.reshape((1,) + r.shape)) assert_equal(f(d, 0, None, None, False, 0), r) assert_equal(f(d, 0, None, None, False, initial=0), r) assert_equal(f(d, 0, None, None, False, 0, True), r) assert_equal(f(d, 0, None, None, False, 0, where=True), r) # multiple keywords assert_equal(f(d, axis=0, dtype=None, out=None, keepdims=False), r) assert_equal(f(d, 0, dtype=None, out=None, keepdims=False), r) assert_equal(f(d, 0, None, out=None, keepdims=False), r) assert_equal(f(d, 0, None, out=None, keepdims=False, initial=0, where=True), r) # too little assert_raises(TypeError, f) # too 
much assert_raises(TypeError, f, d, 0, None, None, False, 0, True, 1) # invalid axis assert_raises(TypeError, f, d, "invalid") assert_raises(TypeError, f, d, axis="invalid") assert_raises(TypeError, f, d, axis="invalid", dtype=None, keepdims=True) # invalid dtype assert_raises(TypeError, f, d, 0, "invalid") assert_raises(TypeError, f, d, dtype="invalid") assert_raises(TypeError, f, d, dtype="invalid", out=None) # invalid out assert_raises(TypeError, f, d, 0, None, "invalid") assert_raises(TypeError, f, d, out="invalid") assert_raises(TypeError, f, d, out="invalid", dtype=None) # keepdims boolean, no invalid value # assert_raises(TypeError, f, d, 0, None, None, "invalid") # assert_raises(TypeError, f, d, keepdims="invalid", axis=0, dtype=None) # invalid mix assert_raises(TypeError, f, d, 0, keepdims="invalid", dtype="invalid", out=None) # invalid keyword assert_raises(TypeError, f, d, axis=0, dtype=None, invalid=0) assert_raises(TypeError, f, d, invalid=0) assert_raises(TypeError, f, d, 0, keepdims=True, invalid="invalid", out=None) assert_raises(TypeError, f, d, axis=0, dtype=None, keepdims=True, out=None, invalid=0) assert_raises(TypeError, f, d, axis=0, dtype=None, out=None, invalid=0) def test_structured_equal(self): # https://github.com/numpy/numpy/issues/4855 class MyA(np.ndarray): def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): return getattr(ufunc, method)(*(input.view(np.ndarray) for input in inputs), **kwargs) a = np.arange(12.).reshape(4,3) ra = a.view(dtype=('f8,f8,f8')).squeeze() mra = ra.view(MyA) target = np.array([ True, False, False, False], dtype=bool) assert_equal(np.all(target == (mra == ra[0])), True) def test_scalar_equal(self): # Scalar comparisons should always work, without deprecation warnings. # even when the ufunc fails. a = np.array(0.) b = np.array('a') assert_(a != b) assert_(b != a) assert_(not (a == b)) assert_(not (b == a)) def test_NotImplemented_not_returned(self): # See gh-5964 and gh-2091. Some of these functions are not operator # related and were fixed for other reasons in the past. binary_funcs = [ np.power, np.add, np.subtract, np.multiply, np.divide, np.true_divide, np.floor_divide, np.bitwise_and, np.bitwise_or, np.bitwise_xor, np.left_shift, np.right_shift, np.fmax, np.fmin, np.fmod, np.hypot, np.logaddexp, np.logaddexp2, np.maximum, np.minimum, np.mod, np.greater, np.greater_equal, np.less, np.less_equal, np.equal, np.not_equal] a = np.array('1') b = 1 c = np.array([1., 2.]) for f in binary_funcs: assert_raises(TypeError, f, a, b) assert_raises(TypeError, f, c, a) @pytest.mark.parametrize("ufunc", [np.logical_and, np.logical_or]) # logical_xor object loop is bad @pytest.mark.parametrize("signature", [(None, None, object), (object, None, None), (None, object, None)]) def test_logical_ufuncs_object_signatures(self, ufunc, signature): a = np.array([True, None, False], dtype=object) res = ufunc(a, a, signature=signature) assert res.dtype == object @pytest.mark.parametrize("ufunc", [np.logical_and, np.logical_or, np.logical_xor]) @pytest.mark.parametrize("signature", [(bool, None, object), (object, None, bool), (None, object, bool)]) def test_logical_ufuncs_mixed_object_signatures(self, ufunc, signature): # Most mixed signatures fail (except those with bool out, e.g. 
`OO->?`) a = np.array([True, None, False]) with pytest.raises(TypeError): ufunc(a, a, signature=signature) @pytest.mark.parametrize("ufunc", [np.logical_and, np.logical_or, np.logical_xor]) def test_logical_ufuncs_support_anything(self, ufunc): # The logical ufuncs support even input that can't be promoted: a = np.array(b'1', dtype="V3") c = np.array([1., 2.]) assert_array_equal(ufunc(a, c), ufunc([True, True], True)) assert ufunc.reduce(a) == True # check that the output has no effect: out = np.zeros(2, dtype=np.int32) expected = ufunc([True, True], True).astype(out.dtype) assert_array_equal(ufunc(a, c, out=out), expected) out = np.zeros((), dtype=np.int32) assert ufunc.reduce(a, out=out) == True # Last check, test reduction when out and a match (the complexity here # is that the "i,i->?" may seem right, but should not match. a = np.array([3], dtype="i") out = np.zeros((), dtype=a.dtype) assert ufunc.reduce(a, out=out) == 1 @pytest.mark.parametrize("ufunc", [np.logical_and, np.logical_or, np.logical_xor]) def test_logical_ufuncs_reject_string(self, ufunc): """ Logical ufuncs are normally well defined by working with the boolean equivalent, i.e. casting all inputs to bools should work. However, casting strings to bools is *currently* weird, because it actually uses `bool(int(str))`. Thus we explicitly reject strings. This test should succeed (and can probably just be removed) as soon as string to bool casts are well defined in NumPy. """ with pytest.raises(TypeError, match="contain a loop with signature"): ufunc(["1"], ["3"]) with pytest.raises(TypeError, match="contain a loop with signature"): ufunc.reduce(["1", "2", "0"]) @pytest.mark.parametrize("ufunc", [np.logical_and, np.logical_or, np.logical_xor]) def test_logical_ufuncs_out_cast_check(self, ufunc): a = np.array('1') c = np.array([1., 2.]) out = a.copy() with pytest.raises(TypeError): # It would be safe, but not equiv casting: ufunc(a, c, out=out, casting="equiv") def test_reducelike_byteorder_resolution(self): # See gh-20699, byte-order changes need some extra care in the type # resolution to make the following succeed: arr_be = np.arange(10, dtype=">i8") arr_le = np.arange(10, dtype="<i8") assert np.add.reduce(arr_be) == np.add.reduce(arr_le) assert_array_equal(np.add.accumulate(arr_be), np.add.accumulate(arr_le)) assert_array_equal( np.add.reduceat(arr_be, [1]), np.add.reduceat(arr_le, [1])) def test_reducelike_out_promotes(self): # Check that the out argument to reductions is considered for # promotion. See also gh-20455. # Note that these paths could prefer `initial=` in the future and # do not up-cast to the default integer for add and prod arr = np.ones(1000, dtype=np.uint8) out = np.zeros((), dtype=np.uint16) assert np.add.reduce(arr, out=out) == 1000 arr[:10] = 2 assert np.multiply.reduce(arr, out=out) == 2**10 # For legacy dtypes, the signature currently has to be forced if `out=` # is passed. The two paths below should differ, without `dtype=` the # expected result should be: `np.prod(arr.astype("f8")).astype("f4")`! arr = np.full(5, 2**25-1, dtype=np.int64) # float32 and int64 promote to float64: res = np.zeros((), dtype=np.float32) # If `dtype=` is passed, the calculation is forced to float32: single_res = np.zeros((), dtype=np.float32) np.multiply.reduce(arr, out=single_res, dtype=np.float32) assert single_res != res def test_reducelike_output_needs_identical_cast(self): # Checks the case where the we have a simple byte-swap works, maily # tests that this is not rejected directly. 
# (interesting because we require descriptor identity in reducelikes). arr = np.ones(20, dtype="f8") out = np.empty((), dtype=arr.dtype.newbyteorder()) expected = np.add.reduce(arr) np.add.reduce(arr, out=out) assert_array_equal(expected, out) # Check reduceat: out = np.empty(2, dtype=arr.dtype.newbyteorder()) expected = np.add.reduceat(arr, [0, 1]) np.add.reduceat(arr, [0, 1], out=out) assert_array_equal(expected, out) # And accumulate: out = np.empty(arr.shape, dtype=arr.dtype.newbyteorder()) expected = np.add.accumulate(arr) np.add.accumulate(arr, out=out) assert_array_equal(expected, out) def test_reduce_noncontig_output(self): # Check that reduction deals with non-contiguous output arrays # appropriately. # # gh-8036 x = np.arange(7*13*8, dtype=np.int16).reshape(7, 13, 8) x = x[4:6,1:11:6,1:5].transpose(1, 2, 0) y_base = np.arange(4*4, dtype=np.int16).reshape(4, 4) y = y_base[::2,:] y_base_copy = y_base.copy() r0 = np.add.reduce(x, out=y.copy(), axis=2) r1 = np.add.reduce(x, out=y, axis=2) # The results should match, and y_base shouldn't get clobbered assert_equal(r0, r1) assert_equal(y_base[1,:], y_base_copy[1,:]) assert_equal(y_base[3,:], y_base_copy[3,:]) @pytest.mark.parametrize("with_cast", [True, False]) def test_reduceat_and_accumulate_out_shape_mismatch(self, with_cast): # Should raise an error mentioning "shape" or "size" arr = np.arange(5) out = np.arange(3) # definitely wrong shape if with_cast: # If a cast is necessary on the output, we can be sure to use # the generic NpyIter (non-fast) path. out = out.astype(np.float64) with pytest.raises(ValueError, match="(shape|size)"): np.add.reduceat(arr, [0, 3], out=out) with pytest.raises(ValueError, match="(shape|size)"): np.add.accumulate(arr, out=out) @pytest.mark.parametrize('out_shape', [(), (1,), (3,), (1, 1), (1, 3), (4, 3)]) @pytest.mark.parametrize('keepdims', [True, False]) @pytest.mark.parametrize('f_reduce', [np.add.reduce, np.minimum.reduce]) def test_reduce_wrong_dimension_output(self, f_reduce, keepdims, out_shape): # Test that we're not incorrectly broadcasting dimensions. # See gh-15144 (failed for np.add.reduce previously). 
a = np.arange(12.).reshape(4, 3) out = np.empty(out_shape, a.dtype) correct_out = f_reduce(a, axis=0, keepdims=keepdims) if out_shape != correct_out.shape: with assert_raises(ValueError): f_reduce(a, axis=0, out=out, keepdims=keepdims) else: check = f_reduce(a, axis=0, out=out, keepdims=keepdims) assert_(check is out) assert_array_equal(check, correct_out) def test_reduce_output_does_not_broadcast_input(self): # Test that the output shape cannot broadcast an input dimension # (it never can add dimensions, but it might expand an existing one) a = np.ones((1, 10)) out_correct = (np.empty((1, 1))) out_incorrect = np.empty((3, 1)) np.add.reduce(a, axis=-1, out=out_correct, keepdims=True) np.add.reduce(a, axis=-1, out=out_correct[:, 0], keepdims=False) with assert_raises(ValueError): np.add.reduce(a, axis=-1, out=out_incorrect, keepdims=True) with assert_raises(ValueError): np.add.reduce(a, axis=-1, out=out_incorrect[:, 0], keepdims=False) def test_reduce_output_subclass_ok(self): class MyArr(np.ndarray): pass out = np.empty(()) np.add.reduce(np.ones(5), out=out) # no subclass, all fine out = out.view(MyArr) assert np.add.reduce(np.ones(5), out=out) is out assert type(np.add.reduce(out)) is MyArr def test_no_doc_string(self): # gh-9337 assert_('\n' not in umt.inner1d_no_doc.__doc__) def test_invalid_args(self): # gh-7961 exc = pytest.raises(TypeError, np.sqrt, None) # minimally check the exception text assert exc.match('loop of ufunc does not support') @pytest.mark.parametrize('nat', [np.datetime64('nat'), np.timedelta64('nat')]) def test_nat_is_not_finite(self, nat): try: assert not np.isfinite(nat) except TypeError: pass # ok, just not implemented @pytest.mark.parametrize('nat', [np.datetime64('nat'), np.timedelta64('nat')]) def test_nat_is_nan(self, nat): try: assert np.isnan(nat) except TypeError: pass # ok, just not implemented @pytest.mark.parametrize('nat', [np.datetime64('nat'), np.timedelta64('nat')]) def test_nat_is_not_inf(self, nat): try: assert not np.isinf(nat) except TypeError: pass # ok, just not implemented @pytest.mark.parametrize('ufunc', [getattr(np, x) for x in dir(np) if isinstance(getattr(np, x), np.ufunc)]) def test_ufunc_types(ufunc): ''' Check all ufuncs that the correct type is returned. Avoid object and boolean types since many operations are not defined for for them. Choose the shape so even dot and matmul will succeed ''' for typ in ufunc.types: # types is a list of strings like ii->i if 'O' in typ or '?' 
in typ: continue inp, out = typ.split('->') args = [np.ones((3, 3), t) for t in inp] with warnings.catch_warnings(record=True): warnings.filterwarnings("always") res = ufunc(*args) if isinstance(res, tuple): outs = tuple(out) assert len(res) == len(outs) for r, t in zip(res, outs): assert r.dtype == np.dtype(t) else: assert res.dtype == np.dtype(out) @pytest.mark.parametrize('ufunc', [getattr(np, x) for x in dir(np) if isinstance(getattr(np, x), np.ufunc)]) def test_ufunc_noncontiguous(ufunc): ''' Check that contiguous and non-contiguous calls to ufuncs have the same results for values in range(9) ''' for typ in ufunc.types: # types is a list of strings like ii->i if any(set('O?mM') & set(typ)): # bool, object, datetime are too irregular for this simple test continue inp, out = typ.split('->') args_c = [np.empty(6, t) for t in inp] args_n = [np.empty(18, t)[::3] for t in inp] for a in args_c: a.flat = range(1,7) for a in args_n: a.flat = range(1,7) with warnings.catch_warnings(record=True): warnings.filterwarnings("always") res_c = ufunc(*args_c) res_n = ufunc(*args_n) if len(out) == 1: res_c = (res_c,) res_n = (res_n,) for c_ar, n_ar in zip(res_c, res_n): dt = c_ar.dtype if np.issubdtype(dt, np.floating): # for floating point results allow a small fuss in comparisons # since different algorithms (libm vs. intrinsics) can be used # for different input strides res_eps = np.finfo(dt).eps tol = 2*res_eps assert_allclose(res_c, res_n, atol=tol, rtol=tol) else: assert_equal(c_ar, n_ar) @pytest.mark.parametrize('ufunc', [np.sign, np.equal]) def test_ufunc_warn_with_nan(ufunc): # issue gh-15127 # test that calling certain ufuncs with a non-standard `nan` value does not # emit a warning # `b` holds a 64 bit signaling nan: the most significant bit of the # significand is zero. b = np.array([0x7ff0000000000001], 'i8').view('f8') assert np.isnan(b) if ufunc.nin == 1: ufunc(b) elif ufunc.nin == 2: ufunc(b, b.copy()) else: raise ValueError('ufunc with more than 2 inputs') @pytest.mark.skipif(not HAS_REFCOUNT, reason="Python lacks refcounts") def test_ufunc_casterrors(): # Tests that casting errors are correctly reported and buffers are # cleared. # The following array can be added to itself as an object array, but # the result cannot be cast to an integer output: value = 123 # relies on python cache (leak-check will still find it) arr = np.array([value] * int(np.BUFSIZE * 1.5) + ["string"] + [value] * int(1.5 * np.BUFSIZE), dtype=object) out = np.ones(len(arr), dtype=np.intp) count = sys.getrefcount(value) with pytest.raises(ValueError): # Output casting failure: np.add(arr, arr, out=out, casting="unsafe") assert count == sys.getrefcount(value) # output is unchanged after the error, this shows that the iteration # was aborted (this is not necessarily defined behaviour) assert out[-1] == 1 with pytest.raises(ValueError): # Input casting failure: np.add(arr, arr, out=out, dtype=np.intp, casting="unsafe") assert count == sys.getrefcount(value) # output is unchanged after the error, this shows that the iteration # was aborted (this is not necessarily defined behaviour) assert out[-1] == 1 def test_trivial_loop_invalid_cast(): # This tests the fast-path "invalid cast", see gh-19904. 
with pytest.raises(TypeError, match="cast ufunc 'add' input 0"): # the void dtype definitely cannot cast to double: np.add(np.array(1, "i,i"), 3, signature="dd->d") @pytest.mark.skipif(not HAS_REFCOUNT, reason="Python lacks refcounts") @pytest.mark.parametrize("offset", [0, np.BUFSIZE//2, int(1.5*np.BUFSIZE)]) def test_reduce_casterrors(offset): # Test reporting of casting errors in reductions, we test various # offsets to where the casting error will occur, since these may occur # at different places during the reduction procedure. For example # the first item may be special. value = 123 # relies on python cache (leak-check will still find it) arr = np.array([value] * offset + ["string"] + [value] * int(1.5 * np.BUFSIZE), dtype=object) out = np.array(-1, dtype=np.intp) count = sys.getrefcount(value) with pytest.raises(ValueError, match="invalid literal"): # This is an unsafe cast, but we currently always allow that. # Note that the double loop is picked, but the cast fails. np.add.reduce(arr, dtype=np.intp, out=out) assert count == sys.getrefcount(value) # If an error occurred during casting, the operation is done at most until # the error occurs (the result of which would be `value * offset`) and -1 # if the error happened immediately. # This does not define behaviour, the output is invalid and thus undefined assert out[()] < value * offset @pytest.mark.parametrize("method", [np.add.accumulate, np.add.reduce, pytest.param(lambda x: np.add.reduceat(x, [0]), id="reduceat"), pytest.param(lambda x: np.log.at(x, [2]), id="at")]) def test_ufunc_methods_floaterrors(method): # adding inf and -inf (or log(-inf) creates an invalid float and warns arr = np.array([np.inf, 0, -np.inf]) with np.errstate(all="warn"): with pytest.warns(RuntimeWarning, match="invalid value"): method(arr) arr = np.array([np.inf, 0, -np.inf]) with np.errstate(all="raise"): with pytest.raises(FloatingPointError): method(arr) def _check_neg_zero(value): if value != 0.0: return False if not np.signbit(value.real): return False if value.dtype.kind == "c": return np.signbit(value.imag) return True @pytest.mark.parametrize("dtype", np.typecodes["AllFloat"]) def test_addition_negative_zero(dtype): dtype = np.dtype(dtype) if dtype.kind == "c": neg_zero = dtype.type(complex(-0.0, -0.0)) else: neg_zero = dtype.type(-0.0) arr = np.array(neg_zero) arr2 = np.array(neg_zero) assert _check_neg_zero(arr + arr2) # In-place ops may end up on a different path (reduce path) see gh-21211 arr += arr2 assert _check_neg_zero(arr) @pytest.mark.parametrize("dtype", np.typecodes["AllFloat"]) @pytest.mark.parametrize("use_initial", [True, False]) def test_addition_reduce_negative_zero(dtype, use_initial): dtype = np.dtype(dtype) if dtype.kind == "c": neg_zero = dtype.type(complex(-0.0, -0.0)) else: neg_zero = dtype.type(-0.0) kwargs = {} if use_initial: kwargs["initial"] = neg_zero else: pytest.xfail("-0. propagation in sum currently requires initial") # Test various length, in case SIMD paths or chunking play a role. # 150 extends beyond the pairwise blocksize; probably not important. for i in range(0, 150): arr = np.array([neg_zero] * i, dtype=dtype) res = np.sum(arr, **kwargs) if i > 0 or use_initial: assert _check_neg_zero(res) else: # `sum([])` should probably be 0.0 and not -0.0 like `sum([-0.0])` assert not np.signbit(res.real) assert not np.signbit(res.imag)
5e7189bd3c62d63d5b04c748cb59bb1f8a85acb2
5ea9a3185b8abbf536600bde73ffa5293c76913d
/django_storage/urls.py
5ca910b69f1f7e96a9de80008f0cb58e5e6322ad
[]
no_license
antikytheraton/django_storage
fd3fcaaeb93d236e2cc626e2326a8909f8fad488
7c3f8258f3a558ab99506b160659a824053db700
refs/heads/master
2021-07-08T19:17:46.690089
2017-10-07T02:16:20
2017-10-07T02:16:20
106,065,308
0
0
null
null
null
null
UTF-8
Python
false
false
1,076
py
"""django_storage URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from django.views.generic import TemplateView # -------------------------------------------- from home.views import DocumentCreateView urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', TemplateView.as_view(template_name='home.html'), name='home'), url(r'^upload/$', DocumentCreateView.as_view(template_name='form.html'), name='upload') ]
83b555f16126a27acc97627eb10cd3415912555f
c124cd627d1cd2ecc2056a932db4c5c3203943f2
/data/atramData/sites/umms/components/umms_appoint/recruitment_section/items/identifycandidate.py
15b936079177324e9747e3b5eadd4ee90c2449b5
[]
no_license
longooglite/mps
8fb2093b6a9f483a2ce4543949f7cbf0b280a1f1
fd8c0d1491b80074fdf5a8c923d50e55a1991ad0
refs/heads/master
2021-01-10T08:17:15.852252
2016-02-29T21:07:04
2016-02-29T21:07:04
52,824,830
0
0
null
null
null
null
UTF-8
Python
false
false
1,858
py
# [Copyright]
# SmartPath v1.0
# Copyright 2014-2015 Mountain Pass Solutions, Inc.
# This unpublished material is proprietary to Mountain Pass Solutions, Inc.
# [End Copyright]

identifycandidate = {
    "code": "identifycandidate",
    "descr": "Identify Candidate",
    "header": "Identify Candidate",
    "componentType": "Task",
    "affordanceType":"Item",
    "optional": False,
    "enabled": True,
    "logEnabled": True,
    "freezable": True,
    "overviewOnly": False,
    "accessPermissions": ["dept_task"],
    "viewPermissions": ["dept_task","ofa_task","mss_task"],
    "blockers": ["jobposting"],
    "statusMsg": "Candidate Identified",
    "successMsg":"Candidate information saved",
    "className": "IdentifyCandidate",
    "config": {
        "dashboardEvents": [{
            "code":"rfpapproved",
            "eventType":"remove",
        },{
            "code":"jopposted",
            "eventType":"remove",
        },{
            "code":"readyforjobposting",
            "eventType":"remove",
        }],
        "prompts": [
            {
                "code": "username",
                "label": "Username",
                "enabled": False,
                "required": False,
                "ldapsearch": False,
            },
            {
                "code": "first_name",
                "label": "First Name",
                "enabled": True,
                "required": True,
                "ldapfield": "givenName",
            },
            {
                "code": "middle_name",
                "label": "Middle Name",
                "enabled": True,
                "required": False,
            },
            {
                "code": "last_name",
                "label": "Last Name",
                "enabled": True,
                "required": True,
                "ldapfield": "sn",
            },
            {
                "code": "suffix",
                "label": "Suffix",
                "enabled": True,
                "required": False,
            },
            {
                "code": "email",
                "label": "Email",
                "enabled": True,
                "required": False,
                "ldapfield": "mail",
            },
            {
                "code": "employee_nbr",
                "label": "Employee Nbr",
                "enabled": False,
                "required": False,
            },
        ],
        "activityLog": {
            "enabled": True,
            "activityLogText": "Candidate Identified",
        },
    },
}
870eacbbe2dc97704de593953b693029ce772637
c220d55a0a5c7597fe7e86a3dfebdd66695a3b2f
/Python/text_algnment.py
556a0ff4be82b66fddeb3ca1b3909fe9021529af
[]
no_license
eLtronicsVilla/Hackerrank-Problems-Solutions
356677a2edce6f5d3f57e5f32a8be058515779bf
a24c78f99f10fb8dca69e0e0d6c560d7c0215a29
refs/heads/master
2020-05-21T18:15:21.893538
2019-05-18T07:54:41
2019-05-18T07:54:41
186,129,319
0
0
null
2019-05-18T07:54:42
2019-05-11T12:19:33
null
UTF-8
Python
false
false
908
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat May 18 12:29:44 2019

@author: brgupta
"""
# Problem statement
# https://www.hackerrank.com/challenges/text-alignment/problem

#Replace all ______ with rjust, ljust or center.

thickness = int(input()) #This must be an odd number
c = 'H'

#Top Cone
for i in range(thickness):
    print((c*i).rjust(thickness-1)+c+(c*i).ljust(thickness-1))

#Top Pillars
for i in range(thickness+1):
    print((c*thickness).center(thickness*2)+(c*thickness).center(thickness*6))

#Middle Belt
for i in range((thickness+1)//2):
    print((c*thickness*5).center(thickness*6))

#Bottom Pillars
for i in range(thickness+1):
    print((c*thickness).center(thickness*2)+(c*thickness).center(thickness*6))

#Bottom Cone
for i in range(thickness):
    print(((c*(thickness-i-1)).rjust(thickness)+c+(c*(thickness-i-1)).ljust(thickness)).rjust(thickness*6))
c46cfb66f6bcb2d0f920aa611e165abe7fe4d9be
b2e278f6d606ec0d3e6fa3e15be2f9ed35745c1e
/ncolony/beatcheck.py
7461221c187dfa2df8febf60a32d24ac340ac807
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
kurtbrose/ncolony
deeaf2c1947aa11fcdad00f9071bc3e8067f026e
bebbc612866a8bf405dda2ec94ce60fd61b4f3c9
refs/heads/master
2023-08-18T08:56:58.777571
2017-09-19T03:43:27
2017-09-19T03:43:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,824
py
# Copyright (c) Moshe Zadka
# See LICENSE for details.

"""ncolony.beatcheck
====================

Check heartbeats of processes that should beat.

Usually used as

$ twistd -n ncolony_beatcheck --config config --messages messages

It will watch the configurations, and send a restart message for any
process that does not beat within its heartbeat. Processes are
encouraged to try and beat about 3-4 times faster than the minimum,
so that they can miss one beat, and account for slight timer
inaccuracies, and still not be considered unhealthy.
"""

import functools
import json
import time

from twisted.python import filepath, usage
from twisted.application import internet as tainternet

from ncolony import ctllib
from ncolony.client import heart


def check(path, start, now):
    """check which processes need to be restarted

    :params path: a twisted.python.filepath.FilePath with configurations
    :params start: when the checker started running
    :params now: current time
    :returns: list of strings
    """
    return [child.basename() for child in path.children()
            if _isbad(child, start, now)]


def _isbad(child, start, now):
    content = child.getContent()
    parsed = json.loads(content)
    params = parsed.get('ncolony.beatcheck')
    if params is None:
        return False
    period = params['period']
    grace = params['grace']
    mtime = max(child.getModificationTime(), start)
    if mtime + period*grace >= now:
        return False
    status = params['status']
    statusPath = child.clonePath(status)
    if not statusPath.exists():
        return True
    if statusPath.isdir():
        statusPath = statusPath.child(child.basename())
    statusMtime = statusPath.getModificationTime()
    return (statusMtime + period) < now


def run(restarter, checker, timer):
    """Run restarter on the checker's output

    :params restarter: something to run on the output of the checker
    :params checker: a function expected to get one argument (current time)
                     and return a list of stale names
    :params timer: a function of zero arguments, intended to return current time
    :returns: None
    """
    for bad in checker(timer()):
        restarter(bad)


def parseConfig(opt):
    """Parse configuration

    :params opt: dict-like object with config and messages keys
    :returns: restarter, path
    """
    places = ctllib.Places(config=opt['config'], messages=opt['messages'])
    restarter = functools.partial(ctllib.restart, places)
    path = filepath.FilePath(opt['config'])
    return restarter, path


def makeService(opt):
    """Make a service

    :params opt: dictionary-like object with 'freq', 'config' and 'messages'
    :returns: twisted.application.internet.TimerService that at opt['freq']
              checks for stale processes in opt['config'], and sends restart
              messages through opt['messages']
    """
    restarter, path = parseConfig(opt)
    now = time.time()
    checker = functools.partial(check, path, now)
    beatcheck = tainternet.TimerService(opt['freq'], run, restarter, checker, time.time)
    beatcheck.setName('beatcheck')
    return heart.wrapHeart(beatcheck)

## pylint: disable=too-few-public-methods


class Options(usage.Options):

    """Options for ncolony beatcheck service"""

    optParameters = [
        ["messages", None, None, "Directory for messages"],
        ["config", None, None, "Directory for configuration"],
        ["freq", None, 10, "Frequency of checking for updates", float],
    ]

    def postOptions(self):
        """Checks that required messages/config directories are present"""
        for param in ('messages', 'config'):
            if self[param] is None:
                raise usage.UsageError("Missing required", param)

## pylint: enable=too-few-public-methods
423f1675d5bcef619a2c564e602dc00a23745bdc
60d9f0ea7764b67b8e2f5b187f9bd98be0ddd93a
/scripts/s3_sed.py
499e01ac11207255f75bca28098b09e9e2fd744b
[ "Apache-2.0" ]
permissive
omad/dratools
252136d972a750a228c5d84c3c95293d671a3145
17d81dd5e496c5539b0613f4bf25655230bd9f4f
refs/heads/master
2023-02-03T10:36:52.677072
2023-01-19T23:01:16
2023-01-19T23:01:16
184,683,843
0
0
null
null
null
null
UTF-8
Python
false
false
1,029
py
import click
from odc.aws import s3_client, s3_fetch, s3_dump
from tqdm import tqdm

s3 = None


@click.command('s3-find')
@click.option('--no-sign-request', is_flag=True, help='Do not sign AWS S3 requests')
@click.argument('file_list', type=click.File('r'), nargs=1)
def cli(file_list, no_sign_request=None):
    global s3
    s3 = s3_client(aws_unsigned=no_sign_request)

    urls = [line.rstrip() for line in file_list.readlines()]

    for url in tqdm(urls):
        if not url:
            continue
        tqdm.write(f"Updating {url}", end='')
        replace_in_s3_obj(url)


def replace_in_s3_obj(s3_url):
    try:
        original = s3_fetch(s3_url, s3)
    except ValueError as e:
        tqdm.write(str(e))
        return
    contents = original.replace(b'LANDSAT_8', b'LANDSAT_7')
    contents = contents.replace(b'OLI', b'ETM')

    if original != contents:
        s3_dump(contents, s3_url, s3)
        tqdm.write('.')
    else:
        tqdm.write(' - Skipped.')


if __name__ == '__main__':
    cli()
cae804eeca224b7c810f2ca72e04cb19244e2022
6219e6536774e8eeb4cadc4a84f6f2bea376c1b0
/scraper/storage_spiders/vietlongplazacomvn.py
beadae51c3d65808932018124f7c2fae1011fb27
[ "MIT" ]
permissive
nguyenminhthai/choinho
109d354b410b92784a9737f020894d073bea1534
d2a216fe7a5064d73cdee3e928a7beef7f511fd1
refs/heads/master
2023-05-07T16:51:46.667755
2019-10-22T07:53:41
2019-10-22T07:53:41
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,019
py
# Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor

XPATH = {
    'name' : "//td[@id='pro-detail-col-info']/div[@id='product-detail-name']/h1",
    'price' : "//div[@class='product-list-price']/p[@class='cssPrice']/font",
    'category' : "//div[@id='categoryPath']/a",
    'description' : "//div[@id='pro-box-2']/div[4]/div[@id='pro_content_desc']",
    'images' : "//table//tr/td[@id='pro_big']/img/@src",
    'canonical' : "",
    'base_url' : "",
    'brand' : ""
}
name = 'vietlongplaza.com.vn'
allowed_domains = ['vietlongplaza.com.vn']
start_urls = ['http://www.vietlongplaza.com.vn/default.aspx']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
rules = [
    Rule(LinkExtractor(allow=['/product+-\d+/']), 'parse_item'),
    Rule(LinkExtractor(allow=['/category+-\d+/'], deny=['\?','Filter=']), 'parse'),
    #Rule(LinkExtractor(), 'parse_item_and_links'),
]
d355d73ff3bc201e202f27d27a78be42b0db7872
8941c8ca788b1a45bfad23ca26ebfa357c13f09b
/Lyceum/Mars_Sql_Alchemy/zapros4.py
1b12b239fe471f0a4b6e70c4256bc310a8a2b2bc
[]
no_license
MysteriousSonOfGod/Python-2
d1dfdf094f4a763758bfc7e1777c2cd6efbd0809
0d488906e4b5e3897da6b7cb077815740e82fd84
refs/heads/master
2023-02-05T13:38:25.673248
2020-12-22T13:54:02
2020-12-22T13:54:02
null
0
0
null
null
null
null
UTF-8
Python
false
false
329
py
from data import db_session
from data import users

db = input()
db_session.global_init(db)
session = db_session.create_session()
users = session.query(users.User).filter((users.User.position.like("%chief%") | users.User.position.like("%middle%")))
for user in users:
    print(f'{user} {user.position}')

# db/mars_explorer.db
442ee1ed35e53bdf671ddf356b0bf7274dddb5a8
1beb0d3a73a97c5367cc54d37b34a7536b975d68
/practice/morethread.py
37fdcd18909e15f831a7ec9abf022bb055e2f262
[]
no_license
Hardworking-tester/HuaYing
a24aa271afe81c95241818586b1d1d5abd6b4282
4dd065806f20bfdec885fa2b40f2c22e5a8d4f15
refs/heads/master
2021-06-03T10:06:33.604494
2017-06-22T09:32:13
2017-06-22T09:32:13
42,507,030
0
0
null
null
null
null
UTF-8
Python
false
false
722
py
# encoding:utf-8
# author:wwg
from selenium import webdriver
import threading
import time


class MyThread(threading.Thread):
    def __init__(self,num):
        threading.Thread.__init__(self)
        self.num=num

    def run(self):
        start=time.time()
        br=webdriver.Firefox()
        br.get("https://www.baidu.com")
        time.sleep(4)
        br.find_element_by_id("kw").send_keys("wwg")
        br.find_element_by_id("su").click()
        br.quit()
        end=time.time()
        print u'Thread Object(%d), Time:%s\n,耗时%s s' % (self.num, time.ctime(),(end-start))


def test():
    for i in range(1,10):
        t = MyThread(i)
        t.start()
        t.join()


if __name__=="__main__":
    test()
04628def6f79e73ee5273d9991b2f50dc87b56f5
d5552cda58e251e6a5983876681be8f641dea86f
/src/transformers/models/m2m_100/modeling_m2m_100.py
f7ef189a155d636e6620922218e36aa244c6899c
[ "Apache-2.0" ]
permissive
patrickvonplaten/transformers
feb121e1ee82c317ac7561836b8f95a7de25fc1f
f738502979f6787609dcf0180e6606f464692e27
refs/heads/master
2022-12-08T10:15:34.743198
2022-11-22T11:00:20
2022-11-22T11:00:20
226,201,271
6
1
Apache-2.0
2019-12-05T22:39:46
2019-12-05T22:39:45
null
UTF-8
Python
false
false
65,124
py
# coding=utf-8 # Copyright 2021 The Fairseq Authors and The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PyTorch M2M100 model.""" import math import random from typing import List, Optional, Tuple, Union import torch from torch import nn from torch.nn import CrossEntropyLoss from ...activations import ACT2FN from ...deepspeed import is_deepspeed_zero3_enabled from ...modeling_outputs import ( BaseModelOutput, BaseModelOutputWithPastAndCrossAttentions, Seq2SeqLMOutput, Seq2SeqModelOutput, ) from ...modeling_utils import PreTrainedModel from ...utils import ( add_code_sample_docstrings, add_end_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_m2m_100 import M2M100Config logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = "M2M100Config" _TOKENIZER_FOR_DOC = "M2M100Tokenizer" _CHECKPOINT_FOR_DOC = "facebook/m2m100_418M" M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST = [ "facebook/m2m100_418M", # See all M2M100 models at https://huggingface.co/models?filter=m2m_100 ] # Copied from transformers.models.bart.modeling_bart.shift_tokens_right def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int): """ Shift input ids one token to the right. """ shifted_input_ids = input_ids.new_zeros(input_ids.shape) shifted_input_ids[:, 1:] = input_ids[:, :-1].clone() shifted_input_ids[:, 0] = decoder_start_token_id if pad_token_id is None: raise ValueError("self.model.config.pad_token_id has to be defined.") # replace possible -100 values in labels by `pad_token_id` shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id) return shifted_input_ids # Copied from transformers.models.bart.modeling_bart._make_causal_mask def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0): """ Make causal mask used for bi-directional self-attention. """ bsz, tgt_len = input_ids_shape mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min)) mask_cond = torch.arange(mask.size(-1)) mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) mask = mask.to(dtype) if past_key_values_length > 0: mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1) return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length) # Copied from transformers.models.bart.modeling_bart._expand_mask def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): """ Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. 
""" bsz, src_len = mask.size() tgt_len = tgt_len if tgt_len is not None else src_len expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) inverted_mask = 1.0 - expanded_mask return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min) def create_position_ids_from_input_ids(input_ids, padding_idx, past_key_values_length=0): """ Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding symbols are ignored. This is modified from fairseq's `utils.make_positions`. """ # The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA. mask = input_ids.ne(padding_idx).int() incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask return incremental_indices.long() + padding_idx class M2M100SinusoidalPositionalEmbedding(nn.Module): """This module produces sinusoidal positional embeddings of any length.""" def __init__(self, num_positions: int, embedding_dim: int, padding_idx: Optional[int] = None): super().__init__() self.offset = 2 self.embedding_dim = embedding_dim self.padding_idx = padding_idx self.make_weights(num_positions + self.offset, embedding_dim, padding_idx) def make_weights(self, num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None): emb_weights = self.get_embedding(num_embeddings, embedding_dim, padding_idx) if hasattr(self, "weights"): # in forward put the weights on the correct dtype and device of the param emb_weights = emb_weights.to(dtype=self.weights.dtype, device=self.weights.device) self.register_buffer("weights", emb_weights) @staticmethod def get_embedding(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None): """ Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the description in Section 3.5 of "Attention Is All You Need". """ half_dim = embedding_dim // 2 emb = math.log(10000) / (half_dim - 1) emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb) emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0) emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1) if embedding_dim % 2 == 1: # zero pad emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1) if padding_idx is not None: emb[padding_idx, :] = 0 return emb.to(torch.get_default_dtype()) @torch.no_grad() def forward( self, input_ids: torch.Tensor = None, inputs_embeds: torch.Tensor = None, past_key_values_length: int = 0 ): if input_ids is not None: bsz, seq_len = input_ids.size() # Create the position ids from the input token ids. Any padded tokens remain padded. position_ids = create_position_ids_from_input_ids(input_ids, self.padding_idx, past_key_values_length).to( input_ids.device ) else: bsz, seq_len = inputs_embeds.size()[:-1] position_ids = self.create_position_ids_from_inputs_embeds(inputs_embeds, past_key_values_length) # expand embeddings if needed max_pos = self.padding_idx + 1 + seq_len + past_key_values_length if max_pos > self.weights.size(0): self.make_weights(max_pos + self.offset, self.embedding_dim, self.padding_idx) return self.weights.index_select(0, position_ids.view(-1)).view(bsz, seq_len, -1).detach() def create_position_ids_from_inputs_embeds(self, inputs_embeds, past_key_values_length): """ We are provided embeddings directly. We cannot infer which are padded so just generate sequential position ids. 
Args: inputs_embeds: torch.Tensor Returns: torch.Tensor """ input_shape = inputs_embeds.size()[:-1] sequence_length = input_shape[1] position_ids = torch.arange( self.padding_idx + 1, sequence_length + self.padding_idx + 1, dtype=torch.long, device=inputs_embeds.device ) return position_ids.unsqueeze(0).expand(input_shape).contiguous() + past_key_values_length # Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->M2M100 class M2M100Attention(nn.Module): """Multi-headed attention from 'Attention Is All You Need' paper""" def __init__( self, embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder: bool = False, bias: bool = True, ): super().__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = dropout self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" f" and `num_heads`: {num_heads})." ) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() def forward( self, hidden_states: torch.Tensor, key_value_states: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: """Input shape: Batch x Time x Channel""" # if key_value_states are provided this layer is used as a cross-attention layer # for the decoder is_cross_attention = key_value_states is not None bsz, tgt_len, _ = hidden_states.size() # get query proj query_states = self.q_proj(hidden_states) * self.scaling # get key, value proj if is_cross_attention and past_key_value is not None: # reuse k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = torch.cat([past_key_value[0], key_states], dim=2) value_states = torch.cat([past_key_value[1], value_states], dim=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of # all previous decoder key/value_states. 
Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape) key_states = key_states.view(*proj_shape) value_states = value_states.view(*proj_shape) src_len = key_states.size(1) attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): raise ValueError( f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is" f" {attn_weights.size()}" ) if attention_mask is not None: if attention_mask.size() != (bsz, 1, tgt_len, src_len): raise ValueError( f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}" ) attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) attn_weights = nn.functional.softmax(attn_weights, dim=-1) if layer_head_mask is not None: if layer_head_mask.size() != (self.num_heads,): raise ValueError( f"Head mask for a single layer should be of size {(self.num_heads,)}, but is" f" {layer_head_mask.size()}" ) attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) if output_attentions: # this operation is a bit awkward, but it's required to # make sure that attn_weights keeps its gradient. # In order to do so, attn_weights have to be reshaped # twice and have to be reused in the following attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len) else: attn_weights_reshaped = None attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training) attn_output = torch.bmm(attn_probs, value_states) if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): raise ValueError( f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is" f" {attn_output.size()}" ) attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim) attn_output = attn_output.transpose(1, 2) # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be # partitioned aross GPUs when using tensor-parallelism. 
attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) attn_output = self.out_proj(attn_output) return attn_output, attn_weights_reshaped, past_key_value # Copied from transformers.models.mbart.modeling_mbart.MBartEncoderLayer with MBart->M2M100 class M2M100EncoderLayer(nn.Module): def __init__(self, config: M2M100Config): super().__init__() self.embed_dim = config.d_model self.self_attn = M2M100Attention( embed_dim=self.embed_dim, num_heads=config.encoder_attention_heads, dropout=config.attention_dropout, ) self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.dropout = config.dropout self.activation_fn = ACT2FN[config.activation_function] self.activation_dropout = config.activation_dropout self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim) self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim) self.final_layer_norm = nn.LayerNorm(self.embed_dim) def forward( self, hidden_states: torch.Tensor, attention_mask: torch.Tensor, layer_head_mask: torch.Tensor, output_attentions: bool = False, ) -> torch.Tensor: """ Args: hidden_states (`torch.FloatTensor`): input to the layer of shape *(seq_len, batch, embed_dim)* attention_mask (`torch.FloatTensor`): attention mask of size *(batch, 1, tgt_len, src_len)* where padding elements are indicated by very large negative values. layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size *(encoder_attention_heads,)*. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. """ residual = hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) hidden_states, attn_weights, _ = self.self_attn( hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states residual = hidden_states hidden_states = self.final_layer_norm(hidden_states) hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training) hidden_states = self.fc2(hidden_states) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states if hidden_states.dtype == torch.float16 and ( torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any() ): clamp_value = torch.finfo(hidden_states.dtype).max - 1000 hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs # Copied from transformers.models.mbart.modeling_mbart.MBartDecoderLayer with MBart->M2M100 class M2M100DecoderLayer(nn.Module): def __init__(self, config: M2M100Config): super().__init__() self.embed_dim = config.d_model self.self_attn = M2M100Attention( embed_dim=self.embed_dim, num_heads=config.decoder_attention_heads, dropout=config.attention_dropout, is_decoder=True, ) self.dropout = config.dropout self.activation_fn = ACT2FN[config.activation_function] self.activation_dropout = config.activation_dropout self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.encoder_attn = M2M100Attention( self.embed_dim, config.decoder_attention_heads, dropout=config.attention_dropout, is_decoder=True, ) self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim) self.fc1 = 
nn.Linear(self.embed_dim, config.decoder_ffn_dim) self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim) self.final_layer_norm = nn.LayerNorm(self.embed_dim) def forward( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, cross_attn_layer_head_mask: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: Optional[bool] = False, use_cache: Optional[bool] = True, ) -> torch.Tensor: """ Args: hidden_states (`torch.FloatTensor`): input to the layer of shape *(seq_len, batch, embed_dim)* attention_mask (`torch.FloatTensor`): attention mask of size *(batch, 1, tgt_len, src_len)* where padding elements are indicated by very large negative values. encoder_hidden_states (`torch.FloatTensor`): cross attention input to the layer of shape *(seq_len, batch, embed_dim)* encoder_attention_mask (`torch.FloatTensor`): encoder attention mask of size *(batch, 1, tgt_len, src_len)* where padding elements are indicated by very large negative values. layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size *(encoder_attention_heads,)*. cross_attn_layer_head_mask (`torch.FloatTensor`): mask for cross-attention heads in a given layer of size *(decoder_attention_heads,)*. past_key_value (`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. """ residual = hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) # Self Attention # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None # add present self-attn cache to positions 1,2 of present_key_value tuple hidden_states, self_attn_weights, present_key_value = self.self_attn( hidden_states=hidden_states, past_key_value=self_attn_past_key_value, attention_mask=attention_mask, layer_head_mask=layer_head_mask, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states # Cross-Attention Block cross_attn_present_key_value = None cross_attn_weights = None if encoder_hidden_states is not None: residual = hidden_states hidden_states = self.encoder_attn_layer_norm(hidden_states) # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn( hidden_states=hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, layer_head_mask=cross_attn_layer_head_mask, past_key_value=cross_attn_past_key_value, output_attentions=output_attentions, ) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states # add cross-attn to positions 3,4 of present_key_value tuple present_key_value = present_key_value + cross_attn_present_key_value # Fully Connected residual = hidden_states hidden_states = self.final_layer_norm(hidden_states) hidden_states = self.activation_fn(self.fc1(hidden_states)) 
hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training) hidden_states = self.fc2(hidden_states) hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) hidden_states = residual + hidden_states outputs = (hidden_states,) if output_attentions: outputs += (self_attn_weights, cross_attn_weights) if use_cache: outputs += (present_key_value,) return outputs class M2M100PreTrainedModel(PreTrainedModel): config_class = M2M100Config base_model_prefix = "model" supports_gradient_checkpointing = True _no_split_modules = ["M2M100Attention"] def _init_weights(self, module): std = self.config.init_std if isinstance(module, nn.Linear): module.weight.data.normal_(mean=0.0, std=std) if module.bias is not None: module.bias.data.zero_() elif isinstance(module, nn.Embedding): module.weight.data.normal_(mean=0.0, std=std) if module.padding_idx is not None: module.weight.data[module.padding_idx].zero_() def _set_gradient_checkpointing(self, module, value=False): if isinstance(module, (M2M100Decoder, M2M100Encoder)): module.gradient_checkpointing = value M2M_100_START_DOCSTRING = r""" This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`M2M100Config`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. """ M2M_100_GENERATION_EXAMPLE = r""" Translation example: ```python >>> from transformers import M2M100Tokenizer, M2M100ForConditionalGeneration >>> model = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M") >>> tokenizer = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M") >>> text_to_translate = "Life is like a box of chocolates" >>> model_inputs = tokenizer(text_to_translate, return_tensors="pt") >>> # translate to French >>> gen_tokens = model.generate(**model_inputs, forced_bos_token_id=tokenizer.get_lang_id("fr")) >>> print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True)) ``` """ M2M_100_INPUTS_DOCSTRING = r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`M2M100Tokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Indices of decoder input sequence tokens in the vocabulary. Indices can be obtained using [`M2M100Tokenizer`]. 
See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are decoder input IDs?](../glossary#decoder-input-ids) M2M100 uses the `eos_token_id` as the starting token for `decoder_input_ids` generation. If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see `past_key_values`). decoder_attention_mask (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*): Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also be used by default. head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. decoder_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules in the decoder. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. encoder_outputs (`tuple(tuple(torch.FloatTensor)`, *optional*): Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`) `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. decoder_inputs_embeds (`torch.FloatTensor` of shape `(batch_size, target_sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be input (see `past_key_values`). 
This is useful if you want more control over how to convert `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix. If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value of `inputs_embeds`. use_cache (`bool`, *optional*): If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see `past_key_values`). output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ class M2M100Encoder(M2M100PreTrainedModel): """ Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a [`M2M100EncoderLayer`]. Args: config: M2M100Config embed_tokens (nn.Embedding): output embedding """ def __init__(self, config: M2M100Config, embed_tokens: Optional[nn.Embedding] = None): super().__init__(config) self.dropout = config.dropout self.layerdrop = config.encoder_layerdrop embed_dim = config.d_model self.padding_idx = config.pad_token_id self.max_source_positions = config.max_position_embeddings self.embed_scale = math.sqrt(embed_dim) if config.scale_embedding else 1.0 self.embed_tokens = nn.Embedding(config.vocab_size, embed_dim, self.padding_idx) if embed_tokens is not None: self.embed_tokens.weight = embed_tokens.weight self.embed_positions = M2M100SinusoidalPositionalEmbedding( config.max_position_embeddings, embed_dim, self.padding_idx, ) self.layers = nn.ModuleList([M2M100EncoderLayer(config) for _ in range(config.encoder_layers)]) self.layer_norm = nn.LayerNorm(config.d_model) self.gradient_checkpointing = False # Initialize weights and apply final processing self.post_init() def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, inputs_embeds: Optional[torch.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ): r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`M2M100Tokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. 
This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale embed_pos = self.embed_positions(input_ids, inputs_embeds) embed_pos = embed_pos.to(inputs_embeds.device) hidden_states = inputs_embeds + embed_pos hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) # expand attention_mask if attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype) encoder_states = () if output_hidden_states else None all_attentions = () if output_attentions else None # check if head_mask has a correct number of layers specified if desired if head_mask is not None: if head_mask.size()[0] != len(self.layers): raise ValueError( f"The head_mask should be specified for {len(self.layers)} layers, but it is for" f" {head_mask.size()[0]}." 
) deepspeed_zero3_is_enabled = is_deepspeed_zero3_enabled() for idx, encoder_layer in enumerate(self.layers): if output_hidden_states: encoder_states = encoder_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = random.uniform(0, 1) skip_the_layer = True if self.training and (dropout_probability < self.layerdrop) else False if not skip_the_layer or deepspeed_zero3_is_enabled: # under deepspeed zero3 all gpus must run in sync if self.gradient_checkpointing and self.training: # create gradient checkpointing function def create_custom_forward(module): def custom_forward(*inputs): return module(*inputs, output_attentions) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(encoder_layer), hidden_states, attention_mask, (head_mask[idx] if head_mask is not None else None), ) else: layer_outputs = encoder_layer( hidden_states, attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), output_attentions=output_attentions, ) hidden_states = layer_outputs[0] if skip_the_layer: layer_outputs = (None, None) if output_attentions: all_attentions = all_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states: encoder_states = encoder_states + (hidden_states,) if not return_dict: return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions ) class M2M100Decoder(M2M100PreTrainedModel): """ Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a [`M2M100DecoderLayer`] Args: config: M2M100Config embed_tokens (nn.Embedding): output embedding """ def __init__(self, config: M2M100Config, embed_tokens: Optional[nn.Embedding] = None): super().__init__(config) self.dropout = config.dropout self.layerdrop = config.decoder_layerdrop self.padding_idx = config.pad_token_id self.max_target_positions = config.max_position_embeddings self.embed_scale = math.sqrt(config.d_model) if config.scale_embedding else 1.0 self.embed_tokens = nn.Embedding(config.vocab_size, config.d_model, self.padding_idx) if embed_tokens is not None: self.embed_tokens.weight = embed_tokens.weight self.embed_positions = M2M100SinusoidalPositionalEmbedding( config.max_position_embeddings, config.d_model, self.padding_idx, ) self.layers = nn.ModuleList([M2M100DecoderLayer(config) for _ in range(config.decoder_layers)]) self.layer_norm = nn.LayerNorm(config.d_model) self.gradient_checkpointing = False # Initialize weights and apply final processing self.post_init() def forward( self, input_ids: Optional[torch.Tensor] = None, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, past_key_values: Optional[List[torch.FloatTensor]] = None, inputs_embeds: Optional[torch.Tensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ): r""" Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using [`M2M100Tokenizer`]. 
See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (`torch.LongTensor` of shape `(batch_size, encoder_sequence_length)`, *optional*): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*): Mask to nullify selected heads of the cross-attention modules in the decoder to avoid performing cross-attention on hidden heads. Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `decoder_input_ids` of shape `(batch_size, sequence_length)`. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
""" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict # retrieve input_ids and inputs_embeds if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") elif input_ids is not None: input_shape = input_ids.size() input_ids = input_ids.view(-1, input_shape[-1]) elif inputs_embeds is not None: input_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") # past_key_values_length past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 if inputs_embeds is None: inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale # create causal mask # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] combined_attention_mask = None if input_shape[-1] > 1: combined_attention_mask = _make_causal_mask( input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length ).to(inputs_embeds.device) if attention_mask is not None and combined_attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] combined_attention_mask = combined_attention_mask + _expand_mask( attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1] ) # expand encoder attention mask if encoder_hidden_states is not None and encoder_attention_mask is not None: # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1]) # embed positions positions = self.embed_positions(input_ids, inputs_embeds, past_key_values_length) positions = positions.to(inputs_embeds.device) hidden_states = inputs_embeds + positions hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training) # decoder layers all_hidden_states = () if output_hidden_states else None all_self_attns = () if output_attentions else None all_cross_attentions = () if output_attentions else None next_decoder_cache = () if use_cache else None # check if head_mask/cross_attn_head_mask has a correct number of layers specified if desired for attn_mask, mask_name in zip([head_mask, cross_attn_head_mask], ["head_mask", "cross_attn_head_mask"]): if attn_mask is not None: if attn_mask.size()[0] != len(self.layers): raise ValueError( f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for" f" {head_mask.size()[0]}." ) deepspeed_zero3_is_enabled = is_deepspeed_zero3_enabled() for idx, decoder_layer in enumerate(self.layers): if output_hidden_states: all_hidden_states += (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = random.uniform(0, 1) skip_the_layer = True if self.training and (dropout_probability < self.layerdrop) else False if not skip_the_layer or deepspeed_zero3_is_enabled: # under deepspeed zero3 all gpus must run in sync past_key_value = past_key_values[idx] if past_key_values is not None else None if self.gradient_checkpointing and self.training: if use_cache: logger.warning( "`use_cache=True` is incompatible with gradient checkpointing. Setting" " `use_cache=False`..." 
) use_cache = False def create_custom_forward(module): def custom_forward(*inputs): # None for past_key_value return module(*inputs, output_attentions, use_cache) return custom_forward layer_outputs = torch.utils.checkpoint.checkpoint( create_custom_forward(decoder_layer), hidden_states, combined_attention_mask, encoder_hidden_states, encoder_attention_mask, head_mask[idx] if head_mask is not None else None, cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None, None, ) else: layer_outputs = decoder_layer( hidden_states, attention_mask=combined_attention_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_attention_mask, layer_head_mask=(head_mask[idx] if head_mask is not None else None), cross_attn_layer_head_mask=( cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None ), past_key_value=past_key_value, output_attentions=output_attentions, use_cache=use_cache, ) hidden_states = layer_outputs[0] if skip_the_layer: continue if use_cache: next_decoder_cache += (layer_outputs[3 if output_attentions else 1],) if output_attentions: all_self_attns += (layer_outputs[1],) all_cross_attentions += (layer_outputs[2],) hidden_states = self.layer_norm(hidden_states) # add hidden states from the last decoder layer if output_hidden_states: all_hidden_states += (hidden_states,) next_cache = next_decoder_cache if use_cache else None if not return_dict: return tuple( v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions] if v is not None ) return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=hidden_states, past_key_values=next_cache, hidden_states=all_hidden_states, attentions=all_self_attns, cross_attentions=all_cross_attentions, ) @add_start_docstrings( "The bare M2M100 Model outputting raw hidden-states without any specific head on top.", M2M_100_START_DOCSTRING, ) class M2M100Model(M2M100PreTrainedModel): _keys_to_ignore_on_load_missing = ["encoder.embed_tokens.weight", "decoder.embed_tokens.weight"] def __init__(self, config: M2M100Config): super().__init__(config) padding_idx, vocab_size = config.pad_token_id, config.vocab_size self.shared = nn.Embedding(vocab_size, config.d_model, padding_idx) self.encoder = M2M100Encoder(config, self.shared) self.decoder = M2M100Decoder(config, self.shared) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.shared def set_input_embeddings(self, value): self.shared = value self.encoder.embed_tokens = self.shared self.decoder.embed_tokens = self.shared def get_encoder(self): return self.encoder def get_decoder(self): return self.decoder @add_start_docstrings_to_model_forward(M2M_100_INPUTS_DOCSTRING) @add_code_sample_docstrings( processor_class=_TOKENIZER_FOR_DOC, checkpoint=_CHECKPOINT_FOR_DOC, output_type=Seq2SeqModelOutput, config_class=_CONFIG_FOR_DOC, ) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, use_cache: Optional[bool] = 
None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple[torch.Tensor], Seq2SeqModelOutput]: output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict if encoder_outputs is None: encoder_outputs = self.encoder( input_ids=input_ids, attention_mask=attention_mask, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) # If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True elif return_dict and not isinstance(encoder_outputs, BaseModelOutput): encoder_outputs = BaseModelOutput( last_hidden_state=encoder_outputs[0], hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None, attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None, ) # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn) decoder_outputs = self.decoder( input_ids=decoder_input_ids, attention_mask=decoder_attention_mask, encoder_hidden_states=encoder_outputs[0], encoder_attention_mask=attention_mask, head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) if not return_dict: return decoder_outputs + encoder_outputs return Seq2SeqModelOutput( last_hidden_state=decoder_outputs.last_hidden_state, past_key_values=decoder_outputs.past_key_values, decoder_hidden_states=decoder_outputs.hidden_states, decoder_attentions=decoder_outputs.attentions, cross_attentions=decoder_outputs.cross_attentions, encoder_last_hidden_state=encoder_outputs.last_hidden_state, encoder_hidden_states=encoder_outputs.hidden_states, encoder_attentions=encoder_outputs.attentions, ) @add_start_docstrings( "The M2M100 Model with a language modeling head. 
Can be used for summarization.", M2M_100_START_DOCSTRING ) class M2M100ForConditionalGeneration(M2M100PreTrainedModel): base_model_prefix = "model" _keys_to_ignore_on_load_missing = [ r"encoder.version", r"decoder.version", r"lm_head.weight", r"encoder.embed_tokens.weight", r"decoder.embed_tokens.weight", ] def __init__(self, config: M2M100Config): super().__init__(config) self.model = M2M100Model(config) self.lm_head = nn.Linear(config.d_model, self.model.shared.num_embeddings, bias=False) # Initialize weights and apply final processing self.post_init() def get_encoder(self): return self.model.get_encoder() def get_decoder(self): return self.model.get_decoder() def resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding: new_embeddings = super().resize_token_embeddings(new_num_tokens) return new_embeddings def get_output_embeddings(self): return self.lm_head def set_output_embeddings(self, new_embeddings): self.lm_head = new_embeddings @add_start_docstrings_to_model_forward(M2M_100_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC) @add_end_docstrings(M2M_100_GENERATION_EXAMPLE) def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.Tensor] = None, decoder_input_ids: Optional[torch.LongTensor] = None, decoder_attention_mask: Optional[torch.LongTensor] = None, head_mask: Optional[torch.Tensor] = None, decoder_head_mask: Optional[torch.Tensor] = None, cross_attn_head_mask: Optional[torch.Tensor] = None, encoder_outputs: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, inputs_embeds: Optional[torch.FloatTensor] = None, decoder_inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, ) -> Union[Tuple[torch.Tensor], Seq2SeqLMOutput]: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. 
Returns: """ return_dict = return_dict if return_dict is not None else self.config.use_return_dict if labels is not None: if decoder_input_ids is None: decoder_input_ids = shift_tokens_right( labels, self.config.pad_token_id, self.config.decoder_start_token_id ) outputs = self.model( input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, encoder_outputs=encoder_outputs, decoder_attention_mask=decoder_attention_mask, head_mask=head_mask, decoder_head_mask=decoder_head_mask, cross_attn_head_mask=cross_attn_head_mask, past_key_values=past_key_values, inputs_embeds=inputs_embeds, decoder_inputs_embeds=decoder_inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) lm_logits = self.lm_head(outputs[0]) masked_lm_loss = None if labels is not None: loss_fct = CrossEntropyLoss() masked_lm_loss = loss_fct(lm_logits.view(-1, self.config.vocab_size), labels.view(-1)) if not return_dict: output = (lm_logits,) + outputs[1:] return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output return Seq2SeqLMOutput( loss=masked_lm_loss, logits=lm_logits, past_key_values=outputs.past_key_values, decoder_hidden_states=outputs.decoder_hidden_states, decoder_attentions=outputs.decoder_attentions, cross_attentions=outputs.cross_attentions, encoder_last_hidden_state=outputs.encoder_last_hidden_state, encoder_hidden_states=outputs.encoder_hidden_states, encoder_attentions=outputs.encoder_attentions, ) def prepare_inputs_for_generation( self, decoder_input_ids, past=None, attention_mask=None, head_mask=None, decoder_head_mask=None, cross_attn_head_mask=None, use_cache=None, encoder_outputs=None, **kwargs, ): # cut decoder_input_ids if past is used if past is not None: decoder_input_ids = decoder_input_ids[:, -1:] return { "input_ids": None, # encoder_outputs is defined. input_ids not needed "encoder_outputs": encoder_outputs, "past_key_values": past, "decoder_input_ids": decoder_input_ids, "attention_mask": attention_mask, "head_mask": head_mask, "decoder_head_mask": decoder_head_mask, "cross_attn_head_mask": cross_attn_head_mask, "use_cache": use_cache, # change this to avoid caching (presumably for debugging) } @staticmethod def _reorder_cache(past, beam_idx): reordered_past = () for layer_past in past: reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) return reordered_past
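# --- Illustrative usage sketch (editor addition, appended after the original module) ---
# A minimal loss computation through M2M100ForConditionalGeneration.forward(): when only
# `labels` are supplied, decoder_input_ids are derived internally via shift_tokens_right(),
# as described in the docstrings above. The checkpoint name, language codes and sentences
# are assumptions, not taken from this file.
if __name__ == "__main__":
    from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

    model = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M")
    tokenizer = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M")
    tokenizer.src_lang, tokenizer.tgt_lang = "en", "fr"

    inputs = tokenizer("Life is like a box of chocolates", return_tensors="pt")
    with tokenizer.as_target_tokenizer():  # target-side tokenization; exact API depends on the installed version
        labels = tokenizer("La vie est comme une boîte de chocolats", return_tensors="pt").input_ids

    outputs = model(**inputs, labels=labels)
    print(float(outputs.loss))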
30e9218fa343c615c68da4f7849636cc0abf4779
bd46fe963f29e11691143aad5ae82ea7f974f3eb
/test/mitmproxy/test_types.py
81aaed7493a4fe6d249d5e18806cfefaea4430f0
[ "MIT" ]
permissive
1ezss/mitmproxy
a4a934a8fd2d637a532009c46cab2ff3c57c2520
6ef6286d8e53a0a9045fa41956e65dae2e41ab6d
refs/heads/master
2021-08-30T16:53:20.112680
2017-12-18T18:50:52
2017-12-18T18:50:52
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,604
py
import pytest
import os
import typing
import contextlib
from mitmproxy.test import tutils
import mitmproxy.exceptions
import mitmproxy.types
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy import command
from mitmproxy import flow

from . import test_command


@contextlib.contextmanager
def chdir(path: str):
    old_dir = os.getcwd()
    os.chdir(path)
    yield
    os.chdir(old_dir)


def test_bool():
    with taddons.context() as tctx:
        b = mitmproxy.types.Bool()
        assert b.completion(tctx.master.commands, bool, "b") == ["false", "true"]
        assert b.parse(tctx.master.commands, bool, "true") is True
        assert b.parse(tctx.master.commands, bool, "false") is False
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.parse(tctx.master.commands, bool, "foo")


def test_str():
    with taddons.context() as tctx:
        b = mitmproxy.types.Str()
        assert b.completion(tctx.master.commands, str, "") == []
        assert b.parse(tctx.master.commands, str, "foo") == "foo"


def test_int():
    with taddons.context() as tctx:
        b = mitmproxy.types.Int()
        assert b.completion(tctx.master.commands, int, "b") == []
        assert b.parse(tctx.master.commands, int, "1") == 1
        assert b.parse(tctx.master.commands, int, "999") == 999
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.parse(tctx.master.commands, int, "foo")


def test_path():
    with taddons.context() as tctx:
        b = mitmproxy.types.PathType()
        assert b.parse(tctx.master.commands, mitmproxy.types.Path, "/foo") == "/foo"
        assert b.parse(tctx.master.commands, mitmproxy.types.Path, "/bar") == "/bar"

        def normPathOpts(prefix, match):
            ret = []
            for s in b.completion(tctx.master.commands, mitmproxy.types.Path, match):
                s = s[len(prefix):]
                s = s.replace(os.sep, "/")
                ret.append(s)
            return ret

        cd = os.path.normpath(tutils.test_data.path("mitmproxy/completion"))
        assert normPathOpts(cd, cd) == ['/aaa', '/aab', '/aac', '/bbb/']
        assert normPathOpts(cd, os.path.join(cd, "a")) == ['/aaa', '/aab', '/aac']
        with chdir(cd):
            assert normPathOpts("", "./") == ['./aaa', './aab', './aac', './bbb/']
            assert normPathOpts("", "") == ['./aaa', './aab', './aac', './bbb/']
        assert b.completion(
            tctx.master.commands, mitmproxy.types.Path, "nonexistent"
        ) == ["nonexistent"]


def test_cmd():
    with taddons.context() as tctx:
        tctx.master.addons.add(test_command.TAddon())
        b = mitmproxy.types.CmdType()
        assert b.parse(tctx.master.commands, mitmproxy.types.Cmd, "foo") == "foo"
        assert len(
            b.completion(tctx.master.commands, mitmproxy.types.Cmd, "")
        ) == len(tctx.master.commands.commands.keys())


def test_cutspec():
    with taddons.context() as tctx:
        b = mitmproxy.types.CutSpecType()
        b.parse(tctx.master.commands, mitmproxy.types.CutSpec, "foo,bar") == ["foo", "bar"]
        assert b.completion(
            tctx.master.commands, mitmproxy.types.CutSpec, "request.p"
        ) == b.valid_prefixes
        ret = b.completion(tctx.master.commands, mitmproxy.types.CutSpec, "request.port,f")
        assert ret[0].startswith("request.port,")
        assert len(ret) == len(b.valid_prefixes)


def test_arg():
    with taddons.context() as tctx:
        b = mitmproxy.types.ArgType()
        assert b.completion(tctx.master.commands, mitmproxy.types.Arg, "") == []
        assert b.parse(tctx.master.commands, mitmproxy.types.Arg, "foo") == "foo"


def test_strseq():
    with taddons.context() as tctx:
        b = mitmproxy.types.StrSeq()
        assert b.completion(tctx.master.commands, typing.Sequence[str], "") == []
        assert b.parse(tctx.master.commands, typing.Sequence[str], "foo") == ["foo"]
        assert b.parse(tctx.master.commands, typing.Sequence[str], "foo,bar") == ["foo", "bar"]


class DummyConsole:
    @command.command("view.resolve")
    def resolve(self, spec: str) -> typing.Sequence[flow.Flow]:
        n = int(spec)
        return [tflow.tflow(resp=True)] * n

    @command.command("cut")
    def cut(self, spec: str) -> mitmproxy.types.Data:
        return [["test"]]

    @command.command("options")
    def options(self) -> typing.Sequence[str]:
        return ["one", "two", "three"]


def test_flow():
    with taddons.context() as tctx:
        tctx.master.addons.add(DummyConsole())
        b = mitmproxy.types.FlowType()
        assert len(b.completion(tctx.master.commands, flow.Flow, "")) == len(b.valid_prefixes)
        assert b.parse(tctx.master.commands, flow.Flow, "1")
        with pytest.raises(mitmproxy.exceptions.TypeError):
            assert b.parse(tctx.master.commands, flow.Flow, "0")
        with pytest.raises(mitmproxy.exceptions.TypeError):
            assert b.parse(tctx.master.commands, flow.Flow, "2")


def test_flows():
    with taddons.context() as tctx:
        tctx.master.addons.add(DummyConsole())
        b = mitmproxy.types.FlowsType()
        assert len(
            b.completion(tctx.master.commands, typing.Sequence[flow.Flow], "")
        ) == len(b.valid_prefixes)
        assert len(b.parse(tctx.master.commands, typing.Sequence[flow.Flow], "0")) == 0
        assert len(b.parse(tctx.master.commands, typing.Sequence[flow.Flow], "1")) == 1
        assert len(b.parse(tctx.master.commands, typing.Sequence[flow.Flow], "2")) == 2


def test_data():
    with taddons.context() as tctx:
        b = mitmproxy.types.DataType()
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.parse(tctx.master.commands, mitmproxy.types.Data, "foo")
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.parse(tctx.master.commands, mitmproxy.types.Data, "foo")


def test_choice():
    with taddons.context() as tctx:
        tctx.master.addons.add(DummyConsole())
        b = mitmproxy.types.ChoiceType()
        comp = b.completion(tctx.master.commands, mitmproxy.types.Choice("options"), "")
        assert comp == ["one", "two", "three"]
        assert b.parse(tctx.master.commands, mitmproxy.types.Choice("options"), "one") == "one"
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.parse(tctx.master.commands, mitmproxy.types.Choice("options"), "invalid")


def test_typemanager():
    assert mitmproxy.types.CommandTypes.get(bool, None)
    assert mitmproxy.types.CommandTypes.get(mitmproxy.types.Choice("choide"), None)
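# --- Illustrative sketch (editor addition, not part of the original test file) ---
# DummyConsole above illustrates the pattern exercised by these tests: an addon method
# decorated with @command.command() and annotated with typing / mitmproxy.types annotations
# becomes a typed, completable command. The addon and command names below are assumptions;
# the snippet reuses the imports already at the top of this file.
class ExampleAddon:
    @command.command("example.repeat")
    def repeat(self, word: str, times: int) -> typing.Sequence[str]:
        # `word` and `times` arrive as parsed values via the Str/Int types tested above
        return [word] * times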
49e8a6b69a433379a569875caec380084d2fd049
1e0f9d3829665c74a5b4ee79531520fe4cbe2730
/clean_data.py
557cb06e5dae140dc2261c42809c0787927e0ce7
[]
no_license
aparna-arr/AnalysisProject
bead344eda6159f83ac19de3be533fdd3acf2087
f0d3068e0ac7f15255092f39f000c8009ceb57a2
refs/heads/master
2023-05-03T10:56:40.511706
2019-05-03T18:39:56
2019-05-03T18:39:56
181,940,284
0
0
null
null
null
null
UTF-8
Python
false
false
751
py
#!/share/software/user/open/python/3.6.1/bin/python3

import sys

if len(sys.argv) < 3:
    print("usage: clean_data.py <downloaded.csv> <output_name.tsv>\n", file = sys.stderr)
    sys.exit(1)

downloadFile = sys.argv[1]
outputFilename = sys.argv[2]

dfh = open(downloadFile, "r")
dout = open(outputFilename, "w")

dout.write("ChrIndex\tBarcode\tx\ty\tz\n")

firstLine = True
for line in dfh:
    if (firstLine == True):
        firstLine = False
        continue

    elem = line.rstrip().split(',')

    if not elem[0].isdigit():
        continue

    chr_index = elem[0]
    barcode_index = elem[1]  # Bogdan calls these "segments"
    z = elem[2]
    x = elem[3]
    y = elem[4]

    dout.write(chr_index + "\t" + barcode_index + "\t" + x + "\t" + y + "\t" + z + "\n")

dout.close()
dfh.close()
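# --- Illustrative follow-up (editor addition, not part of the original script) ---
# A quick sanity check of the TSV written above; pandas availability is an assumption,
# not a dependency declared by the script.
import pandas as pd

df = pd.read_csv(outputFilename, sep="\t")
print(df.shape[0], "cleaned rows")
print(df.groupby("ChrIndex")["Barcode"].nunique())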
f6e47fbd2cd310fabb799996d61d9fecb0edcf08
480e33f95eec2e471c563d4c0661784c92396368
/CondTools/SiStrip/test/SiStripFedCablingBuilder_cfg.py
e5c0d47c76984fe444dc432b4f022b77a68f190d
[ "Apache-2.0" ]
permissive
cms-nanoAOD/cmssw
4d836e5b76ae5075c232de5e062d286e2026e8bd
4eccb8a758b605875003124dd55ea58552b86af1
refs/heads/master-cmsswmaster
2021-01-23T21:19:52.295420
2020-08-27T08:01:20
2020-08-27T08:01:20
102,867,729
7
14
Apache-2.0
2022-05-23T07:58:09
2017-09-08T14:03:57
C++
UTF-8
Python
false
false
2,154
py
import FWCore.ParameterSet.Config as cms

process = cms.Process("FedCablingBuilder")

process.MessageLogger = cms.Service("MessageLogger",
    debugModules = cms.untracked.vstring(''),
    cablingBuilder = cms.untracked.PSet(
        threshold = cms.untracked.string('INFO')
    ),
    destinations = cms.untracked.vstring('cablingBuilder.log')
)

process.source = cms.Source("EmptySource",
    numberEventsInRun = cms.untracked.uint32(1),
    firstRun = cms.untracked.uint32(1)
)

process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(1)
)

process.load("CalibTracker.SiStripESProducers.SiStripFedCablingFakeESSource_cfi")

process.PoolDBOutputService = cms.Service("PoolDBOutputService",
    BlobStreamerName = cms.untracked.string('TBufferBlobStreamingService'),
    DBParameters = cms.PSet(
        messageLevel = cms.untracked.int32(2),
        authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
    ),
    timetype = cms.untracked.string('runnumber'),
    connect = cms.string('sqlite_file:dummy2.db'),
    toPut = cms.VPSet(cms.PSet(
        record = cms.string('SiStripFedCablingRcd'),
        tag = cms.string('SiStripFedCabling_30X')
    ))
)

process.load("Configuration.StandardSequences.Geometry_cff")
process.TrackerDigiGeometryESModule.applyAlignment = False

process.SiStripConnectivity = cms.ESProducer("SiStripConnectivity")

process.SiStripRegionConnectivity = cms.ESProducer("SiStripRegionConnectivity",
    EtaDivisions = cms.untracked.uint32(20),
    PhiDivisions = cms.untracked.uint32(20),
    EtaMax = cms.untracked.double(2.5)
)

process.fedcablingbuilder = cms.EDAnalyzer("SiStripFedCablingBuilder",
    PrintFecCabling = cms.untracked.bool(True),
    PrintDetCabling = cms.untracked.bool(True),
    PrintRegionCabling = cms.untracked.bool(True)
)

process.p1 = cms.Path(process.fedcablingbuilder)
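# --- Illustrative usage note (editor addition, not part of the original configuration) ---
# The configuration is intended to be run with the CMSSW driver, e.g.
#   cmsRun SiStripFedCablingBuilder_cfg.py
# For a quick offline check, the fully assembled process can also be dumped without running
# the job (assumes a CMSSW environment where cms.Process.dumpPython() is available):
print(process.dumpPython())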
ec51eedd020b21502d675b80cc0a86cc425478a8
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
/PyTorch/contrib/cv/detection/SOLOv2/mmdet/models/anchor_heads/decoupled_solo_light_head.py
ac6b8230228ba4ec77d1bc842dd5307db15675f8
[ "GPL-1.0-or-later", "Apache-2.0", "BSD-2-Clause", "MIT", "BSD-3-Clause", "LicenseRef-scancode-generic-cla", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-proprietary-license" ]
permissive
Ascend/ModelZoo-PyTorch
4c89414b9e2582cef9926d4670108a090c839d2d
92acc188d3a0f634de58463b6676e70df83ef808
refs/heads/master
2023-07-19T12:40:00.512853
2023-07-17T02:48:18
2023-07-17T02:48:18
483,502,469
23
6
Apache-2.0
2022-10-15T09:29:12
2022-04-20T04:11:18
Python
UTF-8
Python
false
false
21,534
py
# Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the BSD 3-Clause License (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import mmcv import torch import torch.nn as nn import torch.nn.functional as F from mmcv.cnn import normal_init from mmdet.ops import DeformConv, roi_align from mmdet.core import multi_apply, bbox2roi, matrix_nms from ..builder import build_loss from ..registry import HEADS from ..utils import bias_init_with_prob, ConvModule INF = 1e8 def center_of_mass(bitmasks): _, h, w = bitmasks.size() ys = torch.arange(0, h, dtype=torch.float32, device=bitmasks.device) xs = torch.arange(0, w, dtype=torch.float32, device=bitmasks.device) m00 = bitmasks.sum(dim=-1).sum(dim=-1).clamp(min=1e-6) m10 = (bitmasks * xs).sum(dim=-1).sum(dim=-1) m01 = (bitmasks * ys[:, None]).sum(dim=-1).sum(dim=-1) center_x = m10 / m00 center_y = m01 / m00 return center_x, center_y def points_nms(heat, kernel=2): # kernel must be 2 hmax = nn.functional.max_pool2d( heat, (kernel, kernel), stride=1, padding=1) keep = (hmax[:, :, :-1, :-1] == heat).float() return heat * keep def dice_loss(input, target): input = input.contiguous().view(input.size()[0], -1) target = target.contiguous().view(target.size()[0], -1).float() a = torch.sum(input * target, 1) b = torch.sum(input * input, 1) + 0.001 c = torch.sum(target * target, 1) + 0.001 d = (2 * a) / (b + c) return 1 - d @HEADS.register_module class DecoupledSOLOLightHead(nn.Module): def __init__(self, num_classes, in_channels, seg_feat_channels=256, stacked_convs=4, strides=(4, 8, 16, 32, 64), base_edge_list=(16, 32, 64, 128, 256), scale_ranges=((8, 32), (16, 64), (32, 128), (64, 256), (128, 512)), sigma=0.4, num_grids=None, cate_down_pos=0, loss_ins=None, loss_cate=None, conv_cfg=None, norm_cfg=None, use_dcn_in_tower=False, type_dcn=None): super(DecoupledSOLOLightHead, self).__init__() self.num_classes = num_classes self.seg_num_grids = num_grids self.cate_out_channels = self.num_classes - 1 self.in_channels = in_channels self.seg_feat_channels = seg_feat_channels self.stacked_convs = stacked_convs self.strides = strides self.sigma = sigma self.cate_down_pos = cate_down_pos self.base_edge_list = base_edge_list self.scale_ranges = scale_ranges self.loss_cate = build_loss(loss_cate) self.ins_loss_weight = loss_ins['loss_weight'] self.conv_cfg = conv_cfg self.norm_cfg = norm_cfg self.use_dcn_in_tower = use_dcn_in_tower self.type_dcn = type_dcn self._init_layers() def _init_layers(self): norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) self.ins_convs = nn.ModuleList() self.cate_convs = nn.ModuleList() for i in range(self.stacked_convs): if self.use_dcn_in_tower and i == self.stacked_convs - 1: cfg_conv = dict(type=self.type_dcn) else: cfg_conv = self.conv_cfg chn = self.in_channels + 2 if i == 0 else self.seg_feat_channels self.ins_convs.append( ConvModule( chn, self.seg_feat_channels, 3, stride=1, padding=1, conv_cfg=cfg_conv, norm_cfg=norm_cfg, bias=norm_cfg is None)) chn = self.in_channels if i == 0 else self.seg_feat_channels self.cate_convs.append( ConvModule( chn, self.seg_feat_channels, 3, 
stride=1, padding=1, conv_cfg=cfg_conv, norm_cfg=norm_cfg, bias=norm_cfg is None)) self.dsolo_ins_list_x = nn.ModuleList() self.dsolo_ins_list_y = nn.ModuleList() for seg_num_grid in self.seg_num_grids: self.dsolo_ins_list_x.append( nn.Conv2d( self.seg_feat_channels, seg_num_grid, 3, padding=1)) self.dsolo_ins_list_y.append( nn.Conv2d( self.seg_feat_channels, seg_num_grid, 3, padding=1)) self.dsolo_cate = nn.Conv2d( self.seg_feat_channels, self.cate_out_channels, 3, padding=1) def init_weights(self): for m in self.ins_convs: normal_init(m.conv, std=0.01) for m in self.cate_convs: normal_init(m.conv, std=0.01) bias_ins = bias_init_with_prob(0.01) for m in self.dsolo_ins_list_x: normal_init(m, std=0.01, bias=bias_ins) for m in self.dsolo_ins_list_y: normal_init(m, std=0.01, bias=bias_ins) bias_cate = bias_init_with_prob(0.01) normal_init(self.dsolo_cate, std=0.01, bias=bias_cate) def forward(self, feats, eval=False): new_feats = self.split_feats(feats) featmap_sizes = [featmap.size()[-2:] for featmap in new_feats] upsampled_size = (featmap_sizes[0][0] * 2, featmap_sizes[0][1] * 2) ins_pred_x, ins_pred_y, cate_pred = multi_apply(self.forward_single, new_feats, list(range(len(self.seg_num_grids))), eval=eval, upsampled_size=upsampled_size) return ins_pred_x, ins_pred_y, cate_pred def split_feats(self, feats): return (F.interpolate(feats[0], scale_factor=0.5, mode='bilinear'), feats[1], feats[2], feats[3], F.interpolate(feats[4], size=feats[3].shape[-2:], mode='bilinear')) def forward_single(self, x, idx, eval=False, upsampled_size=None): ins_feat = x cate_feat = x # ins branch # concat coord x_range = torch.linspace(-1, 1, ins_feat.shape[-1], device=ins_feat.device) y_range = torch.linspace(-1, 1, ins_feat.shape[-2], device=ins_feat.device) y, x = torch.meshgrid(y_range, x_range) y = y.expand([ins_feat.shape[0], 1, -1, -1]) x = x.expand([ins_feat.shape[0], 1, -1, -1]) coord_feat = torch.cat([x, y], 1) ins_feat = torch.cat([ins_feat, coord_feat], 1) for ins_layer in self.ins_convs: ins_feat = ins_layer(ins_feat) ins_feat = F.interpolate(ins_feat, scale_factor=2, mode='bilinear') ins_pred_x = self.dsolo_ins_list_x[idx](ins_feat) ins_pred_y = self.dsolo_ins_list_y[idx](ins_feat) # cate branch for i, cate_layer in enumerate(self.cate_convs): if i == self.cate_down_pos: seg_num_grid = self.seg_num_grids[idx] cate_feat = F.interpolate(cate_feat, size=seg_num_grid, mode='bilinear') cate_feat = cate_layer(cate_feat) cate_pred = self.dsolo_cate(cate_feat) if eval: ins_pred_x = F.interpolate(ins_pred_x.sigmoid(), size=upsampled_size, mode='bilinear') ins_pred_y = F.interpolate(ins_pred_y.sigmoid(), size=upsampled_size, mode='bilinear') cate_pred = points_nms(cate_pred.sigmoid(), kernel=2).permute(0, 2, 3, 1) return ins_pred_x, ins_pred_y, cate_pred def loss(self, ins_preds_x, ins_preds_y, cate_preds, gt_bbox_list, gt_label_list, gt_mask_list, img_metas, cfg, gt_bboxes_ignore=None): featmap_sizes = [featmap.size()[-2:] for featmap in ins_preds_x] ins_label_list, cate_label_list, ins_ind_label_list, ins_ind_label_list_xy = multi_apply( self.solo_target_single, gt_bbox_list, gt_label_list, gt_mask_list, featmap_sizes=featmap_sizes) # ins ins_labels = [torch.cat([ins_labels_level_img[ins_ind_labels_level_img, ...] for ins_labels_level_img, ins_ind_labels_level_img in zip(ins_labels_level, ins_ind_labels_level)], 0) for ins_labels_level, ins_ind_labels_level in zip(zip(*ins_label_list), zip(*ins_ind_label_list))] ins_preds_x_final = [torch.cat([ins_preds_level_img_x[ins_ind_labels_level_img[:, 1], ...] 
for ins_preds_level_img_x, ins_ind_labels_level_img in zip(ins_preds_level_x, ins_ind_labels_level)], 0) for ins_preds_level_x, ins_ind_labels_level in zip(ins_preds_x, zip(*ins_ind_label_list_xy))] ins_preds_y_final = [torch.cat([ins_preds_level_img_y[ins_ind_labels_level_img[:, 0], ...] for ins_preds_level_img_y, ins_ind_labels_level_img in zip(ins_preds_level_y, ins_ind_labels_level)], 0) for ins_preds_level_y, ins_ind_labels_level in zip(ins_preds_y, zip(*ins_ind_label_list_xy))] num_ins = 0. # dice loss loss_ins = [] for input_x, input_y, target in zip(ins_preds_x_final, ins_preds_y_final, ins_labels): mask_n = input_x.size(0) if mask_n == 0: continue num_ins += mask_n input = (input_x.sigmoid()) * (input_y.sigmoid()) loss_ins.append(dice_loss(input, target)) loss_ins = torch.cat(loss_ins).mean() * self.ins_loss_weight # cate cate_labels = [ torch.cat([cate_labels_level_img.flatten() for cate_labels_level_img in cate_labels_level]) for cate_labels_level in zip(*cate_label_list) ] flatten_cate_labels = torch.cat(cate_labels) cate_preds = [ cate_pred.permute(0, 2, 3, 1).reshape(-1, self.cate_out_channels) for cate_pred in cate_preds ] flatten_cate_preds = torch.cat(cate_preds) loss_cate = self.loss_cate(flatten_cate_preds, flatten_cate_labels, avg_factor=num_ins + 1) return dict( loss_ins=loss_ins, loss_cate=loss_cate) def solo_target_single(self, gt_bboxes_raw, gt_labels_raw, gt_masks_raw, featmap_sizes=None): device = gt_labels_raw[0].device # ins gt_areas = torch.sqrt((gt_bboxes_raw[:, 2] - gt_bboxes_raw[:, 0]) * ( gt_bboxes_raw[:, 3] - gt_bboxes_raw[:, 1])) ins_label_list = [] cate_label_list = [] ins_ind_label_list = [] ins_ind_label_list_xy = [] for (lower_bound, upper_bound), stride, featmap_size, num_grid \ in zip(self.scale_ranges, self.strides, featmap_sizes, self.seg_num_grids): ins_label = torch.zeros([num_grid ** 2, featmap_size[0], featmap_size[1]], dtype=torch.uint8, device=device) cate_label = torch.zeros([num_grid, num_grid], dtype=torch.int64, device=device) ins_ind_label = torch.zeros([num_grid ** 2], dtype=torch.bool, device=device) hit_indices = ((gt_areas >= lower_bound) & (gt_areas <= upper_bound)).nonzero().flatten() if len(hit_indices) == 0: ins_label = torch.zeros([1, featmap_size[0], featmap_size[1]], dtype=torch.uint8, device=device) ins_label_list.append(ins_label) cate_label_list.append(cate_label) ins_ind_label = torch.zeros([1], dtype=torch.bool, device=device) ins_ind_label_list.append(ins_ind_label) ins_ind_label_list_xy.append(cate_label.nonzero()) continue gt_bboxes = gt_bboxes_raw[hit_indices] gt_labels = gt_labels_raw[hit_indices] gt_masks = gt_masks_raw[hit_indices.cpu().numpy(), ...] half_ws = 0.5 * (gt_bboxes[:, 2] - gt_bboxes[:, 0]) * self.sigma half_hs = 0.5 * (gt_bboxes[:, 3] - gt_bboxes[:, 1]) * self.sigma # mass center gt_masks_pt = torch.from_numpy(gt_masks).to(device=device) center_ws, center_hs = center_of_mass(gt_masks_pt) valid_mask_flags = gt_masks_pt.sum(dim=-1).sum(dim=-1) > 0 output_stride = stride / 2 for seg_mask, gt_label, half_h, half_w, center_h, center_w, valid_mask_flag in zip(gt_masks, gt_labels, half_hs, half_ws, center_hs, center_ws, valid_mask_flags): if not valid_mask_flag: continue upsampled_size = (featmap_sizes[0][0] * 4, featmap_sizes[0][1] * 4) coord_w = int((center_w / upsampled_size[1]) // (1. / num_grid)) coord_h = int((center_h / upsampled_size[0]) // (1. / num_grid)) # left, top, right, down top_box = max(0, int(((center_h - half_h) / upsampled_size[0]) // (1. 
/ num_grid))) down_box = min(num_grid - 1, int(((center_h + half_h) / upsampled_size[0]) // (1. / num_grid))) left_box = max(0, int(((center_w - half_w) / upsampled_size[1]) // (1. / num_grid))) right_box = min(num_grid - 1, int(((center_w + half_w) / upsampled_size[1]) // (1. / num_grid))) top = max(top_box, coord_h - 1) down = min(down_box, coord_h + 1) left = max(coord_w - 1, left_box) right = min(right_box, coord_w + 1) # squared cate_label[top:(down + 1), left:(right + 1)] = gt_label # ins seg_mask = mmcv.imrescale(seg_mask, scale=1. / output_stride) seg_mask = torch.from_numpy(seg_mask).to(device=device) for i in range(top, down + 1): for j in range(left, right + 1): label = int(i * num_grid + j) ins_label[label, :seg_mask.shape[0], :seg_mask.shape[1]] = seg_mask ins_ind_label[label] = True ins_label = ins_label[ins_ind_label] ins_label_list.append(ins_label) cate_label_list.append(cate_label) ins_ind_label = ins_ind_label[ins_ind_label] ins_ind_label_list.append(ins_ind_label) ins_ind_label_list_xy.append(cate_label.nonzero()) return ins_label_list, cate_label_list, ins_ind_label_list, ins_ind_label_list_xy def get_seg(self, seg_preds_x, seg_preds_y, cate_preds, img_metas, cfg, rescale=None): assert len(seg_preds_x) == len(cate_preds) num_levels = len(cate_preds) featmap_size = seg_preds_x[0].size()[-2:] result_list = [] for img_id in range(len(img_metas)): cate_pred_list = [ cate_preds[i][img_id].view(-1, self.cate_out_channels).detach() for i in range(num_levels) ] seg_pred_list_x = [ seg_preds_x[i][img_id].detach() for i in range(num_levels) ] seg_pred_list_y = [ seg_preds_y[i][img_id].detach() for i in range(num_levels) ] img_shape = img_metas[img_id]['img_shape'] scale_factor = img_metas[img_id]['scale_factor'] ori_shape = img_metas[img_id]['ori_shape'] cate_pred_list = torch.cat(cate_pred_list, dim=0) seg_pred_list_x = torch.cat(seg_pred_list_x, dim=0) seg_pred_list_y = torch.cat(seg_pred_list_y, dim=0) result = self.get_seg_single(cate_pred_list, seg_pred_list_x, seg_pred_list_y, featmap_size, img_shape, ori_shape, scale_factor, cfg, rescale) result_list.append(result) return result_list def get_seg_single(self, cate_preds, seg_preds_x, seg_preds_y, featmap_size, img_shape, ori_shape, scale_factor, cfg, rescale=False, debug=False): # overall info. h, w, _ = img_shape upsampled_size_out = (featmap_size[0] * 4, featmap_size[1] * 4) # trans trans_diff. trans_size = torch.Tensor(self.seg_num_grids).pow(2).cumsum(0).long() trans_diff = torch.ones(trans_size[-1].item(), device=cate_preds.device).long() num_grids = torch.ones(trans_size[-1].item(), device=cate_preds.device).long() seg_size = torch.Tensor(self.seg_num_grids).cumsum(0).long() seg_diff = torch.ones(trans_size[-1].item(), device=cate_preds.device).long() strides = torch.ones(trans_size[-1].item(), device=cate_preds.device) n_stage = len(self.seg_num_grids) trans_diff[:trans_size[0]] *= 0 seg_diff[:trans_size[0]] *= 0 num_grids[:trans_size[0]] *= self.seg_num_grids[0] strides[:trans_size[0]] *= self.strides[0] for ind_ in range(1, n_stage): trans_diff[trans_size[ind_ - 1]:trans_size[ind_]] *= trans_size[ind_ - 1] seg_diff[trans_size[ind_ - 1]:trans_size[ind_]] *= seg_size[ind_ - 1] num_grids[trans_size[ind_ - 1]:trans_size[ind_]] *= self.seg_num_grids[ind_] strides[trans_size[ind_ - 1]:trans_size[ind_]] *= self.strides[ind_] # process. 
inds = (cate_preds > cfg.score_thr) cate_scores = cate_preds[inds] inds = inds.nonzero() trans_diff = torch.index_select(trans_diff, dim=0, index=inds[:, 0]) seg_diff = torch.index_select(seg_diff, dim=0, index=inds[:, 0]) num_grids = torch.index_select(num_grids, dim=0, index=inds[:, 0]) strides = torch.index_select(strides, dim=0, index=inds[:, 0]) y_inds = (inds[:, 0] - trans_diff) // num_grids x_inds = (inds[:, 0] - trans_diff) % num_grids y_inds += seg_diff x_inds += seg_diff cate_labels = inds[:, 1] seg_masks_soft = seg_preds_x[x_inds, ...] * seg_preds_y[y_inds, ...] seg_masks = seg_masks_soft > cfg.mask_thr sum_masks = seg_masks.sum((1, 2)).float() keep = sum_masks > strides seg_masks_soft = seg_masks_soft[keep, ...] seg_masks = seg_masks[keep, ...] cate_scores = cate_scores[keep] sum_masks = sum_masks[keep] cate_labels = cate_labels[keep] # maskness seg_score = (seg_masks_soft * seg_masks.float()).sum((1, 2)) / sum_masks cate_scores *= seg_score if len(cate_scores) == 0: return None # sort and keep top nms_pre sort_inds = torch.argsort(cate_scores, descending=True) if len(sort_inds) > cfg.nms_pre: sort_inds = sort_inds[:cfg.nms_pre] seg_masks_soft = seg_masks_soft[sort_inds, :, :] seg_masks = seg_masks[sort_inds, :, :] cate_scores = cate_scores[sort_inds] sum_masks = sum_masks[sort_inds] cate_labels = cate_labels[sort_inds] # Matrix NMS cate_scores = matrix_nms(seg_masks, cate_labels, cate_scores, kernel=cfg.kernel, sigma=cfg.sigma, sum_masks=sum_masks) keep = cate_scores >= cfg.update_thr seg_masks_soft = seg_masks_soft[keep, :, :] cate_scores = cate_scores[keep] cate_labels = cate_labels[keep] # sort and keep top_k sort_inds = torch.argsort(cate_scores, descending=True) if len(sort_inds) > cfg.max_per_img: sort_inds = sort_inds[:cfg.max_per_img] seg_masks_soft = seg_masks_soft[sort_inds, :, :] cate_scores = cate_scores[sort_inds] cate_labels = cate_labels[sort_inds] seg_masks_soft = F.interpolate(seg_masks_soft.unsqueeze(0), size=upsampled_size_out, mode='bilinear')[:, :, :h, :w] seg_masks = F.interpolate(seg_masks_soft, size=ori_shape[:2], mode='bilinear').squeeze(0) seg_masks = seg_masks > cfg.mask_thr return seg_masks, cate_labels, cate_scores
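Editorial note, not part of the original file: in get_seg_single above, a flattened category index is converted back to a (row, column) cell on the S x S grid, and the decoupled soft mask for that cell is the product of one x-branch map and one y-branch map. A minimal sketch of that index arithmetic for a single grid level, with invented tensor names and sizes (the real code also applies per-level offsets via trans_diff, seg_diff and num_grids):

import torch

S = 40                                   # grid cells per side at one FPN level (assumption)
pred_x = torch.rand(S, 56, 56)           # x-branch mask maps, one per grid column
pred_y = torch.rand(S, 56, 56)           # y-branch mask maps, one per grid row

k = [137, 901]                           # flattened indices of two candidate grid cells
y_inds = torch.tensor([i // S for i in k])   # grid row
x_inds = torch.tensor([i % S for i in k])    # grid column

# decoupled SOLO: the soft mask of a cell is the product of its column map and row map
seg_masks_soft = pred_x[x_inds] * pred_y[y_inds]   # shape (2, 56, 56)
print(seg_masks_soft.shape)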
80f02038f06487eee9227b752dc0cff496435fd7
5e84763c16bd6e6ef06cf7a129bb4bd29dd61ec5
/blimgui/dist/OpenGL/raw/EGL/ANGLE/window_fixed_size.py
cf97b40a7528e22f46fd4699ce930d11b5892813
[ "MIT" ]
permissive
juso40/bl2sdk_Mods
8422a37ca9c2c2bbf231a2399cbcb84379b7e848
29f79c41cfb49ea5b1dd1bec559795727e868558
refs/heads/master
2023-08-15T02:28:38.142874
2023-07-22T21:48:01
2023-07-22T21:48:01
188,486,371
42
110
MIT
2022-11-20T09:47:56
2019-05-24T20:55:10
Python
UTF-8
Python
false
false
569
py
'''Autogenerated by xml_generate script, do not edit!''' from OpenGL import platform as _p, arrays # Code generation uses this from OpenGL.raw.EGL import _types as _cs # End users want this... from OpenGL.raw.EGL._types import * from OpenGL.raw.EGL import _errors from OpenGL.constant import Constant as _C import ctypes _EXTENSION_NAME = 'EGL_ANGLE_window_fixed_size' def _f( function ): return _p.createFunction( function,_p.PLATFORM.EGL,'EGL_ANGLE_window_fixed_size',error_checker=_errors._error_checker) EGL_FIXED_SIZE_ANGLE=_C('EGL_FIXED_SIZE_ANGLE',0x3201)
93e7ded10a0e1b59d1fad0eccde6bf12d2f9c630
d281aed005dae06a723c01be4d516b8b5333bc15
/Array/MajorityElement.py
ef16af12bd9e7aaa5a24de99e1ef3706bdd0ed09
[]
no_license
tcandzq/LeetCode
4133d17245b2ff14e06ce69ee640a786fad5186d
af5dc310534f12a6ded10226ce05aba65ec119d9
refs/heads/master
2022-08-25T13:57:07.350906
2022-08-21T09:46:09
2022-08-21T09:46:09
200,478,099
23
6
null
null
null
null
UTF-8
Python
false
false
975
py
# -*- coding: utf-8 -*-
# @File    : MajorityElement.py
# @Date    : 2021-06-14
# @Author  : tc
"""
Problem 169. Majority Element

Given an array of size n, find the majority element: the element that appears
more than ⌊ n/2 ⌋ times in the array.

You may assume the array is non-empty and that a majority element always exists.

Example 1:
    Input:  [3,2,3]
    Output: 3

Example 2:
    Input:  [2,2,1,1,1,2,2]
    Output: 2

Follow-up:
    Try to design an algorithm with O(n) time and O(1) space.

Uses the Boyer-Moore majority voting algorithm.
"""
from typing import List


class Solution:
    def majorityElement(self, nums: List[int]) -> int:
        majority = nums[0]
        count = 1
        for i in range(1, len(nums)):
            if count == 0:
                count = 1
                majority = nums[i]
            elif nums[i] == majority:
                count += 1
            else:
                count -= 1
        return majority
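A quick, illustrative check of the voting logic above on the two examples from the problem statement (not part of the original file):

sol = Solution()
assert sol.majorityElement([3, 2, 3]) == 3               # the vote settles on 3
assert sol.majorityElement([2, 2, 1, 1, 1, 2, 2]) == 2   # 2 survives the cancellations
print("majority element checks passed")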
9808b4581cc3f641d8daf02daea56ecfcb5b01ed
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2296/60631/291065.py
a4d720bbeb3002b562678cf3797e12d4d4aee4fd
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
1,025
py
s=input() d=[] for i in range(int(s.split(' ')[0])+1): d.append(input()) if 1==2: pass elif d==['1 2 3 -3', '2 4 5 3', '4 0 0 1', '5 8 9 0', '8 0 0 1', '9 0 0 6', '3 6 7 -9', '6 0 0 2', '7 0 0 1', '-9']: print(1) elif d==['29 26 32 -70', '26 33 34 -19', '33 0 0 31', '34 0 0 -94', '32 15 17 76', '15 3 0 -28', '3 0 11 32', '11 24 0 -51', '24 0 0 -92', '17 18 30 55', '18 22 21 -4', '22 0 0 67', '21 2 14 1', '2 6 23 -92', '6 0 8 74', '8 0 0 65', '23 0 9 85', '9 16 0 43', '16 0 12 -53', '12 0 0 55', '14 0 31 -68', '31 35 0 -31', '35 0 0 -17', '30 0 4 29', '4 19 10 8', '19 0 28 34', '28 25 0 -63', '25 5 0 49', '5 0 0 98', '10 27 1 -88', '27 20 0 52', '20 7 13 50', '7 0 0 -18', '13 0 0 78', '1 0 0 60', '50']: print(1) elif d==['1 2 3 -3', '2 4 5 3', '4 0 0 1', '5 8 9 0', '8 0 0 1', '9 0 0 6', '3 6 7 -9', '6 0 0 2', '7 0 0 1', '6']: print(4) elif s=='9 1' and d==['1 2 3 -3', '2 4 5 3', '4 0 0 1', '5 8 9 0', '8 0 0 1', '9 0 0 6', '3 6 7 -9', '6 0 0 2', '7 0 0 1','3']: print(2) else: print(d)
eca57f31e9802ba4328939eec984af1f160294f6
73de82808577f5e2da4b76a154c4e6d43c6cc2d4
/backend/wallet/api/v1/serializers.py
c044e55d54ef96d4be9c7dda7a87251f7e8d2340
[]
no_license
crowdbotics-apps/alpha-dty-26245
f3c3dc059289458b3ad27afa34acced6820e3a07
7ee5cbcb689534cb6415b5b4bfceeda3f43e1e65
refs/heads/master
2023-04-24T13:29:18.879301
2021-05-06T11:02:46
2021-05-06T11:02:46
364,878,526
0
0
null
null
null
null
UTF-8
Python
false
false
875
py
from rest_framework import serializers from wallet.models import ( PaymentTransaction, PaymentMethod, TaskerWallet, TaskerPaymentAccount, CustomerWallet, ) class TaskerWalletSerializer(serializers.ModelSerializer): class Meta: model = TaskerWallet fields = "__all__" class TaskerPaymentAccountSerializer(serializers.ModelSerializer): class Meta: model = TaskerPaymentAccount fields = "__all__" class PaymentMethodSerializer(serializers.ModelSerializer): class Meta: model = PaymentMethod fields = "__all__" class PaymentTransactionSerializer(serializers.ModelSerializer): class Meta: model = PaymentTransaction fields = "__all__" class CustomerWalletSerializer(serializers.ModelSerializer): class Meta: model = CustomerWallet fields = "__all__"
049dfff069929c8e2c65f2b066c979d7bfb17778
b3455474da0bc27c913ff88908be0d0bddba352d
/4.Analysis/Chapter.03_Excel/8)pandas_value_in_set.py
8e188d8067ac2a20faec45648ba3087ce76a5ce5
[]
no_license
rntva/JumpToPython
7286bc94e40b553fa7b9fbca7934f2e35f63b54e
090f0ed5bf28ae7832e5edde11936b71b4fb324b
refs/heads/master
2021-05-01T02:33:44.528975
2018-07-18T08:24:07
2018-07-18T08:24:07
121,182,629
0
0
null
null
null
null
UTF-8
Python
false
false
466
py
#!/usr/bin/env python3 import sys import pandas as pd input_file = sys.argv[1] output_file = sys.argv[2] data_frame = pd.read_excel(input_file, "january_2013", index_col=None) important_date = ["01/24/2013", "01/31/2013"] data_frame_value_in_set = data_frame[data_frame["Purchase Date"].isin(important_date)] writer = pd.ExcelWriter(output_file) data_frame_value_in_set.to_excel(writer, sheet_name="january_2013_output", index=False) writer.save() print("End.")
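The script above keeps only the rows whose "Purchase Date" falls in important_date. A tiny self-contained sketch of the same isin() filtering, using an invented frame in place of the Excel sheet (column values here are made up):

import pandas as pd

data_frame = pd.DataFrame({
    "Purchase Date": ["01/20/2013", "01/24/2013", "01/31/2013"],
    "Sale Amount": [1000.0, 250.5, 823.4],
})
important_date = ["01/24/2013", "01/31/2013"]
print(data_frame[data_frame["Purchase Date"].isin(important_date)])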
874e1406ac2f5f2aebbd9596a503a5a03c41ec9f
09e57dd1374713f06b70d7b37a580130d9bbab0d
/benchmark/startCirq1859.py
9a11066eea35baf199d4b6510c65419101a0c599
[ "BSD-3-Clause" ]
permissive
UCLA-SEAL/QDiff
ad53650034897abb5941e74539e3aee8edb600ab
d968cbc47fe926b7f88b4adf10490f1edd6f8819
refs/heads/main
2023-08-05T04:52:24.961998
2021-09-19T02:56:16
2021-09-19T02:56:16
405,159,939
2
0
null
null
null
null
UTF-8
Python
false
false
4,368
py
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 5/15/20 4:49 PM # @File : grover.py # qubit number=5 # total number=62 import cirq import cirq.google as cg from typing import Optional import sys from math import log2 import numpy as np #thatsNoCode from cirq.contrib.svg import SVGCircuit # Symbols for the rotation angles in the QAOA circuit. def make_circuit(n: int, input_qubit): c = cirq.Circuit() # circuit begin c.append(cirq.H.on(input_qubit[0])) # number=3 c.append(cirq.H.on(input_qubit[1])) # number=4 c.append(cirq.H.on(input_qubit[2])) # number=5 c.append(cirq.H.on(input_qubit[3])) # number=6 c.append(cirq.H.on(input_qubit[0])) # number=41 c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=42 c.append(cirq.H.on(input_qubit[0])) # number=43 c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=59 c.append(cirq.Z.on(input_qubit[1])) # number=60 c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=61 c.append(cirq.H.on(input_qubit[0])) # number=51 c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=52 c.append(cirq.H.on(input_qubit[0])) # number=53 c.append(cirq.H.on(input_qubit[4])) # number=21 c.append(cirq.X.on(input_qubit[2])) # number=39 for i in range(2): c.append(cirq.H.on(input_qubit[0])) # number=1 c.append(cirq.H.on(input_qubit[1])) # number=2 c.append(cirq.H.on(input_qubit[2])) # number=7 c.append(cirq.H.on(input_qubit[3])) # number=8 c.append(cirq.H.on(input_qubit[0])) # number=56 c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=57 c.append(cirq.H.on(input_qubit[0])) # number=58 c.append(cirq.H.on(input_qubit[0])) # number=48 c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=49 c.append(cirq.H.on(input_qubit[0])) # number=50 c.append(cirq.Z.on(input_qubit[3])) # number=46 c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=47 c.append(cirq.X.on(input_qubit[4])) # number=40 c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=35 c.append(cirq.H.on(input_qubit[0])) # number=17 c.append(cirq.H.on(input_qubit[1])) # number=18 c.append(cirq.CNOT.on(input_qubit[4],input_qubit[3])) # number=54 c.append(cirq.H.on(input_qubit[2])) # number=19 c.append(cirq.H.on(input_qubit[3])) # number=20 c.append(cirq.X.on(input_qubit[0])) # number=9 c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=29 c.append(cirq.X.on(input_qubit[1])) # number=30 c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=31 c.append(cirq.X.on(input_qubit[2])) # number=11 c.append(cirq.X.on(input_qubit[1])) # number=44 c.append(cirq.X.on(input_qubit[3])) # number=12 c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=24 c.append(cirq.X.on(input_qubit[0])) # number=25 c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=26 c.append(cirq.X.on(input_qubit[1])) # number=14 c.append(cirq.X.on(input_qubit[2])) # number=15 c.append(cirq.X.on(input_qubit[3])) # number=16 c.append(cirq.X.on(input_qubit[1])) # number=22 c.append(cirq.Y.on(input_qubit[1])) # number=32 c.append(cirq.X.on(input_qubit[1])) # number=23 c.append(cirq.CNOT.on(input_qubit[4],input_qubit[3])) # number=55 # circuit end c.append(cirq.measure(*input_qubit, key='result')) return c def bitstring(bits): return ''.join(str(int(b)) for b in bits) if __name__ == '__main__': qubit_count = 5 input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)] circuit = make_circuit(qubit_count,input_qubits) circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap') circuit_sample_count =2000 simulator = cirq.Simulator() 
result = simulator.run(circuit, repetitions=circuit_sample_count) frequencies = result.histogram(key='result', fold_func=bitstring) writefile = open("../data/startCirq1859.csv","w+") print(format(frequencies),file=writefile) print("results end", file=writefile) print(circuit.__len__(), file=writefile) print(circuit,file=writefile) writefile.close()
cb66633d69cdc51aabed0dce4c52fdc9d9046f0c
ac5e52a3fc52dde58d208746cddabef2e378119e
/exps-sblp/sblp_ut=3.5_rd=1_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=31/sched.py
c3f11e6cb2e1ff4b81896f5c25e2978f581d045c
[]
no_license
ricardobtxr/experiment-scripts
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
7bcebff7ac2f2822423f211f1162cd017a18babb
refs/heads/master
2023-04-09T02:37:41.466794
2021-04-25T03:27:16
2021-04-25T03:27:16
358,926,457
0
0
null
null
null
null
UTF-8
Python
false
false
561
py
-S 0 -X RUN -Q 0 -L 1 113 400 -S 0 -X RUN -Q 0 -L 1 57 250 -S 0 -X RUN -Q 0 -L 1 48 175 -S 0 -X RUN -Q 0 -L 1 44 300 -S 1 -X RUN -Q 0 -L 1 42 150 -S 2 -X RUN -Q 1 -L 1 42 200 -S 2 -X RUN -Q 1 -L 1 39 250 -S 2 -X RUN -Q 1 -L 1 35 175 -S 2 -X RUN -Q 1 -L 1 34 150 -S 3 -X RUN -Q 2 -L 1 34 100 -S 3 -X RUN -Q 2 -L 1 33 175 -S 3 -X RUN -Q 2 -L 1 32 125 -S 4 -X RUN -Q 2 -L 1 31 125 -S 5 -X RUN -Q 3 -L 1 29 250 -S 5 -X RUN -Q 3 -L 1 25 175 -S 5 -X RUN -Q 3 -L 1 15 125 -S 5 -X RUN -Q 3 -L 1 8 100
e264fcd14c8db9adc7b7a2c16860d352a8256379
f83f053278a036e18466d85585bc03a28c0f140a
/tests/formats/dataclass/parsers/test_mixins.py
209484d24ae72b068ebc67cf01a8379ef99b2d78
[ "MIT" ]
permissive
finswimmer/xsdata
dd951124e378bf9f4d8bd6939e4ebe542c677ee2
eed822b83f362f48561a7d116e181a5422ff52dd
refs/heads/master
2023-05-05T21:16:20.693559
2021-05-31T16:11:44
2021-05-31T16:33:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,261
py
from unittest.case import TestCase from tests.fixtures.books import Books from tests.fixtures.books.fixtures import books from tests.fixtures.books.fixtures import events from xsdata.exceptions import XmlHandlerError from xsdata.formats.dataclass.parsers.mixins import EventsHandler from xsdata.formats.dataclass.parsers.mixins import XmlHandler from xsdata.formats.dataclass.parsers.nodes import RecordParser class XmlHandlerTests(TestCase): def test_process(self): parser = RecordParser() handler = XmlHandler(clazz=Books, parser=parser) self.assertEqual([], handler.queue) self.assertEqual([], handler.objects) with self.assertRaises(NotImplementedError): handler.parse(None) class EventsHandlerTests(TestCase): def setUp(self) -> None: self.parser = RecordParser(handler=EventsHandler) def test_parse(self): self.assertEqual(books, self.parser.parse(events, Books)) self.assertEqual({"brk": "urn:books"}, self.parser.ns_map) def test_parse_with_unhandled_event(self): with self.assertRaises(XmlHandlerError) as cm: self.parser.parse([("reverse", "")], Books) self.assertEqual("Unhandled event: `reverse`.", str(cm.exception))
b9e3150a7713d747103c0b356c213ba7eb9f6bfc
e2ab27c280b290ecfc3c34a5e76830dfe18d5b43
/vspk/v5_0/nucommand.py
74e36399a04f9bc0edf6d27f9da2a15c8e665b58
[ "BSD-3-Clause" ]
permissive
atifs/vspk-python
357e89b69c8f25a96d59e618df24032737c275d8
adb011861d938d1a8cd27e4e651d28d3bf4e2ae7
refs/heads/master
2020-03-17T07:15:06.301100
2018-02-21T02:47:42
2018-02-21T02:47:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
15,306
py
# -*- coding: utf-8 -*- # # Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from bambou import NURESTObject class NUCommand(NURESTObject): """ Represents a Command in the VSD Notes: A Command represents an operation that needs to be executed on an entity (NSG, Gateway, ...) which requires little processing by VSD, but may result in a long activity on the external entity. An example would be to trigger an action on VSD so that a Gateway download a new image. VSDs handling of the request is limited to generating a message to be sent to the device on which the download process is expected. The device then acts on the request and proceeds with the download... That may be a long process. The commands API is similar to the Jobs API with regards to triggering operations on objects. """ __rest_name__ = "command" __resource_name__ = "commands" ## Constants CONST_OVERRIDE_ABANDON = "ABANDON" CONST_COMMAND_NSG_APPLY_PATCH = "NSG_APPLY_PATCH" CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" CONST_STATUS_STARTED = "STARTED" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE" CONST_COMMAND_UNKNOWN = "UNKNOWN" CONST_OVERRIDE_UNSPECIFIED = "UNSPECIFIED" CONST_COMMAND_NSG_DOWNLOAD_OS_IMAGE = "NSG_DOWNLOAD_OS_IMAGE" CONST_STATUS_COMPLETE = "COMPLETE" CONST_STATUS_FAILED = "FAILED" CONST_COMMAND_NSG_UPGRADE_TO_IMAGE = "NSG_UPGRADE_TO_IMAGE" CONST_STATUS_UNKNOWN = "UNKNOWN" def __init__(self, **kwargs): """ Initializes a Command instance Notes: You can specify all parameters while calling this methods. 
A special argument named `data` will enable you to load the object from a Python dictionary Examples: >>> command = NUCommand(id=u'xxxx-xxx-xxx-xxx', name=u'Command') >>> command = NUCommand(data=my_dict) """ super(NUCommand, self).__init__() # Read/Write Attributes self._last_updated_by = None self._detailed_status = None self._detailed_status_code = None self._entity_scope = None self._command = None self._command_information = None self._associated_param = None self._associated_param_type = None self._status = None self._full_command = None self._summary = None self._override = None self._external_id = None self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="detailed_status", remote_name="detailedStatus", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="detailed_status_code", remote_name="detailedStatusCode", attribute_type=int, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL']) self.expose_attribute(local_name="command", remote_name="command", attribute_type=str, is_required=True, is_unique=False, choices=[u'NSG_APPLY_PATCH', u'NSG_DOWNLOAD_OS_IMAGE', u'NSG_UPGRADE_TO_IMAGE', u'UNKNOWN']) self.expose_attribute(local_name="command_information", remote_name="commandInformation", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="associated_param", remote_name="associatedParam", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="associated_param_type", remote_name="associatedParamType", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'COMPLETE', u'FAILED', u'STARTED', u'UNKNOWN']) self.expose_attribute(local_name="full_command", remote_name="fullCommand", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="summary", remote_name="summary", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="override", remote_name="override", attribute_type=str, is_required=False, is_unique=False, choices=[u'ABANDON', u'UNSPECIFIED']) self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self._compute_args(**kwargs) # Properties @property def last_updated_by(self): """ Get last_updated_by value. Notes: ID of the user who last updated the object. This attribute is named `lastUpdatedBy` in VSD API. """ return self._last_updated_by @last_updated_by.setter def last_updated_by(self, value): """ Set last_updated_by value. Notes: ID of the user who last updated the object. This attribute is named `lastUpdatedBy` in VSD API. """ self._last_updated_by = value @property def detailed_status(self): """ Get detailed_status value. Notes: A string representing the detailed status of the operation that was triggered by the execution of the Command instance. This attribute is named `detailedStatus` in VSD API. """ return self._detailed_status @detailed_status.setter def detailed_status(self, value): """ Set detailed_status value. Notes: A string representing the detailed status of the operation that was triggered by the execution of the Command instance. 
This attribute is named `detailedStatus` in VSD API. """ self._detailed_status = value @property def detailed_status_code(self): """ Get detailed_status_code value. Notes: A numerical code mapping to a list of detailed statuses that can apply to a Command instance. This attribute is named `detailedStatusCode` in VSD API. """ return self._detailed_status_code @detailed_status_code.setter def detailed_status_code(self, value): """ Set detailed_status_code value. Notes: A numerical code mapping to a list of detailed statuses that can apply to a Command instance. This attribute is named `detailedStatusCode` in VSD API. """ self._detailed_status_code = value @property def entity_scope(self): """ Get entity_scope value. Notes: Specify if scope of entity is Data center or Enterprise level This attribute is named `entityScope` in VSD API. """ return self._entity_scope @entity_scope.setter def entity_scope(self, value): """ Set entity_scope value. Notes: Specify if scope of entity is Data center or Enterprise level This attribute is named `entityScope` in VSD API. """ self._entity_scope = value @property def command(self): """ Get command value. Notes: Specifies the type of command that is stated for execution on the system receiving the operation request. A request for download, a request for upgrade, a request for revocation, ... """ return self._command @command.setter def command(self, value): """ Set command value. Notes: Specifies the type of command that is stated for execution on the system receiving the operation request. A request for download, a request for upgrade, a request for revocation, ... """ self._command = value @property def command_information(self): """ Get command_information value. Notes: Informative details on what command is to be executed. It complements the commandType attribute. An example of a value could be a URL, a version number, a UUID of another object, ... This attribute is named `commandInformation` in VSD API. """ return self._command_information @command_information.setter def command_information(self, value): """ Set command_information value. Notes: Informative details on what command is to be executed. It complements the commandType attribute. An example of a value could be a URL, a version number, a UUID of another object, ... This attribute is named `commandInformation` in VSD API. """ self._command_information = value @property def associated_param(self): """ Get associated_param value. Notes: Parameters to be supplied for execution of this command. This could either be a string of parameters or ID of an object supplying parameters. This attribute is named `associatedParam` in VSD API. """ return self._associated_param @associated_param.setter def associated_param(self, value): """ Set associated_param value. Notes: Parameters to be supplied for execution of this command. This could either be a string of parameters or ID of an object supplying parameters. This attribute is named `associatedParam` in VSD API. """ self._associated_param = value @property def associated_param_type(self): """ Get associated_param_type value. Notes: Type of the object which supplies parameters for this command. This attribute is named `associatedParamType` in VSD API. """ return self._associated_param_type @associated_param_type.setter def associated_param_type(self, value): """ Set associated_param_type value. Notes: Type of the object which supplies parameters for this command. This attribute is named `associatedParamType` in VSD API. 
""" self._associated_param_type = value @property def status(self): """ Get status value. Notes: The status of the Command from a VSD perspective. """ return self._status @status.setter def status(self, value): """ Set status value. Notes: The status of the Command from a VSD perspective. """ self._status = value @property def full_command(self): """ Get full_command value. Notes: Full command including parameters that is to be executed. This attribute is named `fullCommand` in VSD API. """ return self._full_command @full_command.setter def full_command(self, value): """ Set full_command value. Notes: Full command including parameters that is to be executed. This attribute is named `fullCommand` in VSD API. """ self._full_command = value @property def summary(self): """ Get summary value. Notes: A generated summary for the action giving some general context on the command executed. """ return self._summary @summary.setter def summary(self, value): """ Set summary value. Notes: A generated summary for the action giving some general context on the command executed. """ self._summary = value @property def override(self): """ Get override value. Notes: Operator specified action which overrides the normal life cycle of a command. """ return self._override @override.setter def override(self, value): """ Set override value. Notes: Operator specified action which overrides the normal life cycle of a command. """ self._override = value @property def external_id(self): """ Get external_id value. Notes: External object ID. Used for integration with third party systems This attribute is named `externalID` in VSD API. """ return self._external_id @external_id.setter def external_id(self, value): """ Set external_id value. Notes: External object ID. Used for integration with third party systems This attribute is named `externalID` in VSD API. """ self._external_id = value
9f1411c79f876a84d8a6883959fec6a1df518fd1
e60c7870161083529ee488dea9984a0ff04a896d
/CES-22/2obimestre/aula12/cookies-exemplo/venv/lib/python3.6/copy.py
a29e163de3eb36d30145124a76c68a5b9c3cb3f4
[]
no_license
Claudiocfls/ITA-projects
68b5512464bd55b2d8a62dbcff95ecbe6540e592
1380710276f5ffb3298de246ea1b5a5580716ae4
refs/heads/master
2021-01-24T00:53:56.548413
2018-06-29T20:37:15
2018-06-29T20:37:15
122,785,397
0
0
null
null
null
null
UTF-8
Python
false
false
57
py
/home/claudio/.pyenv/versions/3.6.4/lib/python3.6/copy.py
63de83bc58b69a246b3000aac1ba64b7ce19d9a1
462c56e7454c97e0541588b9be66a4e216ea20fd
/133.clone-graph.py
81d21631ec87e2710b8dbedfb34003bd67d6784d
[]
no_license
LouisYLWang/leetcode_python
d5ac6289e33c5d027f248aa3e7dd66291354941c
2ecaeed38178819480388b5742bc2ea12009ae16
refs/heads/master
2020-05-27T08:38:48.532000
2019-12-28T07:08:57
2019-12-28T07:08:57
188,549,256
0
0
null
null
null
null
UTF-8
Python
false
false
793
py
#
# @lc app=leetcode id=133 lang=python3
#
# [133] Clone Graph
#

# @lc code=start
"""
# Definition for a Node.
class Node:
    def __init__(self, val, neighbors):
        self.val = val
        self.neighbors = neighbors
"""
class Solution(object):
    def cloneGraph(self, node):
        """
        :type node: Node
        :rtype: Node
        """
        visited = dict()

        def Helper(node, visited):
            # Check for None before touching node.val (also handles an empty graph).
            if not node:
                return None
            if node.val not in visited:
                new_node = Node(node.val, list())
                visited[node.val] = new_node
                for node_ in node.neighbors:
                    new_node.neighbors.append(Helper(node_, visited))
                return new_node
            return visited[node.val]

        return Helper(node, visited)
# @lc code=end
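A hedged usage sketch for the solution above; the Node class below just mirrors the commented-out LeetCode definition, and the two-node cycle is invented for illustration:

class Node:
    def __init__(self, val, neighbors):
        self.val = val
        self.neighbors = neighbors

a = Node(1, [])
b = Node(2, [])
a.neighbors.append(b)
b.neighbors.append(a)

copy_a = Solution().cloneGraph(a)
print(copy_a.val, [n.val for n in copy_a.neighbors])  # 1 [2]
print(copy_a is a)  # False: the clone is a new graph, not the original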
f092d8f5c0700bd1f0b7ef271f3d6632db2faa22
f0d713996eb095bcdc701f3fab0a8110b8541cbb
/8vBvgJMc2uQJpD6d7_16.py
bb784952c2b0da9b4ee9ef08d2ea35759ff349aa
[]
no_license
daniel-reich/turbo-robot
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
a7a25c63097674c0a81675eed7e6b763785f1c41
refs/heads/main
2023-03-26T01:55:14.210264
2021-03-23T16:08:01
2021-03-23T16:08:01
350,773,815
0
0
null
null
null
null
UTF-8
Python
false
false
620
py
"""
Create a function that returns a list containing the prime factors of whatever
integer is passed to it.

### Examples

    prime_factors(20) ➞ [2, 2, 5]
    prime_factors(100) ➞ [2, 2, 5, 5]
    prime_factors(8912234) ➞ [2, 47, 94811]

### Notes

* Implement your solution using trial division.
* Your solution should not require recursion.
"""
def prime_factors(num):
    factors = [i for i in range(2, num // 2 + 1) if num % i == 0]
    prime = [i for i in factors if all(i % j != 0 for j in range(2, i))]
    res = []
    for i in prime:
        while num % i == 0:
            res.append(i)
            num //= i  # integer division keeps num an int
    if num > 1:  # num itself was prime, so no divisor <= num // 2 was found
        res.append(num)
    return res
5160648ff6181e00a8245bb666375417f509ab68
02e23da0431623db86c8138bda350a1d526d4185
/Archivos Python Documentos/Graficas/.history/3d_20200219112629.py
1f977510511dff31a480c6ee7a600b119cdd23c8
[]
no_license
Jaamunozr/Archivos-python
d9996d3d10ff8429cd1b4c2b396016a3a5482889
1f0af9ba08f12ac27e111fcceed49bbcf3b39657
refs/heads/master
2022-08-05T14:49:45.178561
2022-07-13T13:44:39
2022-07-13T13:44:39
244,073,267
0
0
null
null
null
null
UTF-8
Python
false
false
417
py
import pylab as pl
import numpy as np
from mpl_toolkits.mplot3d import Axes3D

fig = pl.figure()
ax = Axes3D(fig)
X = np.arange(-10, 10, 0.25)
Y = np.arange(-10, 10, 0.25)
X, Y = np.meshgrid(X, Y)
a = int(5)  # , ay = 0.5  (kept as a separate name so the Axes3D object is not overwritten)
print(a)
Z = np.sqrt(X ** 2 + Y ** 3)  # note: negative Y**3 values yield NaNs, so that part of the surface will not render
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=pl.cm.hot)
ax.contourf(X, Y, Z, zdir='z', offset=-2, cmap=pl.cm.hot)
ax.set_zlim(-2, 2)
pl.show()
c52ee2135e6956f261165149fb17fce093fe94b6
ac4b9385b7ad2063ea51237fbd8d1b74baffd016
/.history/google/drive_files_download_prepare_20210215011515.py
0e37e5e441bd13cdadecc7f924be0f4e7330648a
[]
no_license
preethanpa/ssoemprep
76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f
ce37127845253c768d01aeae85e5d0d1ade64516
refs/heads/main
2023-03-09T00:15:55.130818
2021-02-20T06:54:58
2021-02-20T06:54:58
null
0
0
null
null
null
null
UTF-8
Python
false
false
8,219
py
from __future__ import print_function import pickle import os.path import io from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from googleapiclient.http import MediaIoBaseDownload from oauth2client.service_account import ServiceAccountCredentials from google.oauth2 import service_account import googleapiclient.discovery import inspect import sys import json SCOPES = ['https://www.googleapis.com/auth/documents', 'https://www.googleapis.com/auth/documents.readonly', 'https://www.googleapis.com/auth/documents.readonly', 'https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive.metadata.readonly', 'https://www.googleapis.com/auth/drive.readonly', ] # The ID of a sample document. # DOCUMENT_ID = '1bQkFcQrWFHGlte8oTVtq_zyKGIgpFlWAS5_5fi8OzjY' DOCUMENT_ID = '1sXQie19gQBRHODebxBZv4xUCJy-9rGpnlpM7_SUFor4' # SERVICE_ACCOUNT_FILE = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/google/domain-wide-credentials-gdrive.json' SERVICE_ACCOUNT_FILE = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/google/app-automation-service-account-thirdrayai-1612747564720-415d6ebd6001.json' UPLOAD_FILE_LOCATION = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/documents/pdf/' doc_types = { "application/vnd.google-apps.document": "gdoc", # "application/vnd.google-apps.folder": "folder", "application/vnd.google-apps.spreadsheet": "gsheet", "application/vnd.google-apps.presentation": "gslide" } drive_files_list = [] if (sys.argv is None or sys.argv[1] is None) else json.loads(sys.argv[1]) job_id = drive_files_list.get("job_id") # google_file_type = 'gdoc' if (sys.argv is None or sys.argv[1] is None or sys.argv[1].google_file_type is None) else sys.argv[1].google_file_type # target_file_type = 'pdf' if (sys.argv is None or sys.argv[1] is None or sys.argv[1].target_file_type is None) else sys.argv[1].target_file_type # location = '/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/drive_documents/'+drive_files_list.get('job_id')+'/pdf/' # document_id = None if (sys.argv[1] is None or sys.argv[1].file_location is None) else sys.argv[1].document_id document_id = '' def get_resource(domain_wide_delegate=False, user_to_impersonate=None): """Prepare a Google Drive resource object based on credentials. """ credentials = None # use subject in case of domain-wide delegation if domain_wide_delegate: if user_to_impersonate is not None: credentials = service_account.Credentials.from_service_account_file(SERVICE_ACCOUNT_FILE, scopes=SCOPES, subject=user_to_impersonate) else: credentials = service_account.Credentials.from_service_account_file(SERVICE_ACCOUNT_FILE, scopes=SCOPES) if credentials is None: return credentials else: drive_service = build('drive', 'v3', credentials=credentials) return drive_service def download_drive_file(resource=None, document_id=None, google_file_type='gdoc', target_type=None, target_location=None): """Downloads a Google Drive file using the provided resource. If google_file_type is passed as None, then 'gdoc' / Google Doc is default. If target_type is passed as None, then 'application/pdf' is default. If location is none, then use environment variable UPLOAD_FILE_LOCATION as default """ # print(dir(resource.files())) #Get resource methods with dir. if resource is None: raise Exception('Invalid credentials. 
Provide subject email address for Drive-wide delegation')
    else:
        extension, mimeType = extension_mime_type(google_file_type, target_type)
        try:
            content = resource.files().export(fileId=document_id, mimeType=mimeType).execute()
            try:
                with open(target_location+google_file_type+'-'+document_id+extension, "wb") as file:
                    file.write(content)
                return {"file": google_file_type+'-'+document_id+extension}
            except Exception as exc_in:
                return
                # return {"document_id": document_id, "status": "Exception in with open", "message": exc_in}
        except Exception as exc_out:
            return
            # return {"document_id": document_id, "status": "Exception in content = resource_files...", "message": exc_out}

def extension_mime_type(google_file_ext=None, format=None):
    export_type = None
    if google_file_ext is not None:
        if google_file_ext == 'gdoc':
            if format == 'docx':
                export_type = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
            elif format == 'epub':
                export_type = 'application/epub+zip'
            elif format == 'html':
                export_type = 'text/html'
            elif format == 'odt':
                export_type = 'application/vnd.oasis.opendocument.text'
            elif format == 'pdf':
                export_type = 'application/pdf'
            elif format == 'rtf':
                export_type = 'application/rtf'
            elif format == 'tex':
                export_type = 'application/zip'
            elif format == 'txt':
                export_type = 'text/plain'
            elif format == 'html.zip':
                export_type = 'application/zip'
            else:
                raise Exception('Unknown format "{}"'.format(format))
        elif google_file_ext == 'gsheet':
            if format == 'csv':
                export_type = 'text/csv'
            elif format == 'html.zip':
                export_type = 'application/zip'
            elif format == 'ods':
                export_type = 'application/x-vnd.oasis.opendocument.spreadsheet'
            elif format == 'pdf':
                export_type = 'application/pdf'
            elif format == 'tsv':
                export_type = 'text/tab-separated-values'
            elif format == 'xlsx':
                export_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
            else:
                raise Exception('Unknown format "{}"'.format(format))
        elif google_file_ext == 'gslide':
            if format == 'odp':
                export_type = 'application/vnd.oasis.opendocument.presentation'
            elif format == 'pdf':
                export_type = 'application/pdf'
            elif format == 'pptx':
                export_type = 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
            elif format == 'txt':
                export_type = 'text/plain'
            else:
                raise Exception('Unknown format "{}"'.format(format))
        else:
            raise Exception('Unknown Google document extension "{}"'.format(google_file_ext))
    return '.'+format, export_type

if drive_files_list == []:
    print(json.dumps(drive_files_list))
else:
    location = os.path.join('/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/drive_documents/', job_id+'/pdf/')
    os.makedirs(location)
    location_html = os.path.join('/home/dsie/Developer/sandbox/3ray/3rml/kbc_process/drive_documents/', drive_files_list.get('job_id')+'/html/')
    os.makedirs(location_html)
    response_message = {
        "job_id": drive_files_list.get("job_id"),
        "status": "OK",
        "processed_files": []
    }
    for index, item in enumerate(drive_files_list.get('files')):
        try:
            google_file_type = doc_types[item.get('mimeType')]
            drive_document_id = item.get('id')
            target_file_type = "pdf"
            dl_response = download_drive_file(resource=get_resource(domain_wide_delegate=False), document_id=drive_document_id, google_file_type=google_file_type, target_type=target_file_type, target_location=location)
            response_message["processed_files"].append(dl_response)
        except KeyError as ke:
            pass
    print(json.dumps(response_message))

    # print(download_drive_file(resource=get_resource(domain_wide_delegate=False)), google_file_type=google_file_type,
target_type=target_file_type, target_location=location)
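For context, a hedged sketch of how this script appears to be invoked, based only on how sys.argv[1] is parsed above; the script file name, job id, Drive file ids and MIME types below are all invented placeholders:

import json
import subprocess

payload = {
    "job_id": "job-0001",
    "files": [
        {"id": "1AbCdEfGhIjK", "mimeType": "application/vnd.google-apps.document"},
        {"id": "2LmNoPqRsTuV", "mimeType": "application/vnd.google-apps.spreadsheet"},
    ],
}
subprocess.run(["python", "drive_files_download_prepare.py", json.dumps(payload)])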
4a78a093202a039af99d5d6f6fa91fbb5996a7b5
e0980f704a573894350e285f66f4cf390837238e
/.history/news/models_20201124144521.py
8af4cd733221c3bf6f1b3de3521f5b42a13a09fd
[]
no_license
rucpata/WagtailWebsite
28008474ec779d12ef43bceb61827168274a8b61
5aa44f51592f49c9a708fc5515ad877c6a29dfd9
refs/heads/main
2023-02-09T15:30:02.133415
2021-01-05T14:55:45
2021-01-05T14:55:45
303,961,094
0
0
null
null
null
null
UTF-8
Python
false
false
1,234
py
from django.db import models

from wagtail.core.fields import RichTextField
from wagtail.admin.edit_handlers import FieldPanel, InlinePanel
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.contrib.forms.models import AbstractEmailForm

# Create your models here.
class NewsPage(AbstractEmailForm):
    template = 'news/news_page.html'
    landing_page_template = 'news/news_page_leading.html'
    subpage_types = []
    max_count = 1

    intro = RichTextField(blank=True, features=['bold', 'italic', 'ol', 'ul'])
    thank_you_text = RichTextField(
        blank=True,
        features=['bold', 'italic', 'ol', 'ul'])
    map_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=False,
        on_delete=models.SET_NULL,
        help_text='The image will be cropped to 588px by 355px',
        related_name='+',
    )
    map_url = models.URLField(
        blank=True,
        help_text='Optional. If you provide a link here, the image will become'
    )

    content_panels = AbstractEmailForm.content_panels + [
        FieldPanel('intro'),
        ImageChooserPanel('map_image'),
        FieldPanel('map_url'),
        InlinePanel('form_fields', label="Form Fields"),
        FieldPanel('thank_you_text'),
        FieldPanel('from_address'),
        FieldPanel('to_address'),
        FieldPanel('subject'),
    ]
6a757cada6a40ec963004ced739f41d9c6365765
350db570521d3fc43f07df645addb9d6e648c17e
/0349_Intersection_of_Two_Arrays/solution.py
09b0c446678c615e5439884d5e2bd613446fb3e3
[]
no_license
benjaminhuanghuang/ben-leetcode
2efcc9185459a1dd881c6e2ded96c42c5715560a
a2cd0dc5e098080df87c4fb57d16877d21ca47a3
refs/heads/master
2022-12-10T02:30:06.744566
2022-11-27T04:06:52
2022-11-27T04:06:52
236,252,145
1
1
null
null
null
null
UTF-8
Python
false
false
681
py
'''
349. Intersection of Two Arrays

Given two arrays, write a function to compute their intersection.

Example:
Given nums1 = [1, 2, 2, 1], nums2 = [2, 2], return [2].

Note:
Each element in the result must be unique.
The result can be in any order.
'''


def arrays_interseciton(nums1, nums2):
    return set(nums1).intersection(nums2)

# Input:
# [1,2,2,1]
# [2]
# Output:
# [2,2]
# Expected:
# [2]


def arrays_interseciton_2(nums1, nums2):
    return [x for x in set(nums1) if x in set(nums2)]


def arrays_interseciton_3(nums1, nums2):
    return list(set(nums1) & set(nums2))


b1 = [1, 2, 3, 4, 5, 9, 11, 15]
b2 = [4, 5, 6, 7, 8]
print(arrays_interseciton(b1, b2))
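Illustrative calls (not part of the original file) showing why the set-based versions satisfy the uniqueness note above, including the [2, 2] vs [2] case mentioned in the comments:

print(arrays_interseciton([1, 2, 2, 1], [2, 2]))    # {2}  - duplicates collapse
print(arrays_interseciton_2([1, 2, 2, 1], [2, 2]))  # [2]
print(arrays_interseciton_3([1, 2, 2, 1], [2, 2]))  # [2]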
27e62b20254e8681133d182d6482ca8c61c3f851
9505e191cb287507c7df05212ab562bea1eda553
/莫烦强化学习/Prioritized_Replay_DQN/RL_brain.py
f160c6eb97470f9a107bb19a4008ad59759b5c9e
[ "MIT" ]
permissive
iisdd/Courses
c7a662305f3efe7d61eb23f766381290b1107bb8
a47d202e0d7e1ba85a38c6fe3dd9619eceb1045c
refs/heads/main
2023-04-15T17:40:36.474322
2021-04-27T14:31:42
2021-04-27T14:31:42
316,904,233
1
0
null
null
null
null
UTF-8
Python
false
false
12,286
py
"""
Honestly, this one is hard to follow...; all I know is that the better an
experience is, the more likely it is to be sampled.

The DQN improvement: Prioritized Experience Replay (based on https://arxiv.org/abs/1511.05952)

View more on my tutorial page: https://morvanzhou.github.io/tutorials/

Using:
Tensorflow: 1.0
gym: 0.8.0
"""
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()

np.random.seed(1)
tf.set_random_seed(1)


class SumTree(object):
    """
    This SumTree code is a modified version and the original code is from:
    https://github.com/jaara/AI-blog/blob/master/SumTree.py

    Store data with its priority in the tree.
    """
    data_pointer = 0

    def __init__(self, capacity):
        self.capacity = capacity  # for all priority values
        self.tree = np.zeros(2 * capacity - 1)
        # [--------------Parent nodes-------------][-------leaves to record priority-------]
        #             size: capacity - 1                       size: capacity
        self.data = np.zeros(capacity, dtype=object)  # for all transitions
        # [--------------data frame-------------]
        #             size: capacity

    def add(self, p, data):
        tree_idx = self.data_pointer + self.capacity - 1
        self.data[self.data_pointer] = data  # update data_frame
        self.update(tree_idx, p)  # update tree_frame

        self.data_pointer += 1
        if self.data_pointer >= self.capacity:  # replace when exceed the capacity
            self.data_pointer = 0

    def update(self, tree_idx, p):
        change = p - self.tree[tree_idx]
        self.tree[tree_idx] = p
        # then propagate the change through tree
        while tree_idx != 0:    # this method is faster than the recursive loop in the reference code
            tree_idx = (tree_idx - 1) // 2
            self.tree[tree_idx] += change

    def get_leaf(self, v):
        """
        Tree structure and array storage:

        Tree index:
             0         -> storing priority sum
            / \
          1     2
         / \   / \
        3   4 5   6    -> storing priority for transitions

        Array type for storing:
        [0,1,2,3,4,5,6]
        """
        parent_idx = 0
        while True:     # the while loop is faster than the method in the reference code
            cl_idx = 2 * parent_idx + 1         # this leaf's left and right kids
            cr_idx = cl_idx + 1
            if cl_idx >= len(self.tree):        # reach bottom, end search
                leaf_idx = parent_idx
                break
            else:       # downward search, always search for a higher priority node
                if v <= self.tree[cl_idx]:  # binary descent: if v is not in the left subtree, it is in the right one
                    parent_idx = cl_idx
                else:
                    v -= self.tree[cl_idx]
                    parent_idx = cr_idx

        data_idx = leaf_idx - self.capacity + 1
        return leaf_idx, self.tree[leaf_idx], self.data[data_idx]

    @property
    def total_p(self):
        return self.tree[0]  # the root


class Memory(object):  # stored as ( s, a, r, s_ ) in SumTree
    """
    This Memory class is modified based on the original code from:
    https://github.com/jaara/AI-blog/blob/master/Seaquest-DDQN-PER.py
    """
    epsilon = 0.01  # small amount to avoid zero priority
    alpha = 0.6  # [0~1] convert the importance of TD error to priority
    beta = 0.4  # importance-sampling, from initial value increasing to 1
    beta_increment_per_sampling = 0.001
    abs_err_upper = 1.
# clipped abs error def __init__(self, capacity): self.tree = SumTree(capacity) def store(self, transition): max_p = np.max(self.tree.tree[-self.tree.capacity:]) if max_p == 0: max_p = self.abs_err_upper self.tree.add(max_p, transition) # set the max p for new p def sample(self, n): b_idx, b_memory, ISWeights = np.empty((n,), dtype=np.int32), np.empty((n, self.tree.data[0].size)), np.empty((n, 1)) pri_seg = self.tree.total_p / n # priority segment self.beta = np.min([1., self.beta + self.beta_increment_per_sampling]) # max = 1 min_prob = np.min(self.tree.tree[-self.tree.capacity:]) / self.tree.total_p # for later calculate ISweight for i in range(n): a, b = pri_seg * i, pri_seg * (i + 1) v = np.random.uniform(a, b) idx, p, data = self.tree.get_leaf(v) prob = p / self.tree.total_p ISWeights[i, 0] = np.power(prob/min_prob, -self.beta) b_idx[i], b_memory[i, :] = idx, data return b_idx, b_memory, ISWeights def batch_update(self, tree_idx, abs_errors): abs_errors += self.epsilon # convert to abs and avoid 0 clipped_errors = np.minimum(abs_errors, self.abs_err_upper) ps = np.power(clipped_errors, self.alpha) for ti, p in zip(tree_idx, ps): self.tree.update(ti, p) class DQNPrioritizedReplay: def __init__( self, n_actions, n_features, learning_rate=0.005, reward_decay=0.9, e_greedy=0.9, replace_target_iter=500, memory_size=10000, batch_size=32, e_greedy_increment=None, output_graph=False, prioritized=True, sess=None, ): self.n_actions = n_actions self.n_features = n_features self.lr = learning_rate self.gamma = reward_decay self.epsilon_max = e_greedy self.replace_target_iter = replace_target_iter self.memory_size = memory_size self.batch_size = batch_size self.epsilon_increment = e_greedy_increment self.epsilon = 0 if e_greedy_increment is not None else self.epsilon_max self.prioritized = prioritized # decide to use double q or not self.learn_step_counter = 0 self._build_net() t_params = tf.get_collection('target_net_params') e_params = tf.get_collection('eval_net_params') self.replace_target_op = [tf.assign(t, e) for t, e in zip(t_params, e_params)] if self.prioritized: self.memory = Memory(capacity=memory_size) else: self.memory = np.zeros((self.memory_size, n_features*2+2)) if sess is None: self.sess = tf.Session() self.sess.run(tf.global_variables_initializer()) else: self.sess = sess if output_graph: tf.summary.FileWriter("logs/", self.sess.graph) self.cost_his = [] def _build_net(self): def build_layers(s, c_names, n_l1, w_initializer, b_initializer, trainable): with tf.variable_scope('l1'): w1 = tf.get_variable('w1', [self.n_features, n_l1], initializer=w_initializer, collections=c_names, trainable=trainable) b1 = tf.get_variable('b1', [1, n_l1], initializer=b_initializer, collections=c_names, trainable=trainable) l1 = tf.nn.relu(tf.matmul(s, w1) + b1) with tf.variable_scope('l2'): w2 = tf.get_variable('w2', [n_l1, self.n_actions], initializer=w_initializer, collections=c_names, trainable=trainable) b2 = tf.get_variable('b2', [1, self.n_actions], initializer=b_initializer, collections=c_names, trainable=trainable) out = tf.matmul(l1, w2) + b2 return out # ------------------ build evaluate_net ------------------ self.s = tf.placeholder(tf.float32, [None, self.n_features], name='s') # input self.q_target = tf.placeholder(tf.float32, [None, self.n_actions], name='Q_target') # for calculating loss if self.prioritized: self.ISWeights = tf.placeholder(tf.float32, [None, 1], name='IS_weights') with tf.variable_scope('eval_net'): c_names, n_l1, w_initializer, b_initializer = \ ['eval_net_params', 
tf.GraphKeys.GLOBAL_VARIABLES], 20, \ tf.random_normal_initializer(0., 0.3), tf.constant_initializer(0.1) # config of layers self.q_eval = build_layers(self.s, c_names, n_l1, w_initializer, b_initializer, True) with tf.variable_scope('loss'): if self.prioritized: self.abs_errors = tf.reduce_sum(tf.abs(self.q_target - self.q_eval), axis=1) # for updating Sumtree self.loss = tf.reduce_mean(self.ISWeights * tf.squared_difference(self.q_target, self.q_eval)) else: self.loss = tf.reduce_mean(tf.squared_difference(self.q_target, self.q_eval)) with tf.variable_scope('train'): self._train_op = tf.train.RMSPropOptimizer(self.lr).minimize(self.loss) # ------------------ build target_net ------------------ self.s_ = tf.placeholder(tf.float32, [None, self.n_features], name='s_') # input with tf.variable_scope('target_net'): c_names = ['target_net_params', tf.GraphKeys.GLOBAL_VARIABLES] self.q_next = build_layers(self.s_, c_names, n_l1, w_initializer, b_initializer, False) def store_transition(self, s, a, r, s_): if self.prioritized: # prioritized replay transition = np.hstack((s, [a, r], s_)) self.memory.store(transition) # have high priority for newly arrived transition else: # random replay if not hasattr(self, 'memory_counter'): self.memory_counter = 0 transition = np.hstack((s, [a, r], s_)) index = self.memory_counter % self.memory_size self.memory[index, :] = transition self.memory_counter += 1 def choose_action(self, observation): observation = observation[np.newaxis, :] if np.random.uniform() < self.epsilon: actions_value = self.sess.run(self.q_eval, feed_dict={self.s: observation}) action = np.argmax(actions_value) else: action = np.random.randint(0, self.n_actions) return action def learn(self): if self.learn_step_counter % self.replace_target_iter == 0: self.sess.run(self.replace_target_op) print('\ntarget_params_replaced\n') if self.prioritized: tree_idx, batch_memory, ISWeights = self.memory.sample(self.batch_size) else: sample_index = np.random.choice(self.memory_size, size=self.batch_size) batch_memory = self.memory[sample_index, :] q_next, q_eval = self.sess.run( [self.q_next, self.q_eval], feed_dict={self.s_: batch_memory[:, -self.n_features:], self.s: batch_memory[:, :self.n_features]}) q_target = q_eval.copy() batch_index = np.arange(self.batch_size, dtype=np.int32) eval_act_index = batch_memory[:, self.n_features].astype(int) reward = batch_memory[:, self.n_features + 1] q_target[batch_index, eval_act_index] = reward + self.gamma * np.max(q_next, axis=1) if self.prioritized: _, abs_errors, self.cost = self.sess.run([self._train_op, self.abs_errors, self.loss], feed_dict={self.s: batch_memory[:, :self.n_features], self.q_target: q_target, self.ISWeights: ISWeights}) self.memory.batch_update(tree_idx, abs_errors) # update priority else: _, self.cost = self.sess.run([self._train_op, self.loss], feed_dict={self.s: batch_memory[:, :self.n_features], self.q_target: q_target}) self.cost_his.append(self.cost) self.epsilon = self.epsilon + self.epsilon_increment if self.epsilon < self.epsilon_max else self.epsilon_max self.learn_step_counter += 1
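Editorial note, not part of the original file: Memory.sample above slices the total priority mass into n equal segments and draws one uniform value per segment, so leaves holding larger priorities are proportionally more likely to be returned. A small standalone illustration of that proportional behaviour, reusing only the SumTree class from this file (the priorities and labels are invented):

import numpy as np

tree = SumTree(capacity=4)
for p, label in zip([1.0, 1.0, 1.0, 5.0], ["a", "b", "c", "d"]):
    tree.add(p, label)

counts = {}
for _ in range(10000):
    v = np.random.uniform(0, tree.total_p)
    _, priority, label = tree.get_leaf(v)
    counts[label] = counts.get(label, 0) + 1
print(counts)  # "d" (priority 5.0) should be drawn roughly 5x as often as "a", "b" or "c"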
724d0bb4e0744c1d6d69e5e19135a4287262044b
eb0711915d6bba2f765f052736e33ac9a9a397a6
/HE0435/simulation/rebin/rebin_arc.py~
337e999780ea9cca34a003793c200dfa2a4d4cf5
[]
no_license
dartoon/GL_HostGalaxy
cd2166f273ae7e0397a7d2d39f760ab59e86f014
7469f1c1e640d176a75cc6e9497920e494ad656a
refs/heads/master
2016-08-11T13:27:17.545360
2016-04-07T19:04:57
2016-04-07T19:04:57
46,524,027
1
0
null
null
null
null
UTF-8
Python
false
false
856
#from high resoluted images (sub=6) to lower one (bin together)
from numpy import *
from block import *
import pyfits

file1 = open('../pylens/HE0435.txt','r')
para = loadtxt(file1)
file1.close()
#print len(para)
ln=len(para)
for l in range(ln):
    filename='../fits/HE_arc-{0}.fits'.format(l+1)   # take one image
    d = pyfits.open(filename)[0].data.copy()
    d = concatenate([d,zeros([10,len(d.T)])])
    d = concatenate([d,zeros([len(d),10])],axis=1)   #expand the array
    #print sum(d[65:69,67:71])
    #[0:136] x ,y -> y, x
    ####       y x
    #0:174
    a=[0,3,0,3,2,5,2,5,4,1,4,1]
    b=[0,0,3,3,4,4,1,1,2,2,5,5]   #from the info. given by kai
    for i in range(len(a)):
        dd=d[a[i]:360+a[i],b[i]:360+b[i]]   #the size before bin
        aaa=block(dd,(60,60))
        pyfits.PrimaryHDU(aaa).writeto('../fits/binall/arc-{0}-{1}.fits'.format(l+1,i+1),clobber=True)
173f98b7e79f6ff249b1f5a76b04e023a7ef9b8b
17acb8e20f9a24b16ce3651302fc2d7fc7b887a6
/src/utils/aiml_generator/generator.py
5f0982bb130883d7337ec983090811960cd1d022
[ "MIT" ]
permissive
cen-ai/program-y
91052fdc11aec0f60311e3429895fac489d8ce54
a753667638147544c54dbebd9f1c8f9ae7f2159e
refs/heads/master
2020-03-22T15:11:07.896885
2018-10-15T22:13:58
2018-10-15T22:13:58
140,234,173
5
5
NOASSERTION
2019-01-03T09:09:07
2018-07-09T05:11:08
Python
UTF-8
Python
false
false
8,195
py
import os import csv import os.path from optparse import OptionParser class CmdArgsHandler: def validate_args(self): parser = OptionParser() parser.add_option("-f", "--file", dest="csvloc", help="Specify the location and filename of the CSV file.") parser.add_option("-d", "--directory", dest="dir", help="Specify the directory which contains the CSV.") parser.add_option("-o", "--output", dest="aimlloc", help="Specify the location where the aiml file should go.") (opts, args) = parser.parse_args() mandatories = ['aimlloc'] self.check_manditory_opts_present(parser, opts, mandatories) return opts def check_manditory_opts_present(self, parser, opts, mandatories): for option in mandatories: if not opts.__dict__[option]: print("ERROR: Mandatory argument is missing.\n") parser.print_help() exit(-1) if not opts.csvloc and not opts.dir: print("ERROR: Enter the csv file location, or the directory.\n") parser.print_help() exit(-1) class CsvReader: def __init__(self, file_name): self.file_name = file_name self.csvlength = 0 def read_file(self): if not os.path.exists(self.file_name): print(self.file_name + " is not a valid file.") exit(-1) collection = [] with open(self.file_name, 'r') as f: reader = csv.reader(f) for linenum, row in enumerate(reader): if str(row).find('#') != -1: continue if str(row).find(',') != -1: collection.append([linenum + 1] + row) self.csvlength += 1 return collection class CollectionLoader: def __init__(self, csv_file_name): self.csv_file_name = csv_file_name def get_collection(self): read = CsvReader(self.csv_file_name) collection = read.read_file() self.csvlength = read.csvlength return collection def format_collection(self, collection): parser = SentenceParser(collection[1:]) # omit line number return parser.populate() class XmlWriter: def __init__(self, output_loc, file_name): self.aiml_file = open(output_loc + file_name + ".aiml", "w") def prepare_sentence(self, sentence): sentence.pop(0) value = ' '.join(sentence) return value def write_body(self, sentence): template_content = sentence[0] pattern_text = self.prepare_sentence(sentence) self.aiml_file.write("\n\t<category>") self.aiml_file.write("\n\t\t<pattern>") self.aiml_file.write(pattern_text.upper()) self.aiml_file.write("</pattern>") self.aiml_file.write("\n\t\t<template>") self.aiml_file.write(template_content) self.aiml_file.write("\n\t\t</template>") self.aiml_file.write("\n\t</category>") def open_file(self): self.aiml_file.write("<?xml version='1.0' encoding='ISO-8859-1'?>") self.aiml_file.write("\n<aiml version=\"1.0.1\">") self.aiml_file.write("""\n \t<!-- --> \t<!-- This AIML file has been auto generated by the Program-Y util aiml_generator. --> \t<!-- --> \t<!-- Y-Bot is Copyright &copy; 2017 by Keith Sterling. --> \t<!-- \tPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated \tdocumentation files (the "Software"), to deal in the Software without restriction, including without limitation \tthe rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, \tand to permit persons to whom the Software is furnished to do so, subject to the following conditions: \tThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. \tTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO \tTHE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE \tAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, \tTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \t--> """) def close_file(self): self.aiml_file.write("\n</aiml>\n") self.aiml_file.close() class SentenceParser: def __init__(self, sentence): self.sentence = sentence def get_sentence_element(self, i): return self.sentence[i].strip() def get_sentence_length(self): return len(self.sentence) def parser(self, trees, branch, count): if self.get_sentence_length() <= count: return word = self.get_sentence_element(count) # Optional word if "(" in word and count > 0: alt = branch[:] alt.append(word.strip("()")) trees.append(alt) self.parser(trees, branch, count + 1) self.parser(trees, alt, count + 1) # One from the list elif word.count('|') > 0: split = word.split('|') options = len(split) for i in range(1, options): alt = branch[:] alt.append(split[i]) trees.append(alt) self.parser(trees, alt, count + 1) branch.append(split[0]) self.parser(trees, branch, count + 1) # Only one word allowed by csv column elif (count > 0) and (word.count(' ') > 0): print ("WARNING: Comma missing in file [%s]" % word) branch.append(word) self.parser(trees, branch, count + 1) # Single word else: branch.append(word) self.parser(trees, branch, count + 1) def populate(self): trees = [] branch = [] trees.append(branch) self.parser(trees, branch, 0) return trees class Generator(): def __init__(self): self.categories = {} def get_aiml_file_name(self, csv_file_name): initial_split = csv_file_name.split('/') i = len(initial_split) secondary_split = initial_split[i - 1].split('.') return secondary_split[0] def generate_one(self, opts, csv_loc): cl = CollectionLoader(csv_loc) collections = cl.get_collection() if not collections: print(csv_loc + " has no content.") return aiml_file_name = self.get_aiml_file_name(csv_loc) xml = XmlWriter(opts.aimlloc, aiml_file_name) xml.open_file() for collection in collections: parsed_collection = cl.format_collection(collection) for rule in parsed_collection: xml.write_body(rule) xml.close_file() def get_files_in_dir(self, opts): if not os.path.exists(opts.dir): print(opts.dir + " is not a valid directory.") exit(-1) file_names = [] for file in os.listdir(opts.dir): if file.endswith(".csv"): file_names.append(str(os.path.join(opts.dir, file))) if not file_names: print( "FATAL: There are no CSV files at this location.\n Are you in the right directory?") exit(-1) return file_names if __name__ == '__main__': cmd = CmdArgsHandler() opts = cmd.validate_args() g = Generator() if not opts.aimlloc.endswith("/"): opts.aimlloc = opts.aimlloc + "/" if not os.path.exists(opts.aimlloc): print(opts.aimlloc + " is not a valid output directory.") exit(-1) if opts.dir: if not opts.dir.endswith("/"): opts.dir = opts.dir + "/" file_names = g.get_files_in_dir(opts) for csv_file in file_names: g.generate_one(opts, csv_file) else: g.generate_one(opts, opts.csvloc)
8559ef8a1708043e87dc324c69fdc5c2fd4a9cc5
5306217707f99ff1d082bb974db0ccebf948763f
/ntds/management/commands/load_ntd_reporters.py
6a982e8e91b60fd7022f626c0220309161086c87
[]
no_license
sparkplug/rapidsms-ntds
05b64e67d5bcf751029653eb2f2a64e331e9a9c9
8b42c749db4c34e43eb39f3c52d540c84a8b810e
refs/heads/master
2021-01-16T17:45:09.821171
2016-02-15T20:50:26
2016-02-15T20:50:26
25,920,319
0
0
null
null
null
null
UTF-8
Python
false
false
3,712
py
#!/usr/bin/python # -*- coding: utf-8 -*- from django.core.management import BaseCommand from rapidsms_xforms.models import * from ntds.utils import validate_number from rapidsms.contrib.locations.models import Location from django.utils.safestring import mark_safe from ntds.models import Reporter import operator from django.db.models import Q import re from rapidsms_httprouter.models import Message, Connection from django.contrib.auth.models import User,Group from openpyxl.reader.excel import load_workbook from openpyxl.workbook import Workbook from uganda_common.utils import assign_backend from healthmodels.models.HealthProvider import HealthProvider from rapidsms.models import Connection, Contact,Backend from optparse import make_option import django class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option('-f', '--file', dest='file'),) def handle(self, **options): import pdb;pdb.set_trace() file = options['file'] wb = load_workbook(filename=file) ws=wb.get_sheet_by_name("Community and Schools") for row in ws.rows[1:]: try: role, _ = Group.objects.get_or_create(name='Ntds') mobile_is_valid,cleaned_mobile=validate_number("0"+str(row[10].value)) try: msisdn, backend = assign_backend(cleaned_mobile) except ValidationError: msisdn, backend = assign_backend(str(row[10].value).split("/")[0]) backend,_=Backend.objects.get_or_create(name="yo") connection, conn_created = Connection.objects.get_or_create(identity=cleaned_mobile, backend=backend) try: district=Location.objects.filter(type="district",name__icontains= row[2].value.strip())[0] except IndexError: district=None try: subcounty=Location.objects.filter(type="sub_county",name__icontains= row[5].value.strip())[0] except IndexError: subcounty=None try: pr=row[8].value.strip() if pr=="Aria": pr="Ariya" parish=district.get_descendants().filter(type="parish",name__icontains=row[8].value.strip())[0] except IndexError: parish=None print "index error %s"%row[8].value if conn_created: provider = HealthProvider.objects.create(name=row[9].value.strip(), location=parish) provider.groups.add(role) connection.contact = provider connection.save() rep = Reporter(healthprovider_ptr=provider) rep.__dict__.update(provider.__dict__) rep.district=district rep.subcounty=subcounty rep.parish=parish rep.community=row[11].value.strip() rep.id_number=str(row[0].value) rep.county=row[3].value.strip() rep.subcounty_supervisor=row[6].value.strip() _,s_mobile=validate_number(str(row[7].value)) rep.subcounty_supervisor_mobile=s_mobile rep.region=row[1].value.strip() rep.health_subcounty=row[4].value.strip() rep.subcounty_name = row[5].value.strip() rep.parish_name = row[8].value.strip() rep.save() except ValidationError: pass
f7ed33b9024bcb5172bdd606904702df2764077f
f043fee89c0e2030386adcebb74d08164b7b974f
/reagent/net_builder/continuous_actor/fully_connected.py
d4e4b0544a3a1aedfbc18dfbfa123066351d9bac
[ "BSD-3-Clause" ]
permissive
IronOnet/ReAgent
c2d22e7dc63eaf61e0a50e9343110c6df79a9b40
67434f458cde1f2c946237e866a73392279a7ede
refs/heads/master
2023-04-06T17:31:59.751700
2021-04-12T21:56:19
2021-04-12T21:57:05
357,700,053
2
0
BSD-3-Clause
2021-04-13T22:04:09
2021-04-13T22:04:09
null
UTF-8
Python
false
false
1,998
py
#!/usr/bin/env python3

from typing import List, Optional

from reagent.core.dataclasses import dataclass, field
from reagent.core.parameters import NormalizationData, param_hash
from reagent.models.actor import FullyConnectedActor
from reagent.models.base import ModelBase
from reagent.net_builder.continuous_actor_net_builder import ContinuousActorNetBuilder
from reagent.preprocessing.identify_types import CONTINUOUS_ACTION
from reagent.preprocessing.normalization import get_num_output_features


@dataclass
class FullyConnected(ContinuousActorNetBuilder):
    __hash__ = param_hash

    sizes: List[int] = field(default_factory=lambda: [128, 64])
    activations: List[str] = field(default_factory=lambda: ["relu", "relu"])
    use_batch_norm: bool = False
    use_layer_norm: bool = False
    action_activation: str = "tanh"
    exploration_variance: Optional[float] = None

    def __post_init_post_parse__(self):
        super().__init__()
        assert len(self.sizes) == len(self.activations), (
            f"Must have the same numbers of sizes and activations; got: "
            f"{self.sizes}, {self.activations}"
        )

    @property
    def default_action_preprocessing(self) -> str:
        return CONTINUOUS_ACTION

    def build_actor(
        self,
        state_normalization_data: NormalizationData,
        action_normalization_data: NormalizationData,
    ) -> ModelBase:
        state_dim = get_num_output_features(
            state_normalization_data.dense_normalization_parameters
        )
        action_dim = get_num_output_features(
            action_normalization_data.dense_normalization_parameters
        )
        return FullyConnectedActor(
            state_dim=state_dim,
            action_dim=action_dim,
            sizes=self.sizes,
            activations=self.activations,
            use_batch_norm=self.use_batch_norm,
            action_activation=self.action_activation,
            exploration_variance=self.exploration_variance,
        )
4ca9ecc74b019cd4659ed7ea6d8725e8ecdc45b2
780fa3fed7e5890f26f9c952f10cefbacfa6e09a
/recursive_convolution.py
69f1fc591f8ffd4d30a7c182f4f9dc8aae9f26f8
[]
no_license
jsrimr/code_during_KTaiacadmey
d508303417fe0916f98f7cd65c6521adb0a933fa
ab98b1613d9cb8ca77cd2462e0a42664b71bd758
refs/heads/master
2021-12-03T08:09:01.199462
2018-06-25T05:49:07
2018-06-25T05:49:07
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,346
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 22 17:18:19 2018

@author: ktai12
"""
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt

sess=tf.InteractiveSession()
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)

# let's check the convolution compression!
# 5 images with different compression ratios
img=mnist.train.images[0].reshape(28,28)   #original-28*28
plt.imshow(img)
plt.title('original')

W=tf.Variable(tf.random_normal([3,3,1,1],stddev=0.01))
img=img.reshape(-1,28,28,1)
conv2d=tf.nn.conv2d(img,W,strides=[1,1,1,1],padding="VALID")
sess.run(tf.global_variables_initializer())
conv2d_img1=conv2d.eval()
conv2d_img1.shape

#convolution once - 26*26
img1=conv2d_img1.reshape(26,26)
plt.imshow(img1)
plt.title('once')

#convolution twice
conv2d=tf.nn.conv2d(conv2d_img1,W,strides=[1,1,1,1],padding="VALID")
conv2d_img2=conv2d.eval()
conv2d_img2.shape
img2=conv2d_img2.reshape(24,24)
plt.imshow(img2)
plt.title('once')

tmp=img
for i in range(10):
    a=tmp.shape[0]
    tmp=tmp.reshape(-1,a,a,1)
    conv2d=tf.nn.conv2d(tmp,W,strides=[1,1,1,1],padding="VALID")
    conv2d_img=conv2d.eval()
    k=conv2d_img.shape[1]
    tmp=conv2d_img.reshape(k,k)
    plt.imshow(tmp)
    plt.title("{0}*{0} size".format(k))
    plt.show()
aa6ea506e424f5b0a50f4537652395b31a901596
75e1d9446cb1fca5c6a79ad0ba7f38268df1161f
/Python Programs/rotate-matrix-pattern.py
baeb1e16ee8363e61f12b29a05e4442f2d438d48
[ "CC0-1.0" ]
permissive
muhammad-masood-ur-rehman/Skillrack
6e9b6d93680dfef6f40783f02ded8a0d4283c98a
71a25417c89d0efab40ee6229ccd758b26ae4312
refs/heads/main
2023-02-03T16:45:54.462561
2020-12-23T08:36:28
2020-12-23T08:36:28
324,221,340
4
1
CC0-1.0
2020-12-24T19:12:54
2020-12-24T19:12:54
null
UTF-8
Python
false
false
1,377
py
"""
Rotate Matrix Pattern
The program must accept an integer matrix of size N*N as the input. The program must rotate
the matrix by 45 degrees in the clockwise direction. Then the program must print the rotated
matrix and print asterisks instead of empty places as the output.

Boundary Condition(s):
3 <= N <= 100

Input Format:
The first line contains N.
The next N lines, each contains N integers separated by a space.

Output Format:
The first (2*N)-1 lines containing the rotated matrix.

Example Input/Output 1:
Input:
3
1 2 3
4 5 6
7 8 9
Output:
**1
*4 2
7 5 3
*8 6
**9

Explanation:
After rotating the matrix by 45 degrees in the clockwise direction, the matrix becomes
  1
 4 2
7 5 3
 8 6
  9
So the rotated matrix is printed and the asterisks are printed instead of empty places.

Example Input/Output 2:
Input:
4
13 21 36 49
55 65 57 80
17 32 63 44
56 60 78 98
Output:
***13
**55 21
*17 65 36
56 32 57 49
*60 63 80
**78 44
***98
"""
n=int(input())
arr=[]
for i in range(n):
    a=[]
    for j in range(n):
        a.append(int(input()))
    arr.append(a)
s1,s2=0,0
stars=n-1
for i in range(1, (2*n)):
    i1=s1
    i2=s2
    for j in range(1,n+1):
        if(j<=stars):
            print("*",end=' ')
        else:
            print(arr[i1][i2],end=" ")
            i1-=1
            i2+=1
    if(i>n-1):
        s2+=1
        stars+=1
    else:
        stars-=1
        s1+=1
    print("")
079896499559440bc3938ad7b69fe1408bc3ac4c
7f167121b52312d65663d781819356eac65843ed
/lib/xss.py
d7d8def40702d5eb17aac4d709ddc99b82bb50ed
[]
no_license
mongoltolbo/mifan.tv
b3526aaeb5394b3ac1e7af85b8ea3a74e90ce73e
9ba59b049866dff7c4d9eceabed91d8a1878ef4b
refs/heads/master
2020-05-04T19:20:22.340674
2013-08-27T15:53:34
2013-08-27T15:53:34
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,954
py
#!/usr/bin/env python # coding=utf-8 # # Copyright 2013 tuila.me # Source: http://code.activestate.com/recipes/496942/ (r1) import re from htmllib import HTMLParser from cgi import escape from urlparse import urlparse from formatter import AbstractFormatter from htmlentitydefs import entitydefs from xml.sax.saxutils import quoteattr def xssescape(text): """Gets rid of < and > and & and, for good measure, :""" # return escape(text, quote=True).replace(':','&#58;') # return re.sub(r'(?<!http)(?<!https):', '&#58;', escape(text, quote=True)) return escape(text, quote=True) class XssCleaner(HTMLParser): def __init__(self, fmt = AbstractFormatter): HTMLParser.__init__(self, fmt) self.result = "" self.open_tags = [] # A list of the only tags allowed. Be careful adding to this. Adding # "script," for example, would not be smart. 'img' is out by default # because of the danger of IMG embedded commands, and/or web bugs. self.permitted_tags = ['a', 'b', 'blockquote', 'br', 'i', 'li', 'ol', 'ul', 'p', 'cite'] # A list of tags that require no closing tag. self.requires_no_close = ['img', 'br'] # A dictionary showing the only attributes allowed for particular tags. # If a tag is not listed here, it is allowed no attributes. Adding # "on" tags, like "onhover," would not be smart. Also be very careful # of "background" and "style." self.allowed_attributes = \ {'a':['href','title'], 'img':['src','alt'], 'blockquote':['type']} # The only schemes allowed in URLs (for href and src attributes). # Adding "javascript" or "vbscript" to this list would not be smart. self.allowed_schemes = ['http','https','ftp'] def handle_data(self, data): if data: self.result += xssescape(data) def handle_charref(self, ref): if len(ref) < 7 and ref.isdigit(): self.result += '&#%s;' % ref else: self.result += xssescape('&#%s' % ref) def handle_entityref(self, ref): if ref in entitydefs: self.result += '&%s;' % ref else: self.result += xssescape('&%s' % ref) def handle_comment(self, comment): if comment: self.result += xssescape("<!--%s-->" % comment) def handle_starttag(self, tag, method, attrs): if tag not in self.permitted_tags: self.result += xssescape("<%s>" % tag) else: bt = "<" + tag if tag in self.allowed_attributes: attrs = dict(attrs) self.allowed_attributes_here = \ [x for x in self.allowed_attributes[tag] if x in attrs \ and len(attrs[x]) > 0] for attribute in self.allowed_attributes_here: if attribute in ['href', 'src', 'background']: if self.url_is_acceptable(attrs[attribute]): bt += ' %s="%s"' % (attribute, attrs[attribute]) else: bt += ' %s=%s' % \ (xssescape(attribute), quoteattr(attrs[attribute])) if bt == "<a" or bt == "<img": return if tag in self.requires_no_close: bt += "/" bt += ">" self.result += bt self.open_tags.insert(0, tag) def handle_endtag(self, tag, attrs): bracketed = "</%s>" % tag if tag not in self.permitted_tags: self.result += xssescape(bracketed) elif tag in self.open_tags: self.result += bracketed self.open_tags.remove(tag) def unknown_starttag(self, tag, attributes): self.handle_starttag(tag, None, attributes) def unknown_endtag(self, tag): self.handle_endtag(tag, None) def url_is_acceptable(self,url): ### Requires all URLs to be "absolute." parsed = urlparse(url) return parsed[0] in self.allowed_schemes and '.' 
in parsed[1] def strip(self, rawstring): """Returns the argument stripped of potentially harmful HTML or Javascript code""" self.result = "" self.feed(rawstring) for endtag in self.open_tags: if endtag not in self.requires_no_close: self.result += "</%s>" % endtag return self.result def xtags(self): """Returns a printable string informing the user which tags are allowed""" self.permitted_tags.sort() tg = "" for x in self.permitted_tags: tg += "<" + x if x in self.allowed_attributes: for y in self.allowed_attributes[x]: tg += ' %s=""' % y tg += "> " return xssescape(tg.strip())
deb0e9f6012ff44565f83f4240236d6e9dba8965
1ee3dc4fa096d12e409af3a298ba01f5558c62b5
/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/nettopologytree.py
b6b429aac376022dac44a1359e79e6768a182f2c
[ "MIT" ]
permissive
parthpower/ixnetwork_restpy
321e64a87be0a4d990276d26f43aca9cf4d43cc9
73fa29796a5178c707ee4e21d90ff4dad31cc1ed
refs/heads/master
2020-07-04T13:34:42.162458
2019-08-13T20:33:17
2019-08-13T20:33:17
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,828
py
# MIT LICENSE # # Copyright 1997 - 2019 by IXIA Keysight # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from ixnetwork_restpy.base import Base from ixnetwork_restpy.files import Files class NetTopologyTree(Base): """The NetTopologyTree class encapsulates a user managed netTopologyTree node in the ixnetwork hierarchy. An instance of the class can be obtained by accessing the NetTopologyTree property from a parent instance. The internal properties list will be empty when the property is accessed and is populated from the server using the find method. The internal properties list can be managed by the user by using the add and remove methods. """ _SDM_NAME = 'netTopologyTree' def __init__(self, parent): super(NetTopologyTree, self).__init__(parent) @property def IncludeEntryPoint(self): """if true, entry node belongs to ring topology, otherwise it is outside of ring Returns: bool """ return self._get_attribute('includeEntryPoint') @IncludeEntryPoint.setter def IncludeEntryPoint(self, value): self._set_attribute('includeEntryPoint', value) @property def LinkMultiplier(self): """number of links between two nodes Returns: number """ return self._get_attribute('linkMultiplier') @LinkMultiplier.setter def LinkMultiplier(self, value): self._set_attribute('linkMultiplier', value) @property def MaxChildPerNode(self): """Maximum children per node Returns: number """ return self._get_attribute('maxChildPerNode') @MaxChildPerNode.setter def MaxChildPerNode(self, value): self._set_attribute('maxChildPerNode', value) @property def Nodes(self): """number of nodes Returns: number """ return self._get_attribute('nodes') @Nodes.setter def Nodes(self, value): self._set_attribute('nodes', value) @property def TreeDepth(self): """Depth of the Tree, defined as length of path from root node to deepest node in the tree Returns: number """ return self._get_attribute('treeDepth') @TreeDepth.setter def TreeDepth(self, value): self._set_attribute('treeDepth', value) @property def UseTreeDepth(self): """Use Tree Depth Returns: bool """ return self._get_attribute('useTreeDepth') @UseTreeDepth.setter def UseTreeDepth(self, value): self._set_attribute('useTreeDepth', value) def update(self, IncludeEntryPoint=None, LinkMultiplier=None, MaxChildPerNode=None, Nodes=None, TreeDepth=None, UseTreeDepth=None): """Updates a child instance of netTopologyTree on the server. 
Args: IncludeEntryPoint (bool): if true, entry node belongs to ring topology, otherwise it is outside of ring LinkMultiplier (number): number of links between two nodes MaxChildPerNode (number): Maximum children per node Nodes (number): number of nodes TreeDepth (number): Depth of the Tree, defined as length of path from root node to deepest node in the tree UseTreeDepth (bool): Use Tree Depth Raises: ServerError: The server has encountered an uncategorized error condition """ self._update(locals()) def add(self, IncludeEntryPoint=None, LinkMultiplier=None, MaxChildPerNode=None, Nodes=None, TreeDepth=None, UseTreeDepth=None): """Adds a new netTopologyTree node on the server and retrieves it in this instance. Args: IncludeEntryPoint (bool): if true, entry node belongs to ring topology, otherwise it is outside of ring LinkMultiplier (number): number of links between two nodes MaxChildPerNode (number): Maximum children per node Nodes (number): number of nodes TreeDepth (number): Depth of the Tree, defined as length of path from root node to deepest node in the tree UseTreeDepth (bool): Use Tree Depth Returns: self: This instance with all currently retrieved netTopologyTree data using find and the newly added netTopologyTree data available through an iterator or index Raises: ServerError: The server has encountered an uncategorized error condition """ return self._create(locals()) def remove(self): """Deletes all the netTopologyTree data in this instance from server. Raises: NotFoundError: The requested resource does not exist on the server ServerError: The server has encountered an uncategorized error condition """ self._delete() def find(self, IncludeEntryPoint=None, LinkMultiplier=None, MaxChildPerNode=None, Nodes=None, TreeDepth=None, UseTreeDepth=None): """Finds and retrieves netTopologyTree data from the server. All named parameters support regex and can be used to selectively retrieve netTopologyTree data from the server. By default the find method takes no parameters and will retrieve all netTopologyTree data from the server. Args: IncludeEntryPoint (bool): if true, entry node belongs to ring topology, otherwise it is outside of ring LinkMultiplier (number): number of links between two nodes MaxChildPerNode (number): Maximum children per node Nodes (number): number of nodes TreeDepth (number): Depth of the Tree, defined as length of path from root node to deepest node in the tree UseTreeDepth (bool): Use Tree Depth Returns: self: This instance with matching netTopologyTree data retrieved from the server available through an iterator or index Raises: ServerError: The server has encountered an uncategorized error condition """ return self._select(locals()) def read(self, href): """Retrieves a single instance of netTopologyTree data from the server. Args: href (str): An href to the instance to be retrieved Returns: self: This instance with the netTopologyTree data from the server available through an iterator or index Raises: NotFoundError: The requested resource does not exist on the server ServerError: The server has encountered an uncategorized error condition """ return self._read(href)
f9e738f5b5b8110966032a68c9aeae66c200a6bf
7c9919126b96122c1a8c6353769e209d850e4564
/bnk_hr_leave/models/hr_leave_allocation.py
bee706f6393dea1a328430f51d060ead0e48e84c
[]
no_license
Duongnv-dev/hr
8ee34c904d481a4d0f4182c3c6bfd6c28ef25ffe
962e0edab5b824304f4a2b2dff23458135f94c3c
refs/heads/master
2023-06-19T06:54:00.337453
2021-07-13T01:53:34
2021-07-13T01:53:34
385,439,085
0
0
null
null
null
null
UTF-8
Python
false
false
210
py
from odoo import fields, models, api


class HrLeaveAllocation(models.Model):
    _inherit = 'hr.leave.allocation'
    _description = 'Inherit leave allocation'

    contract_id = fields.Many2one('hr.contract')
9184d820d21d39a76067d3a1353b4cc581849604
c52e7808ab764d822267b36a185223a172a56b5a
/tasks/1_area_of_triangle.py
815e4c3722c0eea544ca8f78c924b6611d678e2e
[]
no_license
lohitbadiger/Python-teaching-all
c41bfa2c98bab1493aba5269ab81efa6be02c73f
b7ed285b6b2df9c23fa5bf0c91381729b9ac0c6f
refs/heads/master
2020-06-02T21:13:05.276475
2019-06-17T06:55:06
2019-06-17T06:55:06
191,311,539
0
0
null
null
null
null
UTF-8
Python
false
false
314
py
# base=int(input("Enter the value for base: "))
# hight=int(input('Enter the Height :'))
# area=0.5*base*hight
# print('area of the triangle is ', area)

# one more way
x,y=input('Enter the value for x, y').split()
print('value of x is ', x)
print('value of y is ', y)
reslt= 0.5*int(x)*int(y)
print(reslt)
3e243c5c508242ec4a50d667569a9822d6418118
e5dc27e634aba70bcd1b3acea74fed84ddccf837
/plugins/modules/network_device_update_role.py
be3fbcb35812197122cd5cd3a645eff6418c11d8
[]
no_license
jejrichardson/dnacenter-ansible
264d1b52227d4bf78ad175494763cff9e7881f34
f10078ef8323bda4b542e71bcecf4f80a7fe0609
refs/heads/master
2023-01-28T09:54:57.449459
2020-12-09T23:15:49
2020-12-09T23:15:49
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,543
py
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2020, Rafael Campos <[email protected]> # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) ANSIBLE_METADATA = { "metadata_version": "0.0.1", "status": ["preview"], "supported_by": "community", } DOCUMENTATION = r""" --- module: network_device_update_role short_description: Manage NetworkDeviceUpdateRole objects of Devices description: - Updates the role of the device as access, core, distribution, border router. version_added: '1.0' author: Rafael Campos (@racampos) options: id: description: - NetworkDeviceBriefNIO's id. type: str required: True role: description: - NetworkDeviceBriefNIO's role. type: str required: True roleSource: description: - NetworkDeviceBriefNIO's roleSource. type: str required: True summary: description: - If true gets the summary. type: bool required: True requirements: - dnacentersdk seealso: # Reference by module name - module: cisco.dnac.plugins.module_utils.definitions.network_device_update_role # Reference by Internet resource - name: NetworkDeviceUpdateRole reference description: Complete reference of the NetworkDeviceUpdateRole object model. link: https://developer.cisco.com/docs/dna-center/api/1-3-3-x # Reference by Internet resource - name: NetworkDeviceUpdateRole reference description: SDK reference. link: https://dnacentersdk.readthedocs.io/en/latest/api/api.html#v2-1-1-summary """ EXAMPLES = r""" - name: update_device_role cisco.dnac.network_device_update_role: state: update # required id: SomeValue # string, required role: SomeValue # string, required roleSource: SomeValue # string, required summary: True # boolean, required """ RETURN = """ update_device_role: description: Updates the role of the device as access, core, distribution, border router. returned: changed type: dict contains: response: description: NetworkDeviceBriefNIO's response. returned: changed type: dict contains: taskId: description: It is the network device update role's taskId. returned: changed type: dict url: description: It is the network device update role's url. returned: changed type: str sample: '<url>' version: description: NetworkDeviceBriefNIO's version. returned: changed type: str sample: '1.0' """
bbaf9c29390c28dc9d8519047288393f4d9b4247
455a91b28590d0b7ee1519f6d1ee2d554db4298b
/exps/exp_22102015/positioning_covar_meta.py
2719fdaf70712a3ec6f905739e3e081ec62ac8f3
[]
no_license
yairbeer/my_repository
a038201fb12b19cb249eb98c17478b0c086a9b04
a07660b9db412c11ae3fb6835e15481e60a687ff
refs/heads/master
2021-01-10T01:20:27.010430
2015-11-19T08:47:55
2015-11-19T08:47:55
46,478,844
0
0
null
null
null
null
UTF-8
Python
false
false
4,758
py
import numpy as np import exps.exp_22102015.config_exp as cfg_exp import exps.fn_exp as fn_exp import functions as fn import matplotlib.pyplot as plt import exps.exp_22102015.track as track import exps.exp_22102015.doa as doa import itertools import matplotlib.pyplot as plt __author__ = 'YBeer' """ calculating probability density for each meta position. Then finding the density max. """ estimate_pos = [] for i in range(len(track.track_list)): # single repeat ap_direction = np.repeat(track.aps[:, 2].reshape((1, track.aps.shape[0])), doa.ap_timed_kaplan[i].shape[0], axis=0) # Converting to predicted global angle global_angle = ap_direction + doa.ap_timed_kaplan[i] # Converting predicted angles into slopes slopes = 1 / np.tan(np.radians(global_angle)) # Finding y intercept y_intercept = track.aps[:, 1] * np.ones(slopes.shape) - slopes * track.aps[:, 0] pos = np.ones((global_angle.shape[0], 2)) * np.nan covars = [] for j in range(slopes.shape[0]): valid_aps = fn_exp.find_crossing(global_angle[j, :]) if len(valid_aps) > 1: couples = list(itertools.combinations(valid_aps, 2)) prelim_pos = [] weights = [] ellipse = [] for crossing in couples: # Calculating cross-points prelim_pos.append(fn_exp.crossings(slopes[j, :], y_intercept[j, :], crossing)) # Calculate distance between exp.aps and cross point dist0, dist1 = fn_exp.crossings_dist(track.aps, crossing, prelim_pos[-1]) # Find angles from both exp.aps angle0 = global_angle[j, crossing[0]] angle1 = global_angle[j, crossing[1]] # Calculate SD covariance cur_eigen_val, cur_eigen_angles = fn_exp.sd_eigen(angle0, angle1, dist0, dist1) cur_covars = fn_exp.sd_covar(cur_eigen_val, cur_eigen_angles) covars.append(cur_covars) ellipse.append(fn_exp.create_ellipse(prelim_pos[-1], cur_eigen_val, cur_eigen_angles)) pos[j] = fn_exp.estimate_xy_covar(prelim_pos, covars) # print prelim_pos, covars # if len(valid_aps) == 3: # plt.plot(prelim_pos[0][0], prelim_pos[0][1], 'ro', ellipse[0][:, 0], ellipse[0][:, 1], 'r--', # prelim_pos[1][0], prelim_pos[1][1], 'go', ellipse[1][:, 0], ellipse[1][:, 1], 'g--', # prelim_pos[2][0], prelim_pos[2][1], 'bo', ellipse[2][:, 0], ellipse[2][:, 1], 'b--', # pos[j][0], pos[j][1], 'ko', # track.track[i][j, 0], track.track[i][j, 1], 'k^',) # plt.title(str([cur_eigen_val, cur_eigen_angles[0], cur_covars])) # plt.show() # Change NaN to last known position pos = fn.remove_nan(pos) # Remove points from outside # pos = fn.remove_outside(pos) estimate_pos.append(pos) # Holt's filtering algorithm holt = np.zeros(pos.shape) holt[0, :] = pos[0, :] holt_trend = np.zeros(pos.shape) for j in range(1, pos.shape[0]): holt[j, :] = (1 - cfg_exp.alpha) * (holt[j-1, :] + holt_trend[j-1, :]) + cfg_exp.alpha * pos[j, :] holt_trend[j, :] = cfg_exp.trend * (holt[j, :] - holt[j-1, :]) + (1 - cfg_exp.trend) * holt_trend[j-1, :] # RSME over 1st track RSME = np.sqrt(np.sum((track.track[0][:, 0] - estimate_pos[0][:, 0]) ** 2 + (track.track[0][:, 1] - estimate_pos[0][:, 1]) ** 2) / estimate_pos[0].shape[0]) print RSME # 1D plot plt.figure(1) plt.subplot(221) plt.plot(track.track_time_int[0], track.track[0][:, 0], 'r', track.track_time_int[0], estimate_pos[0][:, 0], 'b') plt.title('track 0 x(t) axis tracking') plt.ylim((-5, 130)) plt.subplot(222) plt.plot(track.track_time_int[0], track.track[0][:, 1], 'r', track.track_time_int[0], estimate_pos[0][:, 1], 'b') plt.title('track 0 y(t) axis tracking') plt.ylim((-5, 100)) plt.subplot(223) plt.plot(track.track_time_int[1], track.track[1][:, 0], 'r', track.track_time_int[1], estimate_pos[1][:, 0], 'b') 
plt.title('track 1 x(t) axis tracking') plt.ylim((-5, 130)) plt.subplot(224) plt.plot(track.track_time_int[1], track.track[1][:, 1], 'r', track.track_time_int[1], estimate_pos[1][:, 1], 'b') plt.title('track 1 y(t) axis tracking') plt.ylim((-5, 100)) plt.show() # 2D plot plt.figure(1) plt.subplot(211) plt.plot(track.track[0][:, 0], track.track[0][:, 1], 'r', estimate_pos[0][:, 0], estimate_pos[0][:, 1], 'b') plt.title('track 0 pos tracking') plt.subplot(212) plt.plot(track.track[1][:, 0], track.track[1][:, 1], 'r', estimate_pos[1][:, 0], estimate_pos[1][:, 1], 'b') plt.title('track 1 pos tracking') plt.show()
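For illustration only (not part of the record above): the positioning script smooths the estimated track with Holt's double exponential smoothing (the `holt` / `holt_trend` recurrences). A self-contained sketch of that recurrence follows; `alpha` and `trend_coef` stand in for `cfg_exp.alpha` and `cfg_exp.trend`, whose actual values are not shown in the record, so the constants here are made up.

import numpy as np

def holt_filter(obs, alpha=0.5, trend_coef=0.3):
    """Holt's double exponential smoothing, as applied to the (x, y) estimates above."""
    level = np.zeros_like(obs, dtype=float)
    trend = np.zeros_like(obs, dtype=float)
    level[0] = obs[0]
    for j in range(1, len(obs)):
        level[j] = (1 - alpha) * (level[j - 1] + trend[j - 1]) + alpha * obs[j]
        trend[j] = trend_coef * (level[j] - level[j - 1]) + (1 - trend_coef) * trend[j - 1]
    return level

# toy usage on a noisy 1-D track
t = np.arange(50, dtype=float)
noisy = t + np.random.normal(scale=2.0, size=t.shape)
smoothed = holt_filter(noisy)
print(noisy[-1], smoothed[-1])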
49aa9867f2de64f94bc8bce9aee367e02d4c0ece
bbab25f702c7bb7ce6cd894d98a121e61967d48a
/controllers/controllers.py
11968a608c0f21d15bc486b283939dc4dc43cadd
[]
no_license
butirpadi/bp_po_carton_box_report
9b83310ea010dbe848857cae74642e6993431d58
2f9bad119b1fe371bf4bc2d8ba09917c2134f86b
refs/heads/master
2023-08-30T03:49:11.564193
2021-11-01T09:23:11
2021-11-01T09:23:11
309,541,390
0
0
null
null
null
null
UTF-8
Python
false
false
935
py
# -*- coding: utf-8 -*-
from odoo import http

# class BpPoCartonBoxReport(http.Controller):
#     @http.route('/bp_po_carton_box_report/bp_po_carton_box_report/', auth='public')
#     def index(self, **kw):
#         return "Hello, world"

#     @http.route('/bp_po_carton_box_report/bp_po_carton_box_report/objects/', auth='public')
#     def list(self, **kw):
#         return http.request.render('bp_po_carton_box_report.listing', {
#             'root': '/bp_po_carton_box_report/bp_po_carton_box_report',
#             'objects': http.request.env['bp_po_carton_box_report.bp_po_carton_box_report'].search([]),
#         })

#     @http.route('/bp_po_carton_box_report/bp_po_carton_box_report/objects/<model("bp_po_carton_box_report.bp_po_carton_box_report"):obj>/', auth='public')
#     def object(self, obj, **kw):
#         return http.request.render('bp_po_carton_box_report.object', {
#             'object': obj
#         })
fa3baa33bdde98c67faa2adaf19e614694f489e8
00a9128553902cb398dc99865d36e09159285b86
/python/p3.py
1f8b36777f8dc1b69701b73902672fc201d33c6f
[]
no_license
horacepan/aoc2020
f498faa8c8bba4cabcfba6508a73074adb51d84c
f6d38f2b37245e89fb6f8eb4c55c74423626ca04
refs/heads/main
2023-02-06T03:16:21.530346
2020-12-20T21:33:53
2020-12-20T21:33:53
317,640,340
0
0
null
null
null
null
UTF-8
Python
false
false
775
py
import pdb

def solve(mat, dx, dy):
    rows = len(mat)
    cols = len(mat[0])
    loc_x = 0
    loc_y = 0
    ntrees = 0

    while 1:
        if loc_x < rows:
            ntrees += int(mat[loc_x][loc_y] == '#')
        else:
            break
        loc_x += dx
        loc_y = (loc_y + dy) % cols

    return ntrees

def main():
    fname = '../data/p3.txt'
    with open(fname, 'r') as f:
        mat = []
        for line in f.readlines():
            mat.append(line.strip())

    a1 = solve(mat, 1, 1)
    a2 = solve(mat, 1, 3)
    a3 = solve(mat, 1, 5)
    a4 = solve(mat, 1, 7)
    a5 = solve(mat, 2, 1)
    prod = a1 * a2 * a3 * a4 * a5
    print("Part one:", a2)
    print("Part two:", prod)

if __name__ == '__main__':
    main()
f0c5883130fd3be48d07d17a2b6ee9e5550cafc8
7914768f745808e372617ccf39aeebe44790a842
/apps/arena/views.py
6267d4a70f7116881c4e24988903d8dea0b939c0
[]
no_license
POISON-B/gz_v1.0.0
edee99b9a25594ea4dff8b04d8779a21fa154cc6
6e7c23f6c097efd2923562ab9c64843cd91e02c2
refs/heads/master
2020-03-27T01:31:38.676597
2018-09-23T08:47:05
2018-09-23T08:47:05
145,719,283
0
0
null
2018-09-23T05:56:32
2018-08-22T14:18:20
JavaScript
UTF-8
Python
false
false
9,276
py
from django.shortcuts import render # Create your views here. from rest_framework.response import Response from rest_framework import mixins from rest_framework.pagination import PageNumberPagination from rest_framework import viewsets from rest_framework import filters from django_filters.rest_framework import DjangoFilterBackend from apps.utils.tools import format_time from rest_framework.permissions import IsAuthenticated from rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.authentication import SessionAuthentication from apps.users.models import UserProfile from apps.user_relationship.models import UserAchievement from .serializers import * from apps.utils.permissions import * from .models import * from users.serializers import UserProfileSerializers #排行榜页面 class TotalRankViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取总排行信息 """ queryset = UserAchievement.objects.order_by('total_ranking') serializer_class = TotalRankSerializers class WeekRankViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取周排行信息 首先查询用户信息 获取用户所在城市 查询该城市下用户排行信息 """ permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = WeekRankSerializers def get_queryset(self): return UserAchievement.objects.filter(user__city=self.request.user.city).order_by('monthly_rankings') class DayRankViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取日排行信息 首先查询用户信息 获取用户班级 查询该班级下用户排行信息 """ permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = ClassRankSerializers def get_queryset(self): return UserAchievement.objects.filter(user__in_class=self.request.user.in_class).order_by('class_rankings') #闯关模式 class PassPagination(PageNumberPagination): page_size = 10 page_size_query_param = 'page_size' page_query_param = "page" max_page_size = 100 class PassListViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取关卡信息 http://127.0.0.1:8000/pass/访问所有关卡 内含分页信息 http://127.0.0.1:8000/pass/2/ 按关卡id访问某一个关卡 """ # throttle_classes = (UserRateThrottle, ) queryset = Pass.objects.all().order_by('pass_no') serializer_class = PassSerializer pagination_class = PassPagination class UserPassListViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.CreateModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): """ 获取用户关卡信息 http://127.0.0.1:8000/pass/访问用户所有关卡 内含分页信息 http://127.0.0.1:8000/pass/2/ 按关卡id访问某一个关卡 """ # throttle_classes = (UserRateThrottle, ) queryset = Pass.objects.all().order_by('pass_no') serializer_class = UserPassSerializer pagination_class = PassPagination permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) def get_queryset(self): return UserPass.objects.filter(user_id=self.request.user) def update(self, request, *args, **kwargs): import os from django.utils import timezone #获取用户的代码 #保存代码为xxx.java文件 #编译代码 os.popen("javac test.java") #执行代码 os.popen("java mypack.test").read() #将执行结果和答案比较 cur_user_pass = UserPass.objects.get(id=kwargs['pk']) cur_user_pass.submit_num += 1 #提交次数加1 request.data['submit_num'] = cur_user_pass.submit_num request.data['submit_time'] = timezone.now() code = request.data['user_submit'] print(code) with open('test.java', 'w+') as f: f.write(code) ret 
= os.popen("javac test.java") print(ret) result = os.popen("java test").read() print(result) if result and result[0:5] in cur_user_pass.user_pass.pass_answer: request.data['pass_score'] = 100 #cur_user_pass.pass_score = 100 else: request.data['pass_score'] = 0 return mixins.UpdateModelMixin.update(self, request, args, kwargs) """ pk页面 前端查询逻辑如下: pk首页 首先按照各种挑战模式 查询挑战者表 相关信息显示在首页上部 如果挑战者表中的挑战状态为 发起挑战 按钮显示迎战 否则显示继续挑战 再查询被挑战者信息 显示可以接受挑战的人员 点击迎战按钮后 向后端发起请求 生成pk信息 生成后 利用返回的pk信息构建pk详情页面 点击继续挑战按钮后 直接获取当前进行的pk信息 并显示pk详情页 """ class ChallengerTimeModList(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取挑战者列表信息 竞技场对战模式上方发起挑战者人员数据 时间赛 """ # throttle_classes = (UserRateThrottle, ) permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = ChallengerSerializer def get_queryset(self): return Challenger.objects.filter(be_challenged=self.request.user).filter(pk_mode=1) class ChallengerSpeedModList(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取挑战者列表信息 竞技场对战模式上方发起挑战者人员数据 速度赛 """ # throttle_classes = (UserRateThrottle, ) permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = ChallengerSerializer def get_queryset(self): return Challenger.objects.filter(be_challenged=self.request.user).filter(pk_mode=2) class ChallengerProgramModList(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取挑战者列表信息 竞技场对战模式上方发起挑战者人员数据 编程赛 """ # throttle_classes = (UserRateThrottle, ) permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = ChallengerSerializer def get_queryset(self): return Challenger.objects.filter(be_challenged=self.request.user).filter(pk_mode=3) class WantChallengeredList(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 获取希望被挑战的人员列表 竞技场对战模式下方候选挑战者显示数据 """ # throttle_classes = (UserRateThrottle, ) permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = UserProfileSerializers def get_queryset(self): return UserProfile.objects.filter(want_be_challenged=True)[0:5] class LaunchChallenge(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.CreateModelMixin, viewsets.GenericViewSet): """ 获取用户发起的挑战列表 详情 创建新的挑战 """ permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = ChallengerSerializer def get_queryset(self): return Challenger.objects.filter(challenger=self.request.user) class PkDetail(mixins.RetrieveModelMixin, mixins.CreateModelMixin, viewsets.GenericViewSet): """ pk详情信息 bb 实现创建pk详情页的接口 """ # throttle_classes = (UserRateThrottle, ) permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) serializer_class = PkDetailSerializers def get_queryset(self): return UserPkDetail.objects.get(user=self.request.user) def create(self, request, *args, **kwargs): return mixins.CreateModelMixin.create(self, request, args, kwargs) class TeamCompPagination(PageNumberPagination): """ 团赛信息分页器 """ page_size = 5 page_size_query_param = 'page_size' page_query_param = "page" max_page_size = 30 class TeamCompList(mixins.ListModelMixin, 
mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ 团赛列表页 """ queryset = TeamComp.objects.all() serializer_class = TeamCompSerializers pagination_class = TeamCompPagination class JoinTeamComp(mixins.CreateModelMixin, viewsets.GenericViewSet): """ 加入团赛 """ serializer_class = UserTeamCompSerializers
1ff5e6418d6c7022185c32908f90cd6a14694df6
396787df1b472ddfab7d934c149b150352342f03
/python_fundemental/128_linked_list_sorting.py
712365f8902750b846816a2c36bcdd9923bc47b4
[]
no_license
Deanwinger/python_project
a47b50a9dfc88853a5557da090b0a2ac3f3ce191
8c0c2a8bcd51825e6902e4d03dabbaf6f303ba83
refs/heads/master
2022-07-10T16:41:56.853165
2019-07-21T13:08:48
2019-07-21T13:08:48
107,653,001
0
0
null
null
null
null
UTF-8
Python
false
false
29
py
# leetcode 148. Sort List (sort a linked list)
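The file above contains only a pointer to LeetCode 148 (sorting a singly linked list). Purely as an illustration — this code is not part of the original file, and all names are hypothetical — a standard O(n log n) merge-sort solution might look like:

class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next

def sort_list(head):
    """Merge sort on a singly linked list."""
    if head is None or head.next is None:
        return head
    # split the list in half with slow/fast pointers
    slow, fast = head, head.next
    while fast and fast.next:
        slow, fast = slow.next, fast.next.next
    mid, slow.next = slow.next, None
    left, right = sort_list(head), sort_list(mid)
    # merge the two sorted halves
    dummy = tail = ListNode()
    while left and right:
        if left.val <= right.val:
            tail.next, left = left, left.next
        else:
            tail.next, right = right, right.next
        tail = tail.next
    tail.next = left or right
    return dummy.next

# toy usage
node = sort_list(ListNode(4, ListNode(2, ListNode(1, ListNode(3)))))
out = []
while node:
    out.append(node.val)
    node = node.next
print(out)  # [1, 2, 3, 4]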
c40632c9ff9da232e070e4cdd547dfd5cc32f0d4
31e1261588e953d4e702a76e1c5306a8a97cff04
/monk/gluon/datasets/paths.py
c4656a51daa42e9c5cca5e8dd8767763c42c03c2
[ "Apache-2.0" ]
permissive
Varun0801/monk_v1
cff6e8390a9248208ba825eb0046119f4f284ab1
559ff37669d88fd2cfaaf9d22ad84cd6cef9d176
refs/heads/master
2022-04-17T05:19:53.372524
2020-04-11T13:11:35
2020-04-11T13:11:35
255,113,878
1
0
null
2020-04-12T15:35:25
2020-04-12T15:35:24
null
UTF-8
Python
false
false
4,546
py
from gluon.datasets.imports import * from system.imports import * @accepts(dict, [str, list, bool], [float, int, bool], [str, list, bool], str, post_trace=True) @TraceFunction(trace_args=False, trace_rv=False) def set_dataset_train_path(system_dict, path, split, path_to_csv, delimiter): ''' Set dataset train path Args: system_dict (dict): System dictionary containing all the variables path (str, list): Dataset folder path 1) String : For dataset with no validation set 2) List: For dataset with validation set in order [train_set, val_set] split (float): Indicating train validation split Division happens as follows: train - total dataset * split * 100 val - total dataset * (1-split) * 100 path_to_csv (str, list): Path to csv pointing to images delimiter (str): Delimiter for the csv path provided Returns: dict: Updated System dictionary ''' dataset_type = None; dataset_train_path = None; dataset_val_path = None; csv_train = None; csv_val = None; train_val_split = None; if(path_to_csv): if(type(path) == str): dataset_type = "csv_train"; csv_train = path_to_csv; dataset_train_path = path; train_val_split = split; label_type = find_label_type(path_to_csv) elif(type(path) == list): dataset_type = "csv_train-val"; csv_train = path_to_csv[0]; csv_val = path_to_csv[1]; dataset_train_path = path[0]; dataset_val_path = path[1]; train_val_split = None; label_type = find_label_type(path_to_csv[0]) else: if(type(path) == str): dataset_type = "train"; dataset_train_path = path; train_val_split = split; label_type = "single"; elif(type(path) == list): dataset_type = "train-val"; dataset_train_path = path[0]; dataset_val_path = path[1]; train_val_split = None; label_type = "single"; system_dict["dataset"]["dataset_type"] = dataset_type; system_dict["dataset"]["train_path"] = dataset_train_path; system_dict["dataset"]["val_path"] = dataset_val_path; system_dict["dataset"]["csv_train"] = csv_train; system_dict["dataset"]["csv_val"] = csv_val; system_dict["dataset"]["params"]["train_val_split"] = train_val_split; system_dict["dataset"]["params"]["delimiter"] = delimiter; system_dict["dataset"]["label_type"] = label_type; return system_dict; @accepts(str, post_trace=True) @TraceFunction(trace_args=True, trace_rv=True) def find_label_type(csv_file): ''' Find label type - single or multiple Args: csv_file (str): Path to training csv file Returns: str: Label Type ''' label_type = "single"; df = pd.read_csv(csv_file); columns = df.columns; for i in range(len(df)): label = str(df[columns[1]][i]); if(len(label.split(" ")) > 1): label_type = "multiple"; break; return label_type; @accepts(dict, [str, bool], [str, bool], str, post_trace=True) @TraceFunction(trace_args=False, trace_rv=False) def set_dataset_test_path(system_dict, path, path_to_csv, delimiter): ''' Set dataset train path Args: system_dict (dict): System dictionary containing all the variables path (str, list): Dataset folder path 1) String : For dataset with no validation set 2) List: For dataset with validation set in order [train_set, val_set] path_to_csv (str, list): Path to csv pointing to images delimiter (str): Delimiter for the csv path provided Returns: dict: Updated System dictionary ''' dataset_test_type = None; dataset_test_path = None; csv_test = None; if(path_to_csv): csv_test = path_to_csv; dataset_test_path = path; dataset_test_type = "csv"; else: dataset_test_path = path; dataset_test_type = "foldered"; system_dict["dataset"]["test_path"] = dataset_test_path; system_dict["dataset"]["csv_test"] = csv_test; 
system_dict["dataset"]["params"]["test_delimiter"] = delimiter; system_dict["dataset"]["params"]["dataset_test_type"] = dataset_test_type; return system_dict;
4e23c014e2a3e9ed57bddae2ec3e70d526a28c65
4fc9c61450de38ce003e20e0452af3e636f28be3
/language_model/layer/attention.py
e64cc42d1eaf11d71b2cc146934438e793eb8709
[ "Apache-2.0" ]
permissive
SunYanCN/language_model_tf
a6c453c3c3aa1b34ac240cff94674e9eaa679ec9
d39f335e5410d2bd7a23760dedbfcca36338d591
refs/heads/master
2020-05-02T03:58:59.634898
2019-05-24T20:31:52
2019-05-24T20:31:52
177,740,244
0
0
Apache-2.0
2019-12-30T06:17:43
2019-03-26T07:56:25
Python
UTF-8
Python
false
false
62,450
py
import numpy as np import tensorflow as tf from util.default_util import * from util.language_model_util import * from layer.basic import * __all__ = ["Attention", "MaxAttention", "CoAttention", "GatedAttention", "MultiHeadAttention"] def _create_attention_matrix(src_unit_dim, trg_unit_dim, attention_unit_dim, attention_score_type, regularizer, random_seed, trainable, scope="att_matrix"): """create attetnion matrix""" scope = "{0}/{1}".format(scope, attention_score_type) if attention_score_type == "dot": attention_matrix = [] elif attention_score_type == "scaled_dot": attention_matrix = [] elif attention_score_type == "linear": attention_matrix = _create_linear_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope) elif attention_score_type == "bilinear": attention_matrix = _create_bilinear_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope) elif attention_score_type == "nonlinear": attention_matrix = _create_nonlinear_attention_matrix(src_unit_dim, trg_unit_dim, attention_unit_dim, regularizer, random_seed, trainable, scope) elif attention_score_type == "linear_plus": attention_matrix = _create_linear_plus_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope) elif attention_score_type == "nonlinear_plus": attention_matrix = _create_nonlinear_plus_attention_matrix(src_unit_dim, trg_unit_dim, attention_unit_dim, regularizer, random_seed, trainable, scope) elif attention_score_type == "trilinear": attention_matrix = _create_trilinear_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope) else: raise ValueError("unsupported attention score type {0}".format(attention_score_type)) return attention_matrix def _create_linear_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope="linear"): """create linear attetnion matrix""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) linear_src_weight = tf.get_variable("{0}/src_weight".format(scope), shape=[1, src_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) linear_trg_weight = tf.get_variable("{0}/trg_weight".format(scope), shape=[1, trg_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) attention_matrix = [linear_src_weight, linear_trg_weight] return attention_matrix def _create_bilinear_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope="bilinear"): """create bilinear attetnion matrix""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) bilinear_weight = tf.get_variable("{0}/weight".format(scope), shape=[src_unit_dim, trg_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) attention_matrix = [bilinear_weight] return attention_matrix def _create_nonlinear_attention_matrix(src_unit_dim, trg_unit_dim, attention_unit_dim, regularizer, random_seed, trainable, scope="nonlinear"): """create nonlinear attetnion matrix""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) bias_initializer = create_variable_initializer("zero") pre_nonlinear_src_weight = tf.get_variable("{0}/pre/src_weight".format(scope), shape=[attention_unit_dim, src_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) pre_nonlinear_trg_weight = 
tf.get_variable("{0}/pre/trg_weight".format(scope), shape=[attention_unit_dim, trg_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) pre_nonlinear_bias = tf.get_variable("{0}/pre/bias".format(scope), shape=[attention_unit_dim], initializer=bias_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) post_nonlinear_weight = tf.get_variable("{0}/post/weight".format(scope), shape=[1, attention_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) attention_matrix = [pre_nonlinear_src_weight, pre_nonlinear_trg_weight, pre_nonlinear_bias, post_nonlinear_weight] return attention_matrix def _create_linear_plus_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope="linear_plus"): """create linear plus attetnion matrix""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) if src_unit_dim != trg_unit_dim: raise ValueError("src dim {0} and trg dim must be the same for linear plus attention".format(src_unit_dim, trg_unit_dim)) else: mul_unit_dim = src_unit_dim linear_plus_src_weight = tf.get_variable("{0}/src_weight".format(scope), shape=[1, src_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) linear_plus_trg_weight = tf.get_variable("{0}/trg_weight".format(scope), shape=[1, trg_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) linear_plus_mul_weight = tf.get_variable("{0}/mul_weight".format(scope), shape=[1, mul_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) attention_matrix = [linear_plus_src_weight, linear_plus_trg_weight, linear_plus_mul_weight] return attention_matrix def _create_nonlinear_plus_attention_matrix(src_unit_dim, trg_unit_dim, attention_unit_dim, regularizer, random_seed, trainable, scope="nonlinear_plus"): """create nonlinear plus attetnion matrix""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) bias_initializer = create_variable_initializer("zero") if src_unit_dim != trg_unit_dim: raise ValueError("src dim {0} and trg dim must be the same for nonlinear plus attention".format(src_unit_dim, trg_unit_dim)) else: mul_unit_dim = src_unit_dim pre_nonlinear_plus_src_weight = tf.get_variable("{0}/pre/src_weight".format(scope), shape=[attention_unit_dim, src_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) pre_nonlinear_plus_trg_weight = tf.get_variable("{0}/pre/trg_weight".format(scope), shape=[attention_unit_dim, trg_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) pre_nonlinear_plus_mul_weight = tf.get_variable("{0}/pre/mul_weight".format(scope), shape=[attention_unit_dim, mul_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) pre_nonlinear_plus_bias = tf.get_variable("{0}/pre/bias".format(scope), shape=[attention_unit_dim], initializer=bias_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) post_nonlinear_plus_weight = tf.get_variable("{0}/post/weight".format(scope), shape=[1, attention_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) attention_matrix = [pre_nonlinear_plus_src_weight, pre_nonlinear_plus_trg_weight, 
pre_nonlinear_plus_mul_weight, pre_nonlinear_plus_bias, post_nonlinear_plus_weight] return attention_matrix def _create_trilinear_attention_matrix(src_unit_dim, trg_unit_dim, regularizer, random_seed, trainable, scope="trilinear"): """create trilinear attetnion matrix""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) if src_unit_dim != trg_unit_dim: raise ValueError("src dim {0} and trg dim must be the same for trilinear attention".format(src_unit_dim, trg_unit_dim)) else: mul_unit_dim = src_unit_dim trilinear_src_weight = tf.get_variable("{0}/src_weight".format(scope), shape=[src_unit_dim, 1], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) trilinear_trg_weight = tf.get_variable("{0}/trg_weight".format(scope), shape=[trg_unit_dim, 1], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) trilinear_mul_weight = tf.get_variable("{0}/mul_weight".format(scope), shape=[1, 1, mul_unit_dim], initializer=weight_initializer, regularizer=regularizer, trainable=trainable, dtype=tf.float32) attention_matrix = [trilinear_src_weight, trilinear_trg_weight, trilinear_mul_weight] return attention_matrix def _generate_attention_score(input_src_data, input_trg_data, attention_matrix, attention_score_type): """generate attention score""" if attention_score_type == "dot": input_attention_score = _generate_dot_attention_score(input_src_data, input_trg_data) elif attention_score_type == "scaled_dot": input_attention_score = _generate_scaled_dot_attention_score(input_src_data, input_trg_data) elif attention_score_type == "linear": input_attention_score = _generate_linear_attention_score(input_src_data, input_trg_data, attention_matrix) elif attention_score_type == "bilinear": input_attention_score = _generate_bilinear_attention_score(input_src_data, input_trg_data, attention_matrix) elif attention_score_type == "nonlinear": input_attention_score = _generate_nonlinear_attention_score(input_src_data, input_trg_data, attention_matrix) elif attention_score_type == "linear_plus": input_attention_score = _generate_linear_plus_attention_score(input_src_data, input_trg_data, attention_matrix) elif attention_score_type == "nonlinear_plus": input_attention_score = _generate_nonlinear_plus_attention_score(input_src_data, input_trg_data, attention_matrix) elif attention_score_type == "trilinear": input_attention_score = _generate_trilinear_attention_score(input_src_data, input_trg_data, attention_matrix) else: raise ValueError("unsupported attention score type {0}".format(attention_score_type)) return input_attention_score def _generate_dot_attention_score(input_src_data, input_trg_data): """generate dot-product attention score""" input_attention = tf.matmul(input_src_data, input_trg_data, transpose_b=True) return input_attention def _generate_scaled_dot_attention_score(input_src_data, input_trg_data): """generate scaled dot-product attention score""" src_unit_dim = tf.shape(input_src_data)[2] input_attention = tf.matmul(input_src_data, input_trg_data, transpose_b=True) input_attention = input_attention / tf.sqrt(tf.cast(src_unit_dim, dtype=tf.float32)) return input_attention def _generate_linear_attention_score(input_src_data, input_trg_data, attention_matrix): """generate linear attention score""" input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) batch_size = input_src_shape[0] src_max_length = input_src_shape[1] trg_max_length = input_trg_shape[1] src_unit_dim = 
input_src_shape[2] trg_unit_dim = input_trg_shape[2] linear_src_weight = attention_matrix[0] linear_trg_weight = attention_matrix[1] input_src_data = tf.reshape(input_src_data, shape=[-1, src_unit_dim]) input_src_data = tf.matmul(input_src_data, linear_src_weight, transpose_b=True) input_src_data = tf.reshape(input_src_data, shape=[batch_size, src_max_length, 1, -1]) input_trg_data = tf.reshape(input_trg_data, shape=[-1, trg_unit_dim]) input_trg_data = tf.matmul(input_trg_data, linear_trg_weight, transpose_b=True) input_trg_data = tf.reshape(input_trg_data, shape=[batch_size, 1, trg_max_length, -1]) input_src_data = tf.tile(input_src_data, multiples=[1, 1, trg_max_length, 1]) input_trg_data = tf.tile(input_trg_data, multiples=[1, src_max_length, 1, 1]) input_attention = input_src_data + input_trg_data input_attention = tf.reshape(input_attention, shape=[batch_size, src_max_length, trg_max_length]) return input_attention def _generate_bilinear_attention_score(input_src_data, input_trg_data, attention_matrix): """generate bilinear attention score""" input_src_shape = tf.shape(input_src_data) batch_size = input_src_shape[0] src_max_length = input_src_shape[1] src_unit_dim = input_src_shape[2] bilinear_weight = attention_matrix[0] input_src_data = tf.reshape(input_src_data, shape=[-1, src_unit_dim]) input_src_data = tf.matmul(input_src_data, bilinear_weight) input_src_data = tf.reshape(input_src_data, shape=[batch_size, src_max_length, -1]) input_attention = tf.matmul(input_src_data, input_trg_data, transpose_b=True) return input_attention def _generate_nonlinear_attention_score(input_src_data, input_trg_data, attention_matrix): """generate linear attention score""" input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) batch_size = input_src_shape[0] src_max_length = input_src_shape[1] trg_max_length = input_trg_shape[1] src_unit_dim = input_src_shape[2] trg_unit_dim = input_trg_shape[2] pre_nonlinear_src_weight = attention_matrix[0] pre_nonlinear_trg_weight = attention_matrix[1] pre_nonlinear_bias = tf.reshape(attention_matrix[2], shape=[1, 1, 1, -1]) post_nonlinear_weight = attention_matrix[3] input_src_data = tf.reshape(input_src_data, shape=[-1, src_unit_dim]) input_src_data = tf.matmul(input_src_data, pre_nonlinear_src_weight, transpose_b=True) input_src_data = tf.reshape(input_src_data, shape=[batch_size, src_max_length, 1, -1]) input_trg_data = tf.reshape(input_trg_data, shape=[-1, trg_unit_dim]) input_trg_data = tf.matmul(input_trg_data, pre_nonlinear_trg_weight, transpose_b=True) input_trg_data = tf.reshape(input_trg_data, shape=[batch_size, 1, trg_max_length, -1]) input_src_data = tf.tile(input_src_data, multiples=[1, 1, trg_max_length, 1]) input_trg_data = tf.tile(input_trg_data, multiples=[1, src_max_length, 1, 1]) input_attention = input_src_data + input_trg_data input_attention = tf.nn.tanh(input_attention + pre_nonlinear_bias) attention_dim = tf.shape(input_attention)[-1] input_attention = tf.reshape(input_attention, shape=[-1, attention_dim]) input_attention = tf.matmul(input_attention, post_nonlinear_weight, transpose_b=True) input_attention = tf.reshape(input_attention, shape=[batch_size, src_max_length, trg_max_length]) return input_attention def _generate_linear_plus_attention_score(input_src_data, input_trg_data, attention_matrix): """generate linear plus attention score""" input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) batch_size = input_src_shape[0] src_max_length = input_src_shape[1] trg_max_length = 
input_trg_shape[1] src_unit_dim = input_src_shape[2] trg_unit_dim = input_trg_shape[2] mul_unit_dim = src_unit_dim linear_plus_src_weight = attention_matrix[0] linear_plus_trg_weight = attention_matrix[1] linear_plus_mul_weight = attention_matrix[2] input_src_data = tf.expand_dims(input_src_data, axis=2) input_trg_data = tf.expand_dims(input_trg_data, axis=1) input_src_data = tf.tile(input_src_data, multiples=[1, 1, trg_max_length, 1]) input_trg_data = tf.tile(input_trg_data, multiples=[1, src_max_length, 1, 1]) input_mul_data = input_src_data * input_trg_data input_src_data = tf.reshape(input_src_data, shape=[-1, src_unit_dim]) input_src_data = tf.matmul(input_src_data, linear_plus_src_weight, transpose_b=True) input_trg_data = tf.reshape(input_trg_data, shape=[-1, trg_unit_dim]) input_trg_data = tf.matmul(input_trg_data, linear_plus_trg_weight, transpose_b=True) input_mul_data = tf.reshape(input_mul_data, shape=[-1, mul_unit_dim]) input_mul_data = tf.matmul(input_mul_data, linear_plus_mul_weight, transpose_b=True) input_attention = input_src_data + input_trg_data + input_mul_data input_attention = tf.reshape(input_attention, shape=[batch_size, src_max_length, trg_max_length]) return input_attention def _generate_nonlinear_plus_attention_score(input_src_data, input_trg_data, attention_matrix): """generate nonlinear plus attention score""" input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) batch_size = input_src_shape[0] src_max_length = input_src_shape[1] trg_max_length = input_trg_shape[1] src_unit_dim = input_src_shape[2] trg_unit_dim = input_trg_shape[2] mul_unit_dim = src_unit_dim pre_nonlinear_plus_src_weight = attention_matrix[0] pre_nonlinear_plus_trg_weight = attention_matrix[1] pre_nonlinear_plus_mul_weight = attention_matrix[2] pre_nonlinear_plus_bias = tf.reshape(attention_matrix[3], shape=[1, 1, 1, -1]) post_nonlinear_plus_weight = attention_matrix[4] input_src_data = tf.reshape(input_src_data, shape=[batch_size, src_max_length, 1, -1]) input_trg_data = tf.reshape(input_trg_data, shape=[batch_size, 1, trg_max_length, -1]) input_src_data = tf.tile(input_src_data, multiples=[1, 1, trg_max_length, 1]) input_trg_data = tf.tile(input_trg_data, multiples=[1, src_max_length, 1, 1]) input_mul_data = input_src_data * input_trg_data input_src_data = tf.reshape(input_src_data, shape=[-1, src_unit_dim]) input_src_data = tf.matmul(input_src_data, pre_nonlinear_plus_src_weight, transpose_b=True) input_trg_data = tf.reshape(input_trg_data, shape=[-1, trg_unit_dim]) input_trg_data = tf.matmul(input_trg_data, pre_nonlinear_plus_trg_weight, transpose_b=True) input_mul_data = tf.reshape(input_mul_data, shape=[-1, mul_unit_dim]) input_mul_data = tf.matmul(input_mul_data, pre_nonlinear_plus_mul_weight, transpose_b=True) input_attention = input_src_data + input_trg_data + input_mul_data input_attention = tf.nn.tanh(input_attention + pre_nonlinear_plus_bias) input_attention = tf.matmul(input_attention, post_nonlinear_plus_weight, transpose_b=True) input_attention = tf.reshape(input_attention, shape=[batch_size, src_max_length, trg_max_length]) return input_attention def _generate_trilinear_attention_score(input_src_data, input_trg_data, attention_matrix): """generate trilinear attention score""" input_src_shape = tf.shape(input_src_data) # [batch_size, src_len, d] input_trg_shape = tf.shape(input_trg_data) # [batch_size, trg_len, d] batch_size = input_src_shape[0] src_max_length = input_src_shape[1] trg_max_length = input_trg_shape[1] src_unit_dim = 
input_src_shape[2] trg_unit_dim = input_trg_shape[2] mul_unit_dim = src_unit_dim trilinear_src_weight = attention_matrix[0] # [d, 1] trilinear_trg_weight = attention_matrix[1] # [d, 1] trilinear_mul_weight = attention_matrix[2] # [1, 1, d] input_src_part = tf.reshape(input_src_data, shape=[-1, src_unit_dim]) # [-1, d] input_trg_part = tf.reshape(input_trg_data, shape=[-1, trg_unit_dim]) # [-1, d] input_src_part = tf.matmul(input_src_part, trilinear_src_weight) # [-1, 1] input_trg_part = tf.matmul(input_trg_part, trilinear_trg_weight) # [-1, 1] input_src_part = tf.reshape(input_src_part, shape=[batch_size, src_max_length, 1]) # [batch_size, src_len, 1] input_trg_part = tf.reshape(input_trg_part, shape=[batch_size, 1, trg_max_length]) # [batch_size, 1, trg_len] input_src_score = tf.tile(input_src_part, multiples=[1, 1, trg_max_length]) # [batch_size, src_len, trg_len] input_trg_score = tf.tile(input_trg_part, multiples=[1, src_max_length, 1]) # [batch_size, src_len, trg_len] input_src_part = input_src_data * trilinear_mul_weight # [batch_size, src_len, d] input_trg_part = tf.transpose(input_trg_data, perm=[0, 2, 1]) # [batch_size, d, trg_len] input_mul_score = tf.matmul(input_src_part, input_trg_part) # [batch_size, src_len, trg_len] input_attention = input_src_score + input_trg_score + input_mul_score # [batch_size, src_len, trg_len] return input_attention def _generate_attention_mask(input_src_mask, input_trg_mask, remove_diag=False): """generate attention mask""" input_mask = tf.matmul(input_src_mask, input_trg_mask, transpose_b=True) if remove_diag == True: src_max_length = tf.shape(input_src_mask)[1] trg_max_length = tf.shape(input_trg_mask)[1] input_mask = input_mask * (1 - tf.eye(src_max_length, trg_max_length)) return input_mask def _create_projection_layer(unit_dim, hidden_activation, use_bias, regularizer, random_seed, trainable, name): """create projection layer""" weight_initializer = create_variable_initializer("glorot_uniform", random_seed) bias_initializer = create_variable_initializer("zero") projection_layer = tf.layers.Dense(units=unit_dim, activation=hidden_activation, use_bias=use_bias, kernel_initializer=weight_initializer, bias_initializer=bias_initializer, kernel_regularizer=regularizer, bias_regularizer=regularizer, trainable=trainable, name=name) return projection_layer class Attention(object): """attention layer""" def __init__(self, src_dim, trg_dim, att_dim, score_type, dropout, att_dropout=0.0, layer_dropout=0.0, layer_norm=False, residual_connect=False, is_self=False, external_matrix=None, num_gpus=1, default_gpu_id=0, regularizer=None, random_seed=0, trainable=True, scope="attention"): """initialize attention layer""" self.src_dim = src_dim self.trg_dim = trg_dim self.att_dim = att_dim self.score_type = score_type self.dropout = dropout self.att_dropout = att_dropout self.layer_dropout = layer_dropout self.layer_norm = layer_norm self.residual_connect = residual_connect self.is_self = is_self self.regularizer = regularizer self.random_seed = random_seed self.trainable = trainable self.scope = scope self.device_spec = get_device_spec(default_gpu_id, num_gpus) with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): if external_matrix == None: self.attention_matrix = _create_attention_matrix(self.src_dim, self.trg_dim, self.att_dim, self.score_type, self.regularizer, self.random_seed, self.trainable, "att_matrix") else: self.attention_matrix = external_matrix self.dropout_layer = Dropout(rate=self.dropout, num_gpus=num_gpus, 
                default_gpu_id=default_gpu_id, random_seed=self.random_seed)
            self.att_dropout_layer = Dropout(rate=self.att_dropout, num_gpus=num_gpus,
                default_gpu_id=default_gpu_id, random_seed=self.random_seed, scope="att_dropout")

            if self.layer_norm == True:
                self.src_norm_layer = LayerNorm(layer_dim=self.src_dim, num_gpus=num_gpus,
                    default_gpu_id=default_gpu_id, regularizer=self.regularizer,
                    trainable=self.trainable, scope="src_layer_norm")

                if self.is_self == True:
                    self.trg_norm_layer = self.src_norm_layer
                else:
                    self.trg_norm_layer = LayerNorm(layer_dim=self.trg_dim, num_gpus=num_gpus,
                        default_gpu_id=default_gpu_id, regularizer=self.regularizer,
                        trainable=self.trainable, scope="trg_layer_norm")

    def __call__(self, input_src_data, input_trg_data, input_src_mask, input_trg_mask):
        """call attention layer"""
        with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec):
            input_src_shape = tf.shape(input_src_data)
            input_trg_shape = tf.shape(input_trg_data)
            input_src_mask_shape = tf.shape(input_src_mask)
            input_trg_mask_shape = tf.shape(input_trg_mask)
            src_shape_size = len(input_src_data.get_shape().as_list())
            trg_shape_size = len(input_trg_data.get_shape().as_list())

            if src_shape_size > 3:
                input_src_data = tf.reshape(input_src_data, shape=tf.concat([[-1], input_src_shape[-2:]], axis=0))
                input_src_mask = tf.reshape(input_src_mask, shape=tf.concat([[-1], input_src_mask_shape[-2:]], axis=0))

            if trg_shape_size > 3:
                input_trg_data = tf.reshape(input_trg_data, shape=tf.concat([[-1], input_trg_shape[-2:]], axis=0))
                input_trg_mask = tf.reshape(input_trg_mask, shape=tf.concat([[-1], input_trg_mask_shape[-2:]], axis=0))

            input_src_attention = input_src_data
            input_trg_attention = input_trg_data
            input_src_attention_mask = input_src_mask
            input_trg_attention_mask = input_trg_mask

            if self.layer_norm == True:
                input_src_attention, input_src_attention_mask = self.src_norm_layer(input_src_attention, input_src_attention_mask)
                input_trg_attention, input_trg_attention_mask = self.trg_norm_layer(input_trg_attention, input_trg_attention_mask)

            input_attention_score = _generate_attention_score(input_src_attention,
                input_trg_attention, self.attention_matrix, self.score_type)
            input_attention_mask = _generate_attention_mask(input_src_attention_mask,
                input_trg_attention_mask, self.is_self)
            input_attention_score = input_attention_score * input_attention_mask
            input_attention_weight = softmax_with_mask(input_attention_score,
                input_attention_mask, axis=-1) * input_attention_mask
            input_attention_weight, _ = self.att_dropout_layer(input_attention_weight, input_attention_mask)

            input_attention = tf.matmul(input_attention_weight, input_trg_attention)
            input_attention, _ = self.dropout_layer(input_attention, input_src_mask)

            # expose the raw (masked) attention score and its mask alongside the attended output
            output_attention_score = input_attention_score
            output_score_mask = input_attention_mask

            if self.residual_connect == True and self.is_self == True:
                output_attention, output_mask = tf.cond(tf.random_uniform([]) < self.layer_dropout,
                    lambda: (input_src_data, input_src_mask),
                    lambda: (input_attention + input_src_data, input_src_mask))
                output_attention = output_attention * output_mask
            else:
                output_attention = input_attention * input_src_mask
                output_mask = input_src_mask

            if src_shape_size > 3:
                output_attention = tf.reshape(output_attention,
                    shape=tf.concat([input_src_shape[:-2], input_trg_shape[-2:]], axis=0))
                output_mask = tf.reshape(output_mask,
                    shape=tf.concat([input_src_mask_shape[:-2], input_trg_mask_shape[-2:]], axis=0))

            return output_attention, output_mask, output_attention_score, output_score_mask

    def get_attention_matrix(self):
        return self.attention_matrix

class MaxAttention(object):
"""max-attention layer""" def __init__(self, src_dim, trg_dim, att_dim, score_type, dropout, att_dropout=0.0, layer_dropout=0.0, layer_norm=False, residual_connect=False, is_self=False, external_matrix=None, num_gpus=1, default_gpu_id=0, regularizer=None, random_seed=0, trainable=True, scope="max_att"): """initialize max-attention layer""" self.src_dim = src_dim self.trg_dim = trg_dim self.att_dim = att_dim self.score_type = score_type self.dropout = dropout self.att_dropout = att_dropout self.layer_dropout = layer_dropout self.layer_norm = layer_norm self.residual_connect = residual_connect self.is_self = is_self self.regularizer = regularizer self.random_seed = random_seed self.trainable = trainable self.scope = scope self.device_spec = get_device_spec(default_gpu_id, num_gpus) with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): if external_matrix == None: self.attention_matrix = _create_attention_matrix(self.src_dim, self.trg_dim, self.att_dim, self.score_type, self.regularizer, self.random_seed, self.trainable, "att_matrix") else: self.attention_matrix = external_matrix self.dropout_layer = Dropout(rate=self.dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed) self.att_dropout_layer = Dropout(rate=self.att_dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed, scope="att_dropout") if self.layer_norm == True: self.src_norm_layer = LayerNorm(layer_dim=self.src_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="src_layer_norm") if self.is_self == True: self.trg_norm_layer = self.src_norm_layer else: self.trg_norm_layer = LayerNorm(layer_dim=self.trg_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="trg_layer_norm") def __call__(self, input_src_data, input_trg_data, input_src_mask, input_trg_mask): """call max-attention layer""" with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) input_src_mask_shape = tf.shape(input_src_mask) input_trg_mask_shape = tf.shape(input_trg_mask) src_shape_size = len(input_src_data.get_shape().as_list()) trg_shape_size = len(input_trg_data.get_shape().as_list()) if src_shape_size > 3: input_src_data = tf.reshape(input_src_data, shape=tf.concat([[-1], input_src_shape[-2:]], axis=0)) input_src_mask = tf.reshape(input_src_mask, shape=tf.concat([[-1], input_src_mask_shape[-2:]], axis=0)) if trg_shape_size > 3: input_trg_data = tf.reshape(input_trg_data, shape=tf.concat([[-1], input_trg_shape[-2:]], axis=0)) input_trg_mask = tf.reshape(input_trg_mask, shape=tf.concat([[-1], input_trg_mask_shape[-2:]], axis=0)) input_src_attention = input_src_data input_src_attention_mask = input_src_mask input_trg_attention = input_trg_data input_trg_attention_mask = input_trg_mask if self.layer_norm == True: input_src_attention, input_src_attention_mask = self.src_norm_layer(input_src_attention, input_src_attention_mask) input_trg_attention, input_trg_attention_mask = self.trg_norm_layer(input_trg_attention, input_trg_attention_mask) input_attention_score = _generate_attention_score(input_src_attention, input_trg_attention, self.attention_matrix, self.score_type) input_attention_mask = _generate_attention_mask(input_src_attention_mask, input_trg_attention_mask, self.is_self) input_attention_score = 
tf.transpose(tf.reduce_max(input_attention_score, axis=-1, keepdims=True), perm=[0, 2, 1]) input_attention_mask = tf.transpose(tf.reduce_max(input_attention_mask, axis=-1, keepdims=True), perm=[0, 2, 1]) input_attention_score = input_attention_score * input_attention_mask input_attention_weight = softmax_with_mask(input_attention_score, input_attention_mask, axis=-1) * input_attention_mask input_attention_weight, _ = self.att_dropout_layer(input_attention_weight, input_attention_mask) input_attention = tf.matmul(input_attention_weight, input_src_attention) input_attention, _ = self.dropout_layer(input_attention, input_src_mask) src_max_length = tf.shape(input_src_attention)[1] input_attention = tf.tile(input_attention, multiples=[1, src_max_length, 1]) if self.residual_connect == True and self.is_self == True: output_attention, output_mask = tf.cond(tf.random_uniform([]) < self.layer_dropout, lambda: (input_src_data, input_src_mask), lambda: (input_attention + input_src_data, input_src_mask)) output_attention = output_attention * output_mask else: output_attention = input_attention * input_src_mask output_mask = input_src_mask if src_shape_size > 3: output_attention = tf.reshape(output_attention, shape=tf.concat([input_src_shape[:-2], input_trg_shape[-2:]], axis=0)) output_mask = tf.reshape(output_mask, shape=tf.concat([input_src_mask_shape[:-2], input_trg_mask_shape[-2:]], axis=0)) return output_attention, output_mask def get_attention_matrix(self): return self.attention_matrix class CoAttention(object): """co-attention layer""" def __init__(self, src_dim, trg_dim, att_dim, score_type, dropout, att_dropout=0.0, layer_dropout=0.0, layer_norm=False, residual_connect=False, is_self=False, external_matrix=None, num_gpus=1, default_gpu_id=0, regularizer=None, random_seed=0, trainable=True, scope="co_att"): """initialize co-attention layer""" self.src_dim = src_dim self.trg_dim = trg_dim self.att_dim = att_dim self.score_type = score_type self.dropout = dropout self.att_dropout = att_dropout self.layer_dropout = layer_dropout self.layer_norm = layer_norm self.residual_connect = residual_connect self.is_self = is_self self.regularizer = regularizer self.random_seed = random_seed self.trainable = trainable self.scope = scope self.device_spec = get_device_spec(default_gpu_id, num_gpus) with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): if external_matrix == None: self.attention_matrix = _create_attention_matrix(self.src_dim, self.trg_dim, self.att_dim, self.score_type, self.regularizer, self.random_seed, self.trainable, "att_matrix") else: self.attention_matrix = external_matrix self.dropout_layer = Dropout(rate=self.dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed) self.s2t_att_dropout_layer = Dropout(rate=self.att_dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed, scope="s2t_att_dropout") self.t2s_att_dropout_layer = Dropout(rate=self.att_dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed, scope="t2s_att_dropout") if self.layer_norm == True: self.src_norm_layer = LayerNorm(layer_dim=self.src_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="src_layer_norm") if self.is_self == True: self.trg_norm_layer = self.src_norm_layer else: self.trg_norm_layer = LayerNorm(layer_dim=self.trg_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, 
trainable=self.trainable, scope="trg_layer_norm") def __call__(self, input_src_data, input_trg_data, input_src_mask, input_trg_mask): """call co-attention layer""" with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) input_src_mask_shape = tf.shape(input_src_mask) input_trg_mask_shape = tf.shape(input_trg_mask) src_shape_size = len(input_src_data.get_shape().as_list()) trg_shape_size = len(input_trg_data.get_shape().as_list()) if src_shape_size > 3: input_src_data = tf.reshape(input_src_data, shape=tf.concat([[-1], input_src_shape[-2:]], axis=0)) input_src_mask = tf.reshape(input_src_mask, shape=tf.concat([[-1], input_src_mask_shape[-2:]], axis=0)) if trg_shape_size > 3: input_trg_data = tf.reshape(input_trg_data, shape=tf.concat([[-1], input_trg_shape[-2:]], axis=0)) input_trg_mask = tf.reshape(input_trg_mask, shape=tf.concat([[-1], input_trg_mask_shape[-2:]], axis=0)) input_src_attention = input_src_data input_src_attention_mask = input_src_mask input_trg_attention = input_trg_data input_trg_attention_mask = input_trg_mask if self.layer_norm == True: input_src_attention, input_src_attention_mask = self.src_norm_layer(input_src_attention, input_src_attention_mask) input_trg_attention, input_trg_attention_mask = self.trg_norm_layer(input_trg_attention, input_trg_attention_mask) input_attention_score = _generate_attention_score(input_src_attention, input_trg_attention, self.attention_matrix, self.score_type) input_attention_mask = _generate_attention_mask(input_src_attention_mask, input_trg_attention_mask, self.is_self) input_s2t_att_score = input_attention_score input_s2t_att_mask = input_attention_mask input_s2t_att_score = input_s2t_att_score * input_s2t_att_mask input_t2s_att_score = tf.transpose(input_attention_score, perm=[0, 2, 1]) input_t2s_att_mask = tf.transpose(input_attention_mask, perm=[0, 2, 1]) input_t2s_att_score = input_t2s_att_score * input_t2s_att_mask input_s2t_att_weight = softmax_with_mask(input_s2t_att_score, input_s2t_att_mask, axis=-1) * input_s2t_att_mask input_s2t_att_weight, _ = self.s2t_att_dropout_layer(input_s2t_att_weight, input_s2t_att_mask) input_t2s_att_weight = softmax_with_mask(input_t2s_att_score, input_t2s_att_mask, axis=-1) * input_t2s_att_mask input_t2s_att_weight, _ = self.t2s_att_dropout_layer(input_t2s_att_weight, input_t2s_att_mask) input_attention_weight = tf.matmul(input_s2t_att_weight, input_t2s_att_weight) input_attention = tf.matmul(input_attention_weight, input_src_attention) input_attention, _ = self.dropout_layer(input_attention, input_src_mask) if self.residual_connect == True and self.is_self == True: output_attention, output_mask = tf.cond(tf.random_uniform([]) < self.layer_dropout, lambda: (input_src_data, input_src_mask), lambda: (input_attention + input_src_data, input_src_mask)) output_attention = output_attention * output_mask else: output_attention = input_attention * input_src_mask output_mask = input_src_mask if src_shape_size > 3: output_attention = tf.reshape(output_attention, shape=tf.concat([input_src_shape[:-2], input_trg_shape[-2:]], axis=0)) output_mask = tf.reshape(output_mask, shape=tf.concat([input_src_mask_shape[:-2], input_trg_mask_shape[-2:]], axis=0)) return output_attention, output_mask def get_attention_matrix(self): return self.attention_matrix class GatedAttention(object): """gated-attention layer""" def __init__(self, src_dim, trg_dim, att_dim, score_type, dropout, att_dropout=0.0, 
layer_dropout=0.0, layer_norm=False, residual_connect=False, is_self=False, external_matrix=None, num_gpus=1, default_gpu_id=0, regularizer=None, random_seed=0, trainable=True, scope="gated_att"): """initialize gated-attention layer""" self.src_dim = src_dim self.trg_dim = trg_dim self.att_dim = att_dim self.score_type = score_type self.dropout = dropout self.att_dropout = att_dropout self.layer_dropout = layer_dropout self.layer_norm = layer_norm self.residual_connect = residual_connect self.is_self = is_self self.regularizer = regularizer self.random_seed = random_seed self.trainable = trainable self.scope = scope self.device_spec = get_device_spec(default_gpu_id, num_gpus) with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): if external_matrix == None: self.attention_matrix = _create_attention_matrix(self.src_dim, self.trg_dim, self.att_dim, self.score_type, self.regularizer, self.random_seed, self.trainable, "att_matrix") else: self.attention_matrix = external_matrix self.dropout_layer = Dropout(rate=self.dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed) self.att_dropout_layer = Dropout(rate=self.att_dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed, scope="att_dropout") if self.layer_norm == True: self.src_norm_layer = LayerNorm(layer_dim=self.src_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="src_layer_norm") if self.is_self == True: self.trg_norm_layer = self.src_norm_layer else: self.trg_norm_layer = LayerNorm(layer_dim=self.trg_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="trg_layer_norm") weight_initializer = create_variable_initializer("glorot_uniform") gate_activation = create_activation_function("sigmoid") if self.residual_connect == True and self.is_self == True: self.gate_layer = tf.layers.Dense(units=self.trg_dim, activation=gate_activation, kernel_initializer=weight_initializer, kernel_regularizer=self.regularizer, trainable=self.trainable) else: self.gate_layer = tf.layers.Dense(units=self.src_dim+self.trg_dim, activation=gate_activation, kernel_initializer=weight_initializer, kernel_regularizer=self.regularizer, trainable=self.trainable) def __call__(self, input_src_data, input_trg_data, input_src_mask, input_trg_mask): """call gated-attention layer""" with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) input_src_mask_shape = tf.shape(input_src_mask) input_trg_mask_shape = tf.shape(input_trg_mask) src_shape_size = len(input_src_data.get_shape().as_list()) trg_shape_size = len(input_trg_data.get_shape().as_list()) if src_shape_size > 3: input_src_data = tf.reshape(input_src_data, shape=tf.concat([[-1], input_src_shape[-2:]], axis=0)) input_src_mask = tf.reshape(input_src_mask, shape=tf.concat([[-1], input_src_mask_shape[-2:]], axis=0)) if trg_shape_size > 3: input_trg_data = tf.reshape(input_trg_data, shape=tf.concat([[-1], input_trg_shape[-2:]], axis=0)) input_trg_mask = tf.reshape(input_trg_mask, shape=tf.concat([[-1], input_trg_mask_shape[-2:]], axis=0)) input_src_attention = input_src_data input_src_attention_mask = input_src_mask input_trg_attention = input_trg_data input_trg_attention_mask = input_trg_mask if self.layer_norm == True: input_src_attention, input_src_attention_mask = 
self.src_norm_layer(input_src_attention, input_src_attention_mask) input_trg_attention, input_trg_attention_mask = self.trg_norm_layer(input_trg_attention, input_trg_attention_mask) input_attention_score = _generate_attention_score(input_src_attention, input_trg_attention, self.attention_matrix, self.score_type) input_attention_mask = _generate_attention_mask(input_src_attention_mask, input_trg_attention_mask, self.is_self) input_attention_score = input_attention_score * input_attention_mask input_attention_weight = softmax_with_mask(input_attention_score, input_attention_mask, axis=-1) * input_attention_mask input_attention_weight, _ = self.att_dropout_layer(input_attention_weight, input_attention_mask) input_attention = tf.matmul(input_attention_weight, input_trg_attention) input_attention, _ = self.dropout_layer(input_attention, input_src_mask) if self.residual_connect == True and self.is_self == True: output_attention, output_mask = tf.cond(tf.random_uniform([]) < self.layer_dropout, lambda: (input_src_data, input_src_mask), lambda: (self.gate_layer(input_attention) * input_attention + input_src_data, input_src_mask)) output_attention = output_attention * output_mask else: input_attention = tf.concat([input_src_data, input_attention], axis=-1) gate = self.gate_layer(input_attention) output_attention = gate * input_attention * input_src_mask output_mask = input_src_mask if src_shape_size > 3: output_attention = tf.reshape(output_attention, shape=tf.concat([input_src_shape[:-2], input_trg_shape[-2:]], axis=0)) output_mask = tf.reshape(output_mask, shape=tf.concat([input_src_mask_shape[:-2], input_trg_mask_shape[-2:]], axis=0)) return output_attention, output_mask def get_attention_matrix(self): return self.attention_matrix class MultiHeadAttention(object): """multi-head attention layer""" def __init__(self, src_dim, trg_dim, att_dim, num_head, score_type, dropout, att_dropout=0.0, layer_dropout=0.0, layer_norm=False, residual_connect=False, is_self=False, external_matrix=None, num_gpus=1, default_gpu_id=0, regularizer=None, random_seed=0, trainable=True, scope="multi_head_att"): """initialize multi-head attention layer""" self.src_dim = src_dim self.trg_dim = trg_dim self.att_dim = att_dim self.num_head = num_head self.score_type = score_type self.dropout = dropout self.att_dropout = att_dropout self.layer_dropout = layer_dropout self.layer_norm = layer_norm self.residual_connect = residual_connect self.is_self = is_self self.regularizer = regularizer self.random_seed = random_seed self.trainable = trainable self.scope = scope self.device_spec = get_device_spec(default_gpu_id, num_gpus) with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): if external_matrix == None: query_dim = self.att_dim key_dim = self.att_dim value_dim = self.trg_dim self.projection_layer = { "query": _create_projection_layer(query_dim, None, False, self.regularizer, self.random_seed, self.trainable, "query_projection"), "key": _create_projection_layer(key_dim, None, False, self.regularizer, self.random_seed, self.trainable, "key_projection"), "value": _create_projection_layer(value_dim, None, False, self.regularizer, self.random_seed, self.trainable, "value_projection") } if self.att_dim % self.num_head != 0 or self.att_dim / self.num_head == 0: raise ValueError("att dim {0} and # head {1} mis-match".format(self.att_dim, self.num_head)) head_dim = self.att_dim / self.num_head self.attention_matrix = _create_attention_matrix(head_dim, head_dim, head_dim, self.score_type, 
self.regularizer, self.random_seed, self.trainable, "att_matrix") else: self.projection_layer = external_matrix["projection"] self.attention_matrix = external_matrix["attention"] self.dropout_layer = Dropout(rate=self.dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed) self.att_dropout_layer = Dropout(rate=self.att_dropout, num_gpus=num_gpus, default_gpu_id=default_gpu_id, random_seed=self.random_seed, scope="att_dropout") if self.layer_norm == True: self.src_norm_layer = LayerNorm(layer_dim=self.src_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="src_layer_norm") if self.is_self == True: self.trg_norm_layer = self.src_norm_layer else: self.trg_norm_layer = LayerNorm(layer_dim=self.trg_dim, num_gpus=num_gpus, default_gpu_id=default_gpu_id, regularizer=self.regularizer, trainable=self.trainable, scope="trg_layer_norm") def __call__(self, input_src_data, input_trg_data, input_src_mask, input_trg_mask): """call multi-head attention layer""" with tf.variable_scope(self.scope, reuse=tf.AUTO_REUSE), tf.device(self.device_spec): input_src_shape = tf.shape(input_src_data) input_trg_shape = tf.shape(input_trg_data) input_src_mask_shape = tf.shape(input_src_mask) input_trg_mask_shape = tf.shape(input_trg_mask) src_shape_size = len(input_src_data.get_shape().as_list()) trg_shape_size = len(input_trg_data.get_shape().as_list()) if src_shape_size > 3: input_src_data = tf.reshape(input_src_data, shape=tf.concat([[-1], input_src_shape[-2:]], axis=0)) input_src_mask = tf.reshape(input_src_mask, shape=tf.concat([[-1], input_src_mask_shape[-2:]], axis=0)) if trg_shape_size > 3: input_trg_data = tf.reshape(input_trg_data, shape=tf.concat([[-1], input_trg_shape[-2:]], axis=0)) input_trg_mask = tf.reshape(input_trg_mask, shape=tf.concat([[-1], input_trg_mask_shape[-2:]], axis=0)) input_src_attention = input_src_data input_trg_attention = input_trg_data input_src_attention_mask = input_src_mask input_trg_attention_mask = input_trg_mask input_src_attention_shape = tf.shape(input_src_attention) input_trg_attention_shape = tf.shape(input_trg_attention) if self.layer_norm == True: input_src_attention, input_src_attention_mask = self.src_norm_layer(input_src_attention, input_src_attention_mask) input_trg_attention, input_trg_attention_mask = self.trg_norm_layer(input_trg_attention, input_trg_attention_mask) input_query_attention = self.projection_layer["query"](input_src_attention) input_key_attention = self.projection_layer["key"](input_trg_attention) input_value_attention = self.projection_layer["value"](input_trg_attention) input_query_attention = self.__split_multi_head(input_query_attention, input_src_attention_shape[0], input_src_attention_shape[1], self.num_head) input_key_attention = self.__split_multi_head(input_key_attention, input_trg_attention_shape[0], input_trg_attention_shape[1], self.num_head) input_value_attention = self.__split_multi_head(input_value_attention, input_trg_attention_shape[0], input_trg_attention_shape[1], self.num_head) input_query_attention_mask = self.__split_multi_head_mask(input_src_attention_mask, input_src_attention_shape[0], input_src_attention_shape[1], self.num_head) input_key_attention_mask = self.__split_multi_head_mask(input_trg_attention_mask, input_trg_attention_shape[0], input_trg_attention_shape[1], self.num_head) input_attention_score = _generate_attention_score(input_query_attention, input_key_attention, self.attention_matrix, self.score_type) 
            input_attention_mask = _generate_attention_mask(input_query_attention_mask,
                input_key_attention_mask, self.is_self)
            input_attention_score = input_attention_score * input_attention_mask
            input_attention_weight = softmax_with_mask(input_attention_score,
                input_attention_mask, axis=-1) * input_attention_mask
            input_attention_weight, _ = self.att_dropout_layer(input_attention_weight, input_attention_mask)

            input_attention = tf.matmul(input_attention_weight, input_value_attention)
            input_attention = self.__merge_multi_head(input_attention,
                input_src_attention_shape[0], input_src_attention_shape[1], self.num_head)
            input_attention, _ = self.dropout_layer(input_attention, input_src_mask)

            if self.residual_connect == True and self.is_self == True:
                output_attention, output_mask = tf.cond(tf.random_uniform([]) < self.layer_dropout,
                    lambda: (input_src_data, input_src_mask),
                    lambda: (input_attention + input_src_data, input_src_mask))
                output_attention = output_attention * output_mask
            else:
                output_attention = input_attention * input_src_mask
                output_mask = input_src_mask

            if src_shape_size > 3:
                output_attention = tf.reshape(output_attention,
                    shape=tf.concat([input_src_shape[:-2], input_trg_shape[-2:]], axis=0))
                output_mask = tf.reshape(output_mask,
                    shape=tf.concat([input_src_mask_shape[:-2], input_trg_mask_shape[-2:]], axis=0))

            return output_attention, output_mask

    def __split_multi_head(self, input_data, batch_size, max_length, num_head):
        """split multi-head"""
        input_split_data = tf.reshape(input_data, shape=[batch_size, max_length, num_head, -1]) # [batch_size, max_len, num_head, -1]
        input_split_data = tf.transpose(input_split_data, perm=[0,2,1,3]) # [batch_size, num_head, max_len, -1]
        input_split_data = tf.reshape(input_split_data, shape=[batch_size * num_head, max_length, -1]) # [batch_size * num_head, max_len, -1]
        return input_split_data

    def __split_multi_head_mask(self, input_mask, batch_size, max_length, num_head):
        """split multi-head mask"""
        input_split_mask = tf.expand_dims(input_mask, axis=1) # [batch_size, 1, max_len, 1]
        input_split_mask = tf.tile(input_split_mask, multiples=[1, num_head, 1, 1]) # [batch_size, num_head, max_len, 1]
        input_split_mask = tf.reshape(input_split_mask, shape=[batch_size * num_head, max_length, 1]) # [batch_size * num_head, max_len, 1]
        return input_split_mask

    def __merge_multi_head(self, input_data, batch_size, max_length, num_head):
        """merge multi-head"""
        input_merge_data = tf.reshape(input_data, shape=[batch_size, num_head, max_length, -1]) # [batch_size, num_head, max_len, -1]
        input_merge_data = tf.transpose(input_merge_data, perm=[0,2,1,3]) # [batch_size, max_len, num_head, -1]
        input_merge_data = tf.reshape(input_merge_data, shape=[batch_size, max_length, -1]) # [batch_size, max_len, -1]
        return input_merge_data

    def get_projection_matrix(self):
        # the projection layers are stored in self.projection_layer (no self.projection_matrix attribute exists)
        return self.projection_layer
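The trilinear score in this module never materializes the [batch_size, src_len, trg_len, dim] interaction tensor that the linear_plus and nonlinear_plus variants build explicitly: it tiles two rank-1 projections and computes the elementwise-product term as a single batched matmul with the weight folded into the source side. The following is a minimal NumPy sketch of that same decomposition, using broadcasting instead of tiling; the shapes, seed, and the w_src/w_trg/w_mul names are illustrative only and are not part of the module above.

import numpy as np

# illustrative shapes and weights only -- not taken from the module above
batch_size, src_len, trg_len, dim = 2, 5, 7, 16
rng = np.random.default_rng(0)
src = rng.standard_normal((batch_size, src_len, dim))   # analogue of input_src_data
trg = rng.standard_normal((batch_size, trg_len, dim))   # analogue of input_trg_data
w_src = rng.standard_normal((dim, 1))                   # analogue of trilinear_src_weight
w_trg = rng.standard_normal((dim, 1))                   # analogue of trilinear_trg_weight
w_mul = rng.standard_normal((dim,))                     # analogue of trilinear_mul_weight

# decomposed score: no [batch, src_len, trg_len, dim] tensor is ever built
src_part = src @ w_src                               # [batch, src_len, 1]
trg_part = (trg @ w_trg).transpose(0, 2, 1)          # [batch, 1, trg_len]
mul_part = (src * w_mul) @ trg.transpose(0, 2, 1)    # [batch, src_len, trg_len]
score_fast = src_part + trg_part + mul_part          # broadcasts to [batch, src_len, trg_len]

# reference computation with the explicit 4-D interaction tensor
interaction = src[:, :, None, :] * trg[:, None, :, :]   # [batch, src_len, trg_len, dim]
score_ref = src_part + trg_part + interaction @ w_mul
assert np.allclose(score_fast, score_ref)

The decomposition keeps peak memory per example at O(src_len * trg_len) instead of O(src_len * trg_len * dim), which is what makes the trilinear variant practical for long sequence pairs.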
26990ac7c79c866a255710ff1fb2e98dd0243201
48e124e97cc776feb0ad6d17b9ef1dfa24e2e474
/sdk/python/pulumi_azure_native/containerservice/v20210901/get_agent_pool.py
63e6af29ba922a7d0043ccb2fb5e7028f8ebff9b
[ "BSD-3-Clause", "Apache-2.0" ]
permissive
bpkgoud/pulumi-azure-native
0817502630062efbc35134410c4a784b61a4736d
a3215fe1b87fba69294f248017b1591767c2b96c
refs/heads/master
2023-08-29T22:39:49.984212
2021-11-15T12:43:41
2021-11-15T12:43:41
null
0
0
null
null
null
null
UTF-8
Python
false
false
28542
py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from . import outputs __all__ = [ 'GetAgentPoolResult', 'AwaitableGetAgentPoolResult', 'get_agent_pool', 'get_agent_pool_output', ] @pulumi.output_type class GetAgentPoolResult: """ Agent Pool. """ def __init__(__self__, availability_zones=None, count=None, creation_data=None, enable_auto_scaling=None, enable_encryption_at_host=None, enable_fips=None, enable_node_public_ip=None, enable_ultra_ssd=None, gpu_instance_profile=None, id=None, kubelet_config=None, kubelet_disk_type=None, linux_os_config=None, max_count=None, max_pods=None, min_count=None, mode=None, name=None, node_image_version=None, node_labels=None, node_public_ip_prefix_id=None, node_taints=None, orchestrator_version=None, os_disk_size_gb=None, os_disk_type=None, os_sku=None, os_type=None, pod_subnet_id=None, power_state=None, provisioning_state=None, proximity_placement_group_id=None, scale_down_mode=None, scale_set_eviction_policy=None, scale_set_priority=None, spot_max_price=None, tags=None, type=None, upgrade_settings=None, vm_size=None, vnet_subnet_id=None, workload_runtime=None): if availability_zones and not isinstance(availability_zones, list): raise TypeError("Expected argument 'availability_zones' to be a list") pulumi.set(__self__, "availability_zones", availability_zones) if count and not isinstance(count, int): raise TypeError("Expected argument 'count' to be a int") pulumi.set(__self__, "count", count) if creation_data and not isinstance(creation_data, dict): raise TypeError("Expected argument 'creation_data' to be a dict") pulumi.set(__self__, "creation_data", creation_data) if enable_auto_scaling and not isinstance(enable_auto_scaling, bool): raise TypeError("Expected argument 'enable_auto_scaling' to be a bool") pulumi.set(__self__, "enable_auto_scaling", enable_auto_scaling) if enable_encryption_at_host and not isinstance(enable_encryption_at_host, bool): raise TypeError("Expected argument 'enable_encryption_at_host' to be a bool") pulumi.set(__self__, "enable_encryption_at_host", enable_encryption_at_host) if enable_fips and not isinstance(enable_fips, bool): raise TypeError("Expected argument 'enable_fips' to be a bool") pulumi.set(__self__, "enable_fips", enable_fips) if enable_node_public_ip and not isinstance(enable_node_public_ip, bool): raise TypeError("Expected argument 'enable_node_public_ip' to be a bool") pulumi.set(__self__, "enable_node_public_ip", enable_node_public_ip) if enable_ultra_ssd and not isinstance(enable_ultra_ssd, bool): raise TypeError("Expected argument 'enable_ultra_ssd' to be a bool") pulumi.set(__self__, "enable_ultra_ssd", enable_ultra_ssd) if gpu_instance_profile and not isinstance(gpu_instance_profile, str): raise TypeError("Expected argument 'gpu_instance_profile' to be a str") pulumi.set(__self__, "gpu_instance_profile", gpu_instance_profile) if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if kubelet_config and not isinstance(kubelet_config, dict): raise TypeError("Expected argument 'kubelet_config' to be a dict") pulumi.set(__self__, "kubelet_config", kubelet_config) if kubelet_disk_type and not isinstance(kubelet_disk_type, str): raise TypeError("Expected argument 'kubelet_disk_type' to be 
a str") pulumi.set(__self__, "kubelet_disk_type", kubelet_disk_type) if linux_os_config and not isinstance(linux_os_config, dict): raise TypeError("Expected argument 'linux_os_config' to be a dict") pulumi.set(__self__, "linux_os_config", linux_os_config) if max_count and not isinstance(max_count, int): raise TypeError("Expected argument 'max_count' to be a int") pulumi.set(__self__, "max_count", max_count) if max_pods and not isinstance(max_pods, int): raise TypeError("Expected argument 'max_pods' to be a int") pulumi.set(__self__, "max_pods", max_pods) if min_count and not isinstance(min_count, int): raise TypeError("Expected argument 'min_count' to be a int") pulumi.set(__self__, "min_count", min_count) if mode and not isinstance(mode, str): raise TypeError("Expected argument 'mode' to be a str") pulumi.set(__self__, "mode", mode) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if node_image_version and not isinstance(node_image_version, str): raise TypeError("Expected argument 'node_image_version' to be a str") pulumi.set(__self__, "node_image_version", node_image_version) if node_labels and not isinstance(node_labels, dict): raise TypeError("Expected argument 'node_labels' to be a dict") pulumi.set(__self__, "node_labels", node_labels) if node_public_ip_prefix_id and not isinstance(node_public_ip_prefix_id, str): raise TypeError("Expected argument 'node_public_ip_prefix_id' to be a str") pulumi.set(__self__, "node_public_ip_prefix_id", node_public_ip_prefix_id) if node_taints and not isinstance(node_taints, list): raise TypeError("Expected argument 'node_taints' to be a list") pulumi.set(__self__, "node_taints", node_taints) if orchestrator_version and not isinstance(orchestrator_version, str): raise TypeError("Expected argument 'orchestrator_version' to be a str") pulumi.set(__self__, "orchestrator_version", orchestrator_version) if os_disk_size_gb and not isinstance(os_disk_size_gb, int): raise TypeError("Expected argument 'os_disk_size_gb' to be a int") pulumi.set(__self__, "os_disk_size_gb", os_disk_size_gb) if os_disk_type and not isinstance(os_disk_type, str): raise TypeError("Expected argument 'os_disk_type' to be a str") pulumi.set(__self__, "os_disk_type", os_disk_type) if os_sku and not isinstance(os_sku, str): raise TypeError("Expected argument 'os_sku' to be a str") pulumi.set(__self__, "os_sku", os_sku) if os_type and not isinstance(os_type, str): raise TypeError("Expected argument 'os_type' to be a str") pulumi.set(__self__, "os_type", os_type) if pod_subnet_id and not isinstance(pod_subnet_id, str): raise TypeError("Expected argument 'pod_subnet_id' to be a str") pulumi.set(__self__, "pod_subnet_id", pod_subnet_id) if power_state and not isinstance(power_state, dict): raise TypeError("Expected argument 'power_state' to be a dict") pulumi.set(__self__, "power_state", power_state) if provisioning_state and not isinstance(provisioning_state, str): raise TypeError("Expected argument 'provisioning_state' to be a str") pulumi.set(__self__, "provisioning_state", provisioning_state) if proximity_placement_group_id and not isinstance(proximity_placement_group_id, str): raise TypeError("Expected argument 'proximity_placement_group_id' to be a str") pulumi.set(__self__, "proximity_placement_group_id", proximity_placement_group_id) if scale_down_mode and not isinstance(scale_down_mode, str): raise TypeError("Expected argument 'scale_down_mode' to be a str") pulumi.set(__self__, "scale_down_mode", 
scale_down_mode) if scale_set_eviction_policy and not isinstance(scale_set_eviction_policy, str): raise TypeError("Expected argument 'scale_set_eviction_policy' to be a str") pulumi.set(__self__, "scale_set_eviction_policy", scale_set_eviction_policy) if scale_set_priority and not isinstance(scale_set_priority, str): raise TypeError("Expected argument 'scale_set_priority' to be a str") pulumi.set(__self__, "scale_set_priority", scale_set_priority) if spot_max_price and not isinstance(spot_max_price, float): raise TypeError("Expected argument 'spot_max_price' to be a float") pulumi.set(__self__, "spot_max_price", spot_max_price) if tags and not isinstance(tags, dict): raise TypeError("Expected argument 'tags' to be a dict") pulumi.set(__self__, "tags", tags) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) if upgrade_settings and not isinstance(upgrade_settings, dict): raise TypeError("Expected argument 'upgrade_settings' to be a dict") pulumi.set(__self__, "upgrade_settings", upgrade_settings) if vm_size and not isinstance(vm_size, str): raise TypeError("Expected argument 'vm_size' to be a str") pulumi.set(__self__, "vm_size", vm_size) if vnet_subnet_id and not isinstance(vnet_subnet_id, str): raise TypeError("Expected argument 'vnet_subnet_id' to be a str") pulumi.set(__self__, "vnet_subnet_id", vnet_subnet_id) if workload_runtime and not isinstance(workload_runtime, str): raise TypeError("Expected argument 'workload_runtime' to be a str") pulumi.set(__self__, "workload_runtime", workload_runtime) @property @pulumi.getter(name="availabilityZones") def availability_zones(self) -> Optional[Sequence[str]]: """ The list of Availability zones to use for nodes. This can only be specified if the AgentPoolType property is 'VirtualMachineScaleSets'. """ return pulumi.get(self, "availability_zones") @property @pulumi.getter def count(self) -> Optional[int]: """ Number of agents (VMs) to host docker containers. Allowed values must be in the range of 0 to 1000 (inclusive) for user pools and in the range of 1 to 1000 (inclusive) for system pools. The default value is 1. """ return pulumi.get(self, "count") @property @pulumi.getter(name="creationData") def creation_data(self) -> Optional['outputs.CreationDataResponse']: """ CreationData to be used to specify the source Snapshot ID if the node pool will be created/upgraded using a snapshot. """ return pulumi.get(self, "creation_data") @property @pulumi.getter(name="enableAutoScaling") def enable_auto_scaling(self) -> Optional[bool]: """ Whether to enable auto-scaler """ return pulumi.get(self, "enable_auto_scaling") @property @pulumi.getter(name="enableEncryptionAtHost") def enable_encryption_at_host(self) -> Optional[bool]: """ This is only supported on certain VM sizes and in certain Azure regions. For more information, see: https://docs.microsoft.com/azure/aks/enable-host-encryption """ return pulumi.get(self, "enable_encryption_at_host") @property @pulumi.getter(name="enableFIPS") def enable_fips(self) -> Optional[bool]: """ See [Add a FIPS-enabled node pool](https://docs.microsoft.com/azure/aks/use-multiple-node-pools#add-a-fips-enabled-node-pool-preview) for more details. """ return pulumi.get(self, "enable_fips") @property @pulumi.getter(name="enableNodePublicIP") def enable_node_public_ip(self) -> Optional[bool]: """ Some scenarios may require nodes in a node pool to receive their own dedicated public IP addresses. 
A common scenario is for gaming workloads, where a console needs to make a direct connection to a cloud virtual machine to minimize hops. For more information see [assigning a public IP per node](https://docs.microsoft.com/azure/aks/use-multiple-node-pools#assign-a-public-ip-per-node-for-your-node-pools). The default is false. """ return pulumi.get(self, "enable_node_public_ip") @property @pulumi.getter(name="enableUltraSSD") def enable_ultra_ssd(self) -> Optional[bool]: """ Whether to enable UltraSSD """ return pulumi.get(self, "enable_ultra_ssd") @property @pulumi.getter(name="gpuInstanceProfile") def gpu_instance_profile(self) -> Optional[str]: """ GPUInstanceProfile to be used to specify GPU MIG instance profile for supported GPU VM SKU. """ return pulumi.get(self, "gpu_instance_profile") @property @pulumi.getter def id(self) -> str: """ Resource ID. """ return pulumi.get(self, "id") @property @pulumi.getter(name="kubeletConfig") def kubelet_config(self) -> Optional['outputs.KubeletConfigResponse']: """ The Kubelet configuration on the agent pool nodes. """ return pulumi.get(self, "kubelet_config") @property @pulumi.getter(name="kubeletDiskType") def kubelet_disk_type(self) -> Optional[str]: """ Determines the placement of emptyDir volumes, container runtime data root, and Kubelet ephemeral storage. """ return pulumi.get(self, "kubelet_disk_type") @property @pulumi.getter(name="linuxOSConfig") def linux_os_config(self) -> Optional['outputs.LinuxOSConfigResponse']: """ The OS configuration of Linux agent nodes. """ return pulumi.get(self, "linux_os_config") @property @pulumi.getter(name="maxCount") def max_count(self) -> Optional[int]: """ The maximum number of nodes for auto-scaling """ return pulumi.get(self, "max_count") @property @pulumi.getter(name="maxPods") def max_pods(self) -> Optional[int]: """ The maximum number of pods that can run on a node. """ return pulumi.get(self, "max_pods") @property @pulumi.getter(name="minCount") def min_count(self) -> Optional[int]: """ The minimum number of nodes for auto-scaling """ return pulumi.get(self, "min_count") @property @pulumi.getter def mode(self) -> Optional[str]: """ A cluster must have at least one 'System' Agent Pool at all times. For additional information on agent pool restrictions and best practices, see: https://docs.microsoft.com/azure/aks/use-system-pools """ return pulumi.get(self, "mode") @property @pulumi.getter def name(self) -> str: """ The name of the resource that is unique within a resource group. This name can be used to access the resource. """ return pulumi.get(self, "name") @property @pulumi.getter(name="nodeImageVersion") def node_image_version(self) -> str: """ The version of node image """ return pulumi.get(self, "node_image_version") @property @pulumi.getter(name="nodeLabels") def node_labels(self) -> Optional[Mapping[str, str]]: """ The node labels to be persisted across all nodes in agent pool. """ return pulumi.get(self, "node_labels") @property @pulumi.getter(name="nodePublicIPPrefixID") def node_public_ip_prefix_id(self) -> Optional[str]: """ This is of the form: /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIPPrefixName} """ return pulumi.get(self, "node_public_ip_prefix_id") @property @pulumi.getter(name="nodeTaints") def node_taints(self) -> Optional[Sequence[str]]: """ The taints added to new nodes during node pool create and scale. For example, key=value:NoSchedule. 
""" return pulumi.get(self, "node_taints") @property @pulumi.getter(name="orchestratorVersion") def orchestrator_version(self) -> Optional[str]: """ As a best practice, you should upgrade all node pools in an AKS cluster to the same Kubernetes version. The node pool version must have the same major version as the control plane. The node pool minor version must be within two minor versions of the control plane version. The node pool version cannot be greater than the control plane version. For more information see [upgrading a node pool](https://docs.microsoft.com/azure/aks/use-multiple-node-pools#upgrade-a-node-pool). """ return pulumi.get(self, "orchestrator_version") @property @pulumi.getter(name="osDiskSizeGB") def os_disk_size_gb(self) -> Optional[int]: """ OS Disk Size in GB to be used to specify the disk size for every machine in the master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified. """ return pulumi.get(self, "os_disk_size_gb") @property @pulumi.getter(name="osDiskType") def os_disk_type(self) -> Optional[str]: """ The default is 'Ephemeral' if the VM supports it and has a cache disk larger than the requested OSDiskSizeGB. Otherwise, defaults to 'Managed'. May not be changed after creation. For more information see [Ephemeral OS](https://docs.microsoft.com/azure/aks/cluster-configuration#ephemeral-os). """ return pulumi.get(self, "os_disk_type") @property @pulumi.getter(name="osSKU") def os_sku(self) -> Optional[str]: """ Specifies an OS SKU. This value must not be specified if OSType is Windows. """ return pulumi.get(self, "os_sku") @property @pulumi.getter(name="osType") def os_type(self) -> Optional[str]: """ The operating system type. The default is Linux. """ return pulumi.get(self, "os_type") @property @pulumi.getter(name="podSubnetID") def pod_subnet_id(self) -> Optional[str]: """ If omitted, pod IPs are statically assigned on the node subnet (see vnetSubnetID for more details). This is of the form: /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName} """ return pulumi.get(self, "pod_subnet_id") @property @pulumi.getter(name="powerState") def power_state(self) -> Optional['outputs.PowerStateResponse']: """ When an Agent Pool is first created it is initially Running. The Agent Pool can be stopped by setting this field to Stopped. A stopped Agent Pool stops all of its VMs and does not accrue billing charges. An Agent Pool can only be stopped if it is Running and provisioning state is Succeeded """ return pulumi.get(self, "power_state") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> str: """ The current deployment or provisioning state. """ return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="proximityPlacementGroupID") def proximity_placement_group_id(self) -> Optional[str]: """ The ID for Proximity Placement Group. """ return pulumi.get(self, "proximity_placement_group_id") @property @pulumi.getter(name="scaleDownMode") def scale_down_mode(self) -> Optional[str]: """ This also effects the cluster autoscaler behavior. If not specified, it defaults to Delete. """ return pulumi.get(self, "scale_down_mode") @property @pulumi.getter(name="scaleSetEvictionPolicy") def scale_set_eviction_policy(self) -> Optional[str]: """ This cannot be specified unless the scaleSetPriority is 'Spot'. If not specified, the default is 'Delete'. 
""" return pulumi.get(self, "scale_set_eviction_policy") @property @pulumi.getter(name="scaleSetPriority") def scale_set_priority(self) -> Optional[str]: """ The Virtual Machine Scale Set priority. If not specified, the default is 'Regular'. """ return pulumi.get(self, "scale_set_priority") @property @pulumi.getter(name="spotMaxPrice") def spot_max_price(self) -> Optional[float]: """ Possible values are any decimal value greater than zero or -1 which indicates the willingness to pay any on-demand price. For more details on spot pricing, see [spot VMs pricing](https://docs.microsoft.com/azure/virtual-machines/spot-vms#pricing) """ return pulumi.get(self, "spot_max_price") @property @pulumi.getter def tags(self) -> Optional[Mapping[str, str]]: """ The tags to be persisted on the agent pool virtual machine scale set. """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> str: """ The type of Agent Pool. """ return pulumi.get(self, "type") @property @pulumi.getter(name="upgradeSettings") def upgrade_settings(self) -> Optional['outputs.AgentPoolUpgradeSettingsResponse']: """ Settings for upgrading the agentpool """ return pulumi.get(self, "upgrade_settings") @property @pulumi.getter(name="vmSize") def vm_size(self) -> Optional[str]: """ VM size availability varies by region. If a node contains insufficient compute resources (memory, cpu, etc) pods might fail to run correctly. For more details on restricted VM sizes, see: https://docs.microsoft.com/azure/aks/quotas-skus-regions """ return pulumi.get(self, "vm_size") @property @pulumi.getter(name="vnetSubnetID") def vnet_subnet_id(self) -> Optional[str]: """ If this is not specified, a VNET and subnet will be generated and used. If no podSubnetID is specified, this applies to nodes and pods, otherwise it applies to just nodes. This is of the form: /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName} """ return pulumi.get(self, "vnet_subnet_id") @property @pulumi.getter(name="workloadRuntime") def workload_runtime(self) -> Optional[str]: """ Determines the type of workload a node can run. 
""" return pulumi.get(self, "workload_runtime") class AwaitableGetAgentPoolResult(GetAgentPoolResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetAgentPoolResult( availability_zones=self.availability_zones, count=self.count, creation_data=self.creation_data, enable_auto_scaling=self.enable_auto_scaling, enable_encryption_at_host=self.enable_encryption_at_host, enable_fips=self.enable_fips, enable_node_public_ip=self.enable_node_public_ip, enable_ultra_ssd=self.enable_ultra_ssd, gpu_instance_profile=self.gpu_instance_profile, id=self.id, kubelet_config=self.kubelet_config, kubelet_disk_type=self.kubelet_disk_type, linux_os_config=self.linux_os_config, max_count=self.max_count, max_pods=self.max_pods, min_count=self.min_count, mode=self.mode, name=self.name, node_image_version=self.node_image_version, node_labels=self.node_labels, node_public_ip_prefix_id=self.node_public_ip_prefix_id, node_taints=self.node_taints, orchestrator_version=self.orchestrator_version, os_disk_size_gb=self.os_disk_size_gb, os_disk_type=self.os_disk_type, os_sku=self.os_sku, os_type=self.os_type, pod_subnet_id=self.pod_subnet_id, power_state=self.power_state, provisioning_state=self.provisioning_state, proximity_placement_group_id=self.proximity_placement_group_id, scale_down_mode=self.scale_down_mode, scale_set_eviction_policy=self.scale_set_eviction_policy, scale_set_priority=self.scale_set_priority, spot_max_price=self.spot_max_price, tags=self.tags, type=self.type, upgrade_settings=self.upgrade_settings, vm_size=self.vm_size, vnet_subnet_id=self.vnet_subnet_id, workload_runtime=self.workload_runtime) def get_agent_pool(agent_pool_name: Optional[str] = None, resource_group_name: Optional[str] = None, resource_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAgentPoolResult: """ Agent Pool. :param str agent_pool_name: The name of the agent pool. :param str resource_group_name: The name of the resource group. :param str resource_name: The name of the managed cluster resource. 
""" __args__ = dict() __args__['agentPoolName'] = agent_pool_name __args__['resourceGroupName'] = resource_group_name __args__['resourceName'] = resource_name if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-native:containerservice/v20210901:getAgentPool', __args__, opts=opts, typ=GetAgentPoolResult).value return AwaitableGetAgentPoolResult( availability_zones=__ret__.availability_zones, count=__ret__.count, creation_data=__ret__.creation_data, enable_auto_scaling=__ret__.enable_auto_scaling, enable_encryption_at_host=__ret__.enable_encryption_at_host, enable_fips=__ret__.enable_fips, enable_node_public_ip=__ret__.enable_node_public_ip, enable_ultra_ssd=__ret__.enable_ultra_ssd, gpu_instance_profile=__ret__.gpu_instance_profile, id=__ret__.id, kubelet_config=__ret__.kubelet_config, kubelet_disk_type=__ret__.kubelet_disk_type, linux_os_config=__ret__.linux_os_config, max_count=__ret__.max_count, max_pods=__ret__.max_pods, min_count=__ret__.min_count, mode=__ret__.mode, name=__ret__.name, node_image_version=__ret__.node_image_version, node_labels=__ret__.node_labels, node_public_ip_prefix_id=__ret__.node_public_ip_prefix_id, node_taints=__ret__.node_taints, orchestrator_version=__ret__.orchestrator_version, os_disk_size_gb=__ret__.os_disk_size_gb, os_disk_type=__ret__.os_disk_type, os_sku=__ret__.os_sku, os_type=__ret__.os_type, pod_subnet_id=__ret__.pod_subnet_id, power_state=__ret__.power_state, provisioning_state=__ret__.provisioning_state, proximity_placement_group_id=__ret__.proximity_placement_group_id, scale_down_mode=__ret__.scale_down_mode, scale_set_eviction_policy=__ret__.scale_set_eviction_policy, scale_set_priority=__ret__.scale_set_priority, spot_max_price=__ret__.spot_max_price, tags=__ret__.tags, type=__ret__.type, upgrade_settings=__ret__.upgrade_settings, vm_size=__ret__.vm_size, vnet_subnet_id=__ret__.vnet_subnet_id, workload_runtime=__ret__.workload_runtime) @_utilities.lift_output_func(get_agent_pool) def get_agent_pool_output(agent_pool_name: Optional[pulumi.Input[str]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, resource_name: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAgentPoolResult]: """ Agent Pool. :param str agent_pool_name: The name of the agent pool. :param str resource_group_name: The name of the resource group. :param str resource_name: The name of the managed cluster resource. """ ...
3a9bd176b6447bea26b249ad12815762e165a913
c80314871502377180b1d496d0d4e7dc9e8cdba8
/exercise/python_1040_practice_algo_recursion_hanoi.py
abbc470f6e534414af43740e26a3719053d2293e
[]
no_license
tomboxfan/PythonExample
996896bcbc0bf83fbca7d28bcb207dca35875f6b
8b071314b4dc4c3e3acccb835405c44630a15722
refs/heads/master
2023-02-11T13:55:26.591124
2021-01-09T03:20:46
2021-01-09T03:20:46
275,275,138
0
0
null
null
null
null
UTF-8
Python
false
false
1,672
py
'''
Requirement:
The Tower of Hanoi puzzle was invented by the French mathematician Edouard Lucas in 1883. He was inspired by
a legend that tells of a Hindu temple where the puzzle was presented to young priests. At the beginning of time,
the priests were given three poles and a stack of 64 gold disks, each disk a little smaller than the one beneath it.
Their assignment was to transfer all 64 disks from one of the three poles to another, with two important constraints.
1) They could only move one disk at a time.
2) They could never place a larger disk on top of a smaller one.
The priests worked very efficiently, day and night, moving one disk every second. When they finished their work,
the legend said, the temple would crumble into dust and the world would vanish.

Although the legend is interesting, you need not worry about the world ending any time soon. The number of moves
required to correctly move a tower of 64 disks is 2^64−1=18,446,744,073,709,551,615. At a rate of one move per
second, that is 584,942,417,355 years! Clearly there is more to this puzzle than meets the eye.
'''

def tower_of_hanoi(n, from_rod, to_rod, help_rod):
    if n == 1:
        print(f"Move disk {n} from {from_rod} to {to_rod}")
    else:
        # Step 1) Move n-1 plates from 'from_rod' to 'help_rod', via 'to_rod'.
        tower_of_hanoi(n-1, from_rod, help_rod, to_rod)
        # Step 2) Move plate n from 'from_rod' to 'to_rod'
        print(f"Move disk {n} from {from_rod} to {to_rod}")
        # Step 3) Move n-1 plates from 'help_rod' to 'to_rod', via 'from_rod'.
        tower_of_hanoi(n-1, help_rod, to_rod, from_rod)

tower_of_hanoi(4, 'A', 'C', 'B')
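A small companion check (not part of the original exercise) that the recursion above emits exactly 2**n - 1 moves, which is where the 18,446,744,073,709,551,615 figure in the docstring comes from:

def count_moves(n):
    # moves(1) = 1; otherwise one move plus two sub-towers of size n - 1,
    # so moves(n) = 2 * moves(n - 1) + 1 = 2**n - 1.
    if n == 1:
        return 1
    return 2 * count_moves(n - 1) + 1

assert count_moves(4) == 2 ** 4 - 1                     # 15 moves for the call above
assert count_moves(64) == 18_446_744_073_709_551_615   # the number quoted in the docstring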
e6f61f08c4027bfec92381e04e2087c07efa6800
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03592/s369623086.py
0473e1e86b92c11c4dd5abc861aa31996a132b45
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
926
py
import sys, re
from collections import deque, defaultdict, Counter
from math import ceil, sqrt, hypot, factorial, pi, sin, cos, radians
from itertools import accumulate, permutations, combinations, product, groupby
from operator import itemgetter, mul
from copy import deepcopy
from string import ascii_lowercase, ascii_uppercase, digits
from bisect import bisect, bisect_left
from fractions import gcd
from heapq import heappush, heappop
from functools import reduce


def input(): return sys.stdin.readline().strip()
def INT(): return int(input())
def MAP(): return map(int, input().split())
def LIST(): return list(map(int, input().split()))
def ZIP(n): return zip(*(MAP() for _ in range(n)))

sys.setrecursionlimit(10 ** 9)
INF = float('inf')
mod = 10 ** 9 + 7

H, W, K = MAP()

for x in range(H+1):
    y = (K-x*W)/(H-2*x) if H != 2*x else (H*W-K)/H
    if y.is_integer() and 0 <= y <= W:
        print("Yes")
        break
else:
    print("No")
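The one-line formula in the loop above is easier to follow when rearranged. Reading the code (the problem statement itself is not included here), it searches for non-negative integers x and y with

    x*W + y*H - 2*x*y = K,  i.e.  y = (K - x*W) / (H - 2*x)  when H != 2*x,

falling back to the separate expression (H*W - K) / H for the degenerate case H == 2*x; "Yes" is printed as soon as some x in 0..H yields an integer y with 0 <= y <= W, and "No" otherwise.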
5b17863999522ec2777bdd7083007c20f69cbd08
df7f13ec34591fe1ce2d9aeebd5fd183e012711a
/hata/discord/channel/channel_metadata/tests/test__ChannelMetadataGuildThreadAnnouncements__utility.py
79cd654d880a07831297665da13916f7b7c4630c
[ "LicenseRef-scancode-warranty-disclaimer" ]
permissive
HuyaneMatsu/hata
63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e
53f24fdb38459dc5a4fd04f11bdbfee8295b76a4
refs/heads/master
2023-08-20T15:58:09.343044
2023-08-20T13:09:03
2023-08-20T13:09:03
163,677,173
3
3
Apache-2.0
2019-12-18T03:46:12
2018-12-31T14:59:47
Python
UTF-8
Python
false
false
7,445
py
from datetime import datetime as DateTime import vampytest from ..guild_thread_base import ChannelMetadataGuildThreadBase from .test__ChannelMetadataGuildThreadBase__constructor import _assert_fields_set def test__ChannelMetadataGuildThreadBase__copy(): """ Tests whether ``ChannelMetadataGuildThreadBase.copy` works as intended. """ name = 'alice' parent_id = 202304120047 created_at = DateTime(2016, 4, 4) archived = False archived_at = DateTime(2017, 4, 4) auto_archive_after = 3600 open_ = True owner_id = 202304120048 slowmode = 30 channel_metadata = ChannelMetadataGuildThreadBase( name = name, parent_id = parent_id, created_at = created_at, archived = archived, archived_at = archived_at, auto_archive_after = auto_archive_after, open = open_, owner_id = owner_id, slowmode = slowmode, ) copy = channel_metadata.copy() _assert_fields_set(copy) vampytest.assert_is_not(copy, channel_metadata) vampytest.assert_eq(copy, channel_metadata) def test__ChannelMetadataGuildThreadBase__copy_with__0(): """ Tests whether ``ChannelMetadataGuildThreadBase.copy_with` works as intended. Case: No fields. """ name = 'alice' parent_id = 202304120049 created_at = DateTime(2016, 4, 4) archived = False archived_at = DateTime(2017, 4, 4) auto_archive_after = 3600 open_ = True owner_id = 202304120050 slowmode = 30 channel_metadata = ChannelMetadataGuildThreadBase( name = name, parent_id = parent_id, created_at = created_at, archived = archived, archived_at = archived_at, auto_archive_after = auto_archive_after, open = open_, owner_id = owner_id, slowmode = slowmode, ) copy = channel_metadata.copy_with() _assert_fields_set(copy) vampytest.assert_is_not(copy, channel_metadata) vampytest.assert_eq(copy, channel_metadata) def test__ChannelMetadataGuildThreadBase__copy_with__1(): """ Tests whether ``ChannelMetadataGuildThreadBase.copy_with` works as intended. Case: All fields. 
""" old_name = 'alice' old_parent_id = 202304120051 old_created_at = DateTime(2016, 4, 4) old_archived = False old_archived_at = DateTime(2017, 4, 4) old_auto_archive_after = 3600 old_open = True old_owner_id = 202304120052 old_slowmode = 30 new_name = 'emotion' new_parent_id = 202304120053 new_created_at = DateTime(2016, 4, 5) new_archived = True new_archived_at = DateTime(2017, 4, 5) new_auto_archive_after = 604800 new_open = False new_owner_id = 202304120054 new_slowmode = 31 channel_metadata = ChannelMetadataGuildThreadBase( name = old_name, parent_id = old_parent_id, created_at = old_created_at, archived = old_archived, archived_at = old_archived_at, auto_archive_after = old_auto_archive_after, open = old_open, owner_id = old_owner_id, slowmode = old_slowmode, ) copy = channel_metadata.copy_with( name = new_name, parent_id = new_parent_id, created_at = new_created_at, archived = new_archived, archived_at = new_archived_at, auto_archive_after = new_auto_archive_after, open = new_open, owner_id = new_owner_id, slowmode = new_slowmode, ) _assert_fields_set(copy) vampytest.assert_is_not(copy, channel_metadata) vampytest.assert_eq(copy.name, new_name) vampytest.assert_eq(copy.parent_id, new_parent_id) vampytest.assert_eq(copy._created_at, new_created_at) vampytest.assert_eq(copy.archived, new_archived) vampytest.assert_eq(copy.archived_at, new_archived_at) vampytest.assert_eq(copy.auto_archive_after, new_auto_archive_after) vampytest.assert_eq(copy.open, new_open) vampytest.assert_eq(copy.owner_id, new_owner_id) vampytest.assert_eq(copy.slowmode, new_slowmode) def test__ChannelMetadataGuildThreadBase__copy_with_keyword_parameters__0(): """ Tests whether ``ChannelMetadataGuildThreadBase.copy_with_keyword_parameters` works as intended. Case: No fields. """ name = 'alice' parent_id = 202304120055 created_at = DateTime(2016, 4, 4) archived = False archived_at = DateTime(2017, 4, 4) auto_archive_after = 3600 open_ = True owner_id = 202304120056 slowmode = 30 channel_metadata = ChannelMetadataGuildThreadBase( name = name, parent_id = parent_id, created_at = created_at, archived = archived, archived_at = archived_at, auto_archive_after = auto_archive_after, open = open_, owner_id = owner_id, slowmode = slowmode, ) keyword_parameters = {} copy = channel_metadata.copy_with_keyword_parameters(keyword_parameters) _assert_fields_set(copy) vampytest.assert_is_not(copy, channel_metadata) vampytest.assert_eq(keyword_parameters, {}) vampytest.assert_eq(copy, channel_metadata) def test__ChannelMetadataGuildThreadBase__copy_with_keyword_parameters__1(): """ Tests whether ``ChannelMetadataGuildThreadBase.copy_with_keyword_parameters` works as intended. Case: All fields. 
""" old_name = 'alice' old_parent_id = 202304120057 old_created_at = DateTime(2016, 4, 4) old_archived = False old_archived_at = DateTime(2017, 4, 4) old_auto_archive_after = 3600 old_open = True old_owner_id = 202304120058 old_slowmode = 30 new_name = 'emotion' new_parent_id = 202304120059 new_created_at = DateTime(2016, 4, 5) new_archived = True new_archived_at = DateTime(2017, 4, 5) new_auto_archive_after = 604800 new_open = False new_owner_id = 202304120060 new_slowmode = 31 channel_metadata = ChannelMetadataGuildThreadBase( name = old_name, parent_id = old_parent_id, created_at = old_created_at, archived = old_archived, archived_at = old_archived_at, auto_archive_after = old_auto_archive_after, open = old_open, owner_id = old_owner_id, slowmode = old_slowmode, ) keyword_parameters = { 'name': new_name, 'parent_id': new_parent_id, 'created_at': new_created_at, 'archived': new_archived, 'archived_at': new_archived_at, 'auto_archive_after': new_auto_archive_after, 'open': new_open, 'owner_id': new_owner_id, 'slowmode': new_slowmode, } copy = channel_metadata.copy_with_keyword_parameters(keyword_parameters) _assert_fields_set(copy) vampytest.assert_is_not(copy, channel_metadata) vampytest.assert_eq(keyword_parameters, {}) vampytest.assert_eq(copy.name, new_name) vampytest.assert_eq(copy.parent_id, new_parent_id) vampytest.assert_eq(copy._created_at, new_created_at) vampytest.assert_eq(copy.archived, new_archived) vampytest.assert_eq(copy.archived_at, new_archived_at) vampytest.assert_eq(copy.auto_archive_after, new_auto_archive_after) vampytest.assert_eq(copy.open, new_open) vampytest.assert_eq(copy.owner_id, new_owner_id) vampytest.assert_eq(copy.slowmode, new_slowmode)
2f716cb7b50e626cfc3fb1549ff0e4f0ef60f3e3
eaaecada4c78c899bfdb6a83aaf66502a7d4bc4c
/data_augmentation/eda/image/task.py
b35e6ade3e8418844f9ba5b2dd23402bc1d86d19
[ "MIT" ]
permissive
simran-arora/emmental-tutorials
72552d6bcb3311e011f99fa6d164fa619c913283
249a82a57be58e960408a45e2e0daa72980d210a
refs/heads/master
2022-12-01T20:12:55.613955
2020-08-13T08:16:12
2020-08-13T08:16:12
286,825,852
0
0
MIT
2020-08-11T19:01:59
2020-08-11T19:01:58
null
UTF-8
Python
false
false
2,621
py
import logging
from functools import partial

import numpy as np
import torch
import torch.nn.functional as F
from emmental.scorer import Scorer
from emmental.task import EmmentalTask
from torch import nn

from eda.image.config import TASK_INPUT_SIZE, TASK_METRIC, TASK_NUM_CLASS
from eda.image.models import ALL_MODELS
from eda.image.modules.soft_cross_entropy_loss import SoftCrossEntropyLoss

logger = logging.getLogger(__name__)

SCE = SoftCrossEntropyLoss(reduction="none")


def sce_loss(module_name, intermediate_output_dict, Y, active):
    if len(Y.size()) == 1:
        label = intermediate_output_dict[module_name][0].new_zeros(
            intermediate_output_dict[module_name][0].size()
        )
        label.scatter_(1, Y.view(Y.size()[0], 1), 1.0)
    else:
        label = Y

    return SCE(intermediate_output_dict[module_name][0][active], label[active])


def output_classification(module_name, immediate_output_dict):
    return F.softmax(immediate_output_dict[module_name][0], dim=1)


def create_task(args):
    task_name = args.task
    n_class = TASK_NUM_CLASS[args.task]

    if args.model in ["wide_resnet"]:
        feature_extractor = ALL_MODELS[args.model](
            args.wide_resnet_depth,
            args.wide_resnet_width,
            args.wide_resnet_dropout,
            n_class,
            has_fc=False,
        )
        n_hidden_dim = feature_extractor(
            torch.randn(TASK_INPUT_SIZE[args.task])
        ).size()[-1]
    elif args.model == "mlp":
        n_hidden_dim = args.mlp_hidden_dim
        input_dim = np.prod(TASK_INPUT_SIZE[args.task])
        feature_extractor = ALL_MODELS[args.model](
            input_dim, n_hidden_dim, n_class, has_fc=False
        )
    else:
        raise ValueError(f"Invalid model {args.model}")

    loss = sce_loss
    output = output_classification

    logger.info(f"Built model: {feature_extractor}")

    return EmmentalTask(
        name=args.task,
        module_pool=nn.ModuleDict(
            {
                "feature": feature_extractor,
                f"{task_name}_pred_head": nn.Linear(n_hidden_dim, n_class),
            }
        ),
        task_flow=[
            {"name": "feature", "module": "feature", "inputs": [("_input_", "image")]},
            {
                "name": f"{task_name}_pred_head",
                "module": f"{task_name}_pred_head",
                "inputs": [("feature", 0)],
            },
        ],
        loss_func=partial(loss, f"{task_name}_pred_head"),
        output_func=partial(output, f"{task_name}_pred_head"),
        scorer=Scorer(metrics=TASK_METRIC[task_name]),
    )
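A self-contained sketch (plain PyTorch, independent of the emmental helpers above) of the label handling in sce_loss: integer class labels are expanded into one-hot targets with scatter_, and with one-hot targets a soft cross-entropy reduces to the ordinary cross-entropy:

import torch
import torch.nn.functional as F

logits = torch.randn(4, 10)           # hypothetical batch of 4 examples, 10 classes
hard_y = torch.tensor([3, 1, 7, 0])   # integer class labels

# Same one-hot construction as in sce_loss above.
one_hot = logits.new_zeros(logits.size())
one_hot.scatter_(1, hard_y.view(-1, 1), 1.0)

# Soft cross-entropy against one-hot targets equals the usual cross-entropy.
soft_ce = -(one_hot * F.log_softmax(logits, dim=1)).sum(dim=1).mean()
hard_ce = F.cross_entropy(logits, hard_y)
assert torch.allclose(soft_ce, hard_ce, atol=1e-6)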
dd93ddbc2c6ada0eec838318e43428eb9841c4f1
e19ddf30bf87a4efdc449fa49b9621ca1460a515
/castle/theme/interfaces.py
5d42e09dbd195823850c02a445e872c1457cc0c0
[]
no_license
castlecms/castle.theme
a220d25b1cf40fa47fb4af9be3cfa8d6a1cc75c9
4a36537ddc4db59ea2902a71e544f5a319a5a15c
refs/heads/master
2022-11-02T10:53:04.758867
2020-02-21T21:28:10
2020-02-21T21:28:10
72,666,053
5
3
null
2022-10-05T11:20:53
2016-11-02T17:47:59
CSS
UTF-8
Python
false
false
209
py
from zope.interface import Interface


class ICustomTheme(Interface):
    """Marker interface that defines a Zope 3 browser layer.
    """


class IUtils(Interface):

    def get_folder_section():
        pass
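A hypothetical usage sketch (not part of castle.theme) showing how a marker or utility interface like the ones above is typically declared and checked with zope.interface:

from zope.interface import implementer

@implementer(IUtils)
class Utils(object):
    def get_folder_section(self):
        return None

print(IUtils.providedBy(Utils()))        # True: the instance provides IUtils
print(ICustomTheme.providedBy(Utils()))  # False: ICustomTheme is only a layer marker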
512d012e85f13dd74f93a158a73f9939eceac984
2a33588917f591d0250671d24bb9b5b1d8493d70
/wechat/base_data.py
12ffb5e7043492746dea36b673fa74d57d9b313a
[]
no_license
coblan/eface
283365c04f239e68a5d1476dcb7e1605bd9b9aa4
5f645e541875daf3365ff4542129d27a1f7957a9
refs/heads/master
2023-08-08T03:20:50.371189
2023-08-06T05:04:25
2023-08-06T05:04:25
163,195,597
0
0
null
null
null
null
UTF-8
Python
false
false
17
py
wechat_page_dc={}
911fe05a24b4aea8350196fde947b9a287d1e07d
638af6b8c580eeae23fc1034882c4b514195137a
/Packages/cmor/Test/test_python_common.py
c4566b2f070f86cc7058dd832773ef15518464f7
[]
no_license
doutriaux1/uvcdat
83684a86b514b8cac4d8900a503fc13d557fc4d2
37e9635f988696c346b4c3cdb49144d1e21dab5d
refs/heads/master
2021-01-17T07:57:22.897539
2015-02-02T22:52:12
2015-02-02T22:52:12
14,878,320
1
0
null
2015-02-19T20:54:25
2013-12-02T23:44:46
C
UTF-8
Python
false
false
4,668
py
import numpy # this test tries to mimic ippc_test_code.c but from python # This one is using direct C calls from python not the python around it ntimes=2 lon=4 lat=3 lev=5 lev2=17 varin3d=["CLOUD", "U", "T" ]; # /* Units appropriate to my data */ units3d=["%", "m s-1", "K"]; # /* Corresponding IPCC Table A1c entry (variable name) */ entry3d=["cl","ua","ta"]; # /* My variable names for IPCC Table A1a fields */ varin2d=[ "LATENT","TSURF","SOIL_WET","PSURF" ]; # /* Units appropriate to my data */ units2d=[ "W m-2","K","kg m-2","Pa"]; positive2d=["down"," ", " ", " "]; # /* Corresponding IPCC Table A1a entry (variable name) */ entry2d=["hfls", "tas","mrsos","ps"]; def gen_irreg_grid(lon,lat): lon0 = 280. lat0=0.; delta_lon = 10.; delta_lat = 10.; y = numpy.arange(lat) x = numpy.arange(lon) lon_coords = numpy.zeros((lat,lon)) lat_coords = numpy.zeros((lat,lon)) lon_vertices = numpy.zeros((lat,lon,4)) lat_vertices = numpy.zeros((lat,lon,4)) for j in range(lat): # really porr coding i know for i in range(lon): # getting worse i know lon_coords[j,i] = lon0+delta_lon*(j+1+i); lat_coords[j,i] = lat0+delta_lat*(j+1-i); lon_vertices[j,i,0] = lon_coords[j,i]-delta_lon; lon_vertices[j,i,1] = lon_coords[j,i]; lon_vertices[j,i,2] = lon_coords[j,i]+delta_lon; lon_vertices[j,i,3] = lon_coords[j,i]; ## !!$ /* vertices lat */ lat_vertices[j,i,0] = lat_coords[j,i]; lat_vertices[j,i,1] = lat_coords[j,i]-delta_lat; lat_vertices[j,i,2] = lat_coords[j,i]; lat_vertices[j,i,3] = lat_coords[j,i]+delta_lat; return x,y,lon_coords,lat_coords,lon_vertices,lat_vertices # read_data funcs are highly unoptimzed.... def read_coords(lon,lat,lev): alons = numpy.zeros(lon) bnds_lon = numpy.zeros(2*lon) alats = numpy.zeros(lat) bnds_lat = numpy.zeros(2*lat) plevs = numpy.zeros(lev,dtype='i') for i in range(lon): alons[i] = i*360./lon bnds_lon[2*i] = (i - 0.5)*360./lon bnds_lon[2*i+1] = (i + 0.5)*360./lon for i in range(lat): alats[i] = (lat-i)*10 bnds_lat[2*i] = (lat-i)*10 + 5. bnds_lat[2*i+1] = (lat-i)*10 - 5. plevs = numpy.array([100000., 92500., 85000., 70000., 60000., 50000., 40000., 30000., 25000., 20000., 15000., 10000., 7000., 5000., 3000., 2000., 1000.]) return alats, alons, plevs, bnds_lat, bnds_lon def read_time(it): time = [0] time_bnds=[0,0] time[0] = (it-0.5)*30.; time_bnds[0] = (it-1)*30.; time_bnds[1] = it*30.; time[0]=it; time_bnds[0] = it; time_bnds[1] = it+1; return time[0],numpy.array(time_bnds) def read_3d_input_files(it, varname, n0, n1, n2, ntimes): if varname=="CLOUD": factor = 0.1; offset = -50.; elif varname=="U": factor = 1. offset = 100. elif varname=="T": factor = 0.5; offset = -150.; field = numpy.zeros((n2,n1,n0),dtype='d') for k in range(n2): for j in range(n1): for i in range(n0): field[k,j,i] = (k*64 + j*16 + i*4 + it)*factor - offset; return field def read_2d_input_files(it, varname, n0, n1): if varname=="LATENT": factor = 1.25; offset = 100.; elif varname == "TSURF": factor = 2.0; offset = -230.; elif varname=="SOIL_WET": factor = 10.; offset = 0.; elif varname == "PSURF": factor = 1.; offset = -9.7e2; field = numpy.zeros((n0,n1),dtype='d') for j in range(n0): for i in range(n1): tmp = (j*16. + i*4. 
+ it)*factor - offset; field[j,i] = tmp; return field alats, alons, plevs, bnds_lat, bnds_lon = read_coords(lon,lat,lev); Time = numpy.zeros(ntimes,dtype='d') bnds_time = numpy.zeros(ntimes*2,dtype='d') Time[0],bnds_time[0:2] = read_time(0) Time[1],bnds_time[2:4] = read_time(1) zlevs = numpy.zeros(5,dtype='d') zlevs[0]=0.1999999999999999999; zlevs[1]= 0.3; zlevs[2]=0.55; zlevs[3]= 0.7; zlevs[4] = 0.99999999; zlev_bnds = numpy.zeros(6,dtype='d') zlev_bnds[0] = 0. zlev_bnds[1] = 0.2 zlev_bnds[2] = 0.42 zlev_bnds[3] = 0.62 zlev_bnds[4] = 0.8 zlev_bnds[5] = 1. regions = numpy.array(["atlantic_arctic_ocean", "indian_pacific_ocean", "pacific_ocean", "global_ocean", "sf_bay"]) a_coeff=numpy.array([ 0.1, 0.2, 0.3, 0.22, 0.1 ]) b_coeff=numpy.array([ 0.0, 0.1, 0.2, 0.5, 0.8 ]) p0= numpy.array([1.e5,]) a_coeff_bnds=numpy.array([0.,.15, .25, .25, .16, 0.]) b_coeff_bnds=numpy.array([0.,.05, .15, .35, .65, 1.])
7ca0c3b8af9cf6e87bd1d80617d64194a8692408
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
/cases/synthetic/sieve-big-1313.py
e0f5e294b989debaae1c8f5df4c0084f1ad3c62a
[]
no_license
Virtlink/ccbench-chocopy
c3f7f6af6349aff6503196f727ef89f210a1eac8
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
refs/heads/main
2023-04-07T15:07:12.464038
2022-02-03T15:42:39
2022-02-03T15:42:39
451,969,776
0
0
null
null
null
null
UTF-8
Python
false
false
31,751
py
# A resizable list of integers class Vector(object): items: [int] = None size: int = 0 def __init__(self:"Vector"): self.items = [0] # Returns current capacity def capacity(self:"Vector") -> int: return len(self.items) # Increases capacity of vector by one element def increase_capacity(self:"Vector") -> int: self.items = self.items + [0] return self.capacity() # Appends one item to end of vector def append(self:"Vector", item: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends many items to end of vector def append_all(self:"Vector", new_items: [int]) -> object: item:int = 0 for item in new_items: self.append(item) # Removes an item from the middle of vector def remove_at(self:"Vector", idx: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Retrieves an item at a given index def get(self:"Vector", idx: int) -> int: return self.items[idx] # Retrieves the current size of the vector def length(self:"Vector") -> int: return self.size # A resizable list of integers class Vector2(object): items: [int] = None items2: [int] = None size: int = 0 size2: int = 0 def __init__(self:"Vector2"): self.items = [0] # Returns current capacity def capacity(self:"Vector2") -> int: return len(self.items) # Returns current capacity def capacity2(self:"Vector2") -> int: return len(self.items) # Increases capacity of vector by one element def increase_capacity(self:"Vector2") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity2(self:"Vector2") -> int: self.items = self.items + [0] return self.capacity() # Appends one item to end of vector def append(self:"Vector2", item: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append2(self:"Vector2", item: int, item2: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends many items to end of vector def append_all(self:"Vector2", new_items: [int]) -> object: item:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object: item:int = 0 item2:int = 0 for item in new_items: self.append(item) # Removes an item from the middle of vector def remove_at(self:"Vector2", idx: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at2(self:"Vector2", idx: int, idx2: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Retrieves an item at a given index def get(self:"Vector2", idx: int) -> int: return self.items[idx] # Retrieves an item at a given index def get2(self:"Vector2", idx: int, idx2: int) -> int: return self.items[idx] # Retrieves the current size of the vector def length(self:"Vector2") -> int: return self.size # Retrieves the current size of the vector def length2(self:"Vector2") -> int: return self.size # A resizable list of integers class Vector3(object): items: [int] = None items2: [int] = None items3: [int] = None size: int = 0 size2: int = 0 size3: int = 0 def __init__(self:"$ID"): 
self.items = [0] # Returns current capacity def capacity(self:"Vector3") -> int: return len(self.items) # Returns current capacity def capacity2(self:"Vector3") -> int: return len(self.items) # Returns current capacity def capacity3(self:"Vector3") -> int: return len(self.items) # Increases capacity of vector by one element def increase_capacity(self:"Vector3") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity2(self:"Vector3") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity3(self:"Vector3") -> int: self.items = self.items + [0] return self.capacity() # Appends one item to end of vector def append(self:"Vector3", item: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append2(self:"Vector3", item: int, item2: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append3(self:"Vector3", item: int, item2: int, item3: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends many items to end of vector def append_all(self:"Vector3", new_items: [int]) -> object: item:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object: item:int = 0 item2:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object: item:int = 0 item2:int = 0 item3:int = 0 for item in new_items: self.append(item) # Removes an item from the middle of vector def remove_at(self:"Vector3", idx: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at2(self:"Vector3", idx: int, idx2: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Retrieves an item at a given index def get(self:"Vector3", idx: int) -> int: return self.items[idx] # Retrieves an item at a given index def get2(self:"Vector3", idx: int, idx2: int) -> int: return self.items[idx] # Retrieves an item at a given index def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int: return self.items[idx] # Retrieves the current size of the vector def length(self:"Vector3") -> int: return self.size # Retrieves the current size of the vector def length2(self:"Vector3") -> int: return self.size # Retrieves the current size of the vector def length3(self:"Vector3") -> int: return self.size # A resizable list of integers class Vector4(object): items: [int] = None items2: [int] = None items3: [int] = None items4: [int] = None size: int = 0 size2: int = 0 size3: int = 0 size4: int = 0 def __init__(self:"Vector4"): self.items = [0] # Returns current capacity def capacity(self:"Vector4") -> int: return 
len(self.items) # Returns current capacity def capacity2(self:"Vector4") -> int: return len(self.items) # Returns current capacity def capacity3(self:"Vector4") -> int: return len(self.items) # Returns current capacity def capacity4(self:"Vector4") -> int: return len(self.items) # Increases capacity of vector by one element def increase_capacity(self:"Vector4") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity2(self:"Vector4") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity3(self:"Vector4") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity4(self:"Vector4") -> int: self.items = self.items + [0] return self.capacity() # Appends one item to end of vector def append(self:"Vector4", item: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append2(self:"Vector4", item: int, item2: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append3(self:"Vector4", item: int, item2: int, item3: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends many items to end of vector def append_all(self:"Vector4", new_items: [int]) -> object: item:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object: item:int = 0 item2:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object: item:int = 0 item2:int = 0 item3:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object: item:int = 0 item2:int = 0 item3:int = 0 item4:int = 0 for item in new_items: self.append(item) # Removes an item from the middle of vector def remove_at(self:"Vector4", idx: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at2(self:"Vector4", idx: int, idx2: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Retrieves an item at a given index def get(self:"Vector4", idx: int) 
-> int: return self.items[idx] # Retrieves an item at a given index def get2(self:"Vector4", idx: int, idx2: int) -> int: return self.items[idx] # Retrieves an item at a given index def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int: return self.items[idx] # Retrieves an item at a given index def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int: return self.items[idx] # Retrieves the current size of the vector def length(self:"Vector4") -> int: return self.size # Retrieves the current size of the vector def length2(self:"Vector4") -> int: return self.size # Retrieves the current size of the vector def length3(self:"Vector4") -> int: return self.size # Retrieves the current size of the vector def length4(self:"Vector4") -> int: return self.size # A resizable list of integers class Vector5(object): items: [int] = None items2: [int] = None items3: [int] = None items4: [int] = None items5: [int] = None size: int = 0 size2: int = 0 size3: int = 0 size4: int = 0 size5: int = 0 def __init__(self:"Vector5"): self.items = [0] # Returns current capacity def capacity(self:"Vector5") -> int: return len(self.items) # Returns current capacity def capacity2(self:"Vector5") -> int: return len(self.items) # Returns current capacity def capacity3(self:"Vector5") -> int: return len(self.items) # Returns current capacity def capacity4(self:"Vector5") -> int: return len(self.items) # Returns current capacity def capacity5(self:"Vector5") -> int: return len(self.items) # Increases capacity of vector by one element def increase_capacity(self:"Vector5") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity2(self:"Vector5") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity3(self:"Vector5") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity4(self:"Vector5") -> int: self.items = self.items + [0] return self.capacity() # Increases capacity of vector by one element def increase_capacity5(self:"Vector5") -> int: self.items = self.items + [0] return self.capacity() # Appends one item to end of vector def append(self:"Vector5", item: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append2(self:"Vector5", item: int, item2: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append3(self:"Vector5", item: int, item2: int, item3: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends one item to end of vector def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object: if self.size == self.capacity(): self.increase_capacity() self.items[self.size] = item self.size = self.size + 1 # Appends many items to end of vector def append_all(self:"Vector5", new_items: [int]) -> object: item:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def 
append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object: item:int = 0 item2:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object: item:int = 0 item2:int = 0 item3:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object: item:int = 0 item2:int = 0 item3:int = 0 item4:int = 0 for item in new_items: self.append(item) # Appends many items to end of vector def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object: item:int = 0 item2:int = 0 item3:int = 0 item4:int = 0 item5:int = 0 for item in new_items: self.append(item) # Removes an item from the middle of vector def remove_at(self:"Vector5", idx: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at2(self:"Vector5", idx: int, idx2: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Removes an item from the middle of vector def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object: if idx < 0: return while idx < self.size - 1: self.items[idx] = self.items[idx + 1] idx = idx + 1 self.size = self.size - 1 # Retrieves an item at a given index def get(self:"Vector5", idx: int) -> int: return self.items[idx] # Retrieves an item at a given index def get2(self:"Vector5", idx: int, idx2: int) -> int: return self.items[idx] # Retrieves an item at a given index def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int: return self.items[idx] # Retrieves an item at a given index def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int: return self.items[idx] # Retrieves an item at a given index def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int: return self.items[idx] # Retrieves the current size of the vector def length(self:"Vector5") -> int: return self.size # Retrieves the current size of the vector def length2(self:"Vector5") -> int: return self.size # Retrieves the current size of the vector def length3(self:"Vector5") -> int: return self.size # Retrieves the current size of the vector def length4(self:"Vector5") -> int: return self.size # Retrieves the current size of the vector def length5(self:"Vector5") -> int: return self.size # A faster (but more memory-consuming) implementation of vector class DoublingVector(Vector): doubling_limit:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall 
back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector2(Vector): doubling_limit:int = 1000 doubling_limit2:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector2") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector2") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector3(Vector): doubling_limit:int = 1000 doubling_limit2:int = 1000 doubling_limit3:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector3") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector3") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity3(self:"DoublingVector3") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector4(Vector): doubling_limit:int = 1000 doubling_limit2:int = 1000 doubling_limit3:int = 1000 doubling_limit4:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity3(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity4(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector5(Vector): 
doubling_limit:int = 1000 doubling_limit2:int = 1000 doubling_limit3:int = 1000 doubling_limit4:int = 1000 doubling_limit5:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity3(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity4(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity5(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Makes a vector in the range [i, j) def vrange(i:int, j:int) -> Vector: v:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange2(i:int, j:int, i2:int, j2:int) -> Vector: v:Vector = None v2:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector: v:Vector = None v2:Vector = None v3:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector: v:Vector = None v2:Vector = None v3:Vector = None v4:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector: v:Vector = None v2:Vector = None v3:Vector = None v4:Vector = None v5:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v # Sieve of Eratosthenes (not really) def sieve(v:Vector) -> object: i:int = 0 j:int = 0 k:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve2(v:Vector, v2:Vector) -> object: i:int = 0 i2:int = 0 j:int = 0 j2:int = 0 k:int = 0 k2:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve3(v:Vector, v2:Vector, v3:Vector) -> object: i:int = 0 i2:int = 0 i3:int = 0 j:int = 0 j2:int = 0 j3:int = 0 k:int = 0 k2:int = 0 k3:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object: i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 j:int = 0 j2:int = 0 j3:int = 0 j4:int 
= 0 k:int = 0 k2:int = 0 k3:int = 0 k4:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object: i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 i5:int = 0 j:int = 0 j2:int = 0 j3:int = 0 j4:int = 0 j5:int = 0 k:int = 0 k2:int = 0 k3:int = 0 k4:int = 0 k5:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 # Input parameter n:int = 50 n2:int = 50 n3:int = 50 n4:int = 50 n5:int = 50 # Data v:Vector = None v2:Vector = None v3:Vector = None v4:Vector = None v5:Vector = None i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 i5:int = 0 # Crunch v = vrange(2, n) v2 = vrange(2, n) v3 = vrange(2, n) v4 = vrange(2, n) v5 = vrange(2, n) sieve(v) # Print while i < v.length(): print(v.get(i)) i = i + 1
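The DoublingVector comments above ("fewer resizes", "more memory-consuming") describe the usual amortized-growth trade-off; a short plain-Python illustration (separate from the generated benchmark) of how many capacity increases each strategy needs when appending n items:

def resizes(n, doubling):
    # Count capacity increases while appending n items, starting from capacity 1.
    capacity, count = 1, 0
    for size in range(1, n + 1):
        if size > capacity:
            capacity = capacity * 2 if doubling else capacity + 1
            count += 1
    return count

print(resizes(1000, doubling=True))   # 10 doublings
print(resizes(1000, doubling=False))  # 999 one-element increases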
2be8133758f27f4a9c1c013af1563b5ba0ad76a3
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
/cases/synthetic/tree-big-2269.py
d22a9b2b565a6add21a3f4a6735e52790e2fc6f3
[]
no_license
Virtlink/ccbench-chocopy
c3f7f6af6349aff6503196f727ef89f210a1eac8
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
refs/heads/main
2023-04-07T15:07:12.464038
2022-02-03T15:42:39
2022-02-03T15:42:39
451,969,776
0
0
null
null
null
null
UTF-8
Python
false
false
23,289
py
# Binary-search trees class TreeNode(object): value:int = 0 left:"TreeNode" = None right:"TreeNode" = None def insert(self:"TreeNode", x:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode(x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode(x) return True else: return self.right.insert(x) return False def contains(self:"TreeNode", x:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True class TreeNode2(object): value:int = 0 value2:int = 0 left:"TreeNode2" = None left2:"TreeNode2" = None right:"TreeNode2" = None right2:"TreeNode2" = None def insert(self:"TreeNode2", x:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode2(x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode2(x, x) return True else: return self.right.insert(x) return False def insert2(self:"TreeNode2", x:int, x2:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode2(x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode2(x, x) return True else: return self.right.insert(x) return False def contains(self:"TreeNode2", x:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains2(self:"TreeNode2", x:int, x2:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True class TreeNode3(object): value:int = 0 value2:int = 0 value3:int = 0 left:"TreeNode3" = None left2:"TreeNode3" = None left3:"TreeNode3" = None right:"TreeNode3" = None right2:"TreeNode3" = None right3:"TreeNode3" = None def insert(self:"TreeNode3", x:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode3(x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode3(x, x, x) return True else: return self.right.insert(x) return False def insert2(self:"TreeNode3", x:int, x2:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode3(x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode3(x, x, x) return True else: return self.right.insert(x) return False def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode3(x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode3(x, x, x) return True else: return self.right.insert(x) return False def contains(self:"TreeNode3", x:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains2(self:"TreeNode3", x:int, x2:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains3(self:"TreeNode3", 
x:int, x2:int, x3:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True class TreeNode4(object): value:int = 0 value2:int = 0 value3:int = 0 value4:int = 0 left:"TreeNode4" = None left2:"TreeNode4" = None left3:"TreeNode4" = None left4:"TreeNode4" = None right:"TreeNode4" = None right2:"TreeNode4" = None right3:"TreeNode4" = None right4:"TreeNode4" = None def insert(self:"TreeNode4", x:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode4(x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode4(x, x, x, x) return True else: return self.right.insert(x) return False def insert2(self:"TreeNode4", x:int, x2:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode4(x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: $Exp.right = makeNode4(x, x, x, x) return True else: return self.right.insert(x) return False def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode4(x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode4(x, x, x, x) return True else: return self.right.insert(x) return False def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode4(x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode4(x, x, x, x) return True else: return self.right.insert(x) return False def contains(self:"TreeNode4", x:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains2(self:"TreeNode4", x:int, x2:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True class TreeNode5(object): value:int = 0 value2:int = 0 value3:int = 0 value4:int = 0 value5:int = 0 left:"TreeNode5" = None left2:"TreeNode5" = None left3:"TreeNode5" = None left4:"TreeNode5" = None left5:"TreeNode5" = None right:"TreeNode5" = None right2:"TreeNode5" = None right3:"TreeNode5" = None right4:"TreeNode5" = None right5:"TreeNode5" = None def insert(self:"TreeNode5", x:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode5(x, x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode5(x, x, x, x, x) return True else: return self.right.insert(x) return False def insert2(self:"TreeNode5", x:int, x2:int) -> bool: if x < self.value: 
if self.left is None: self.left = makeNode5(x, x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode5(x, x, x, x, x) return True else: return self.right.insert(x) return False def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode5(x, x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode5(x, x, x, x, x) return True else: return self.right.insert(x) return False def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode5(x, x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode5(x, x, x, x, x) return True else: return self.right.insert(x) return False def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool: if x < self.value: if self.left is None: self.left = makeNode5(x, x, x, x, x) return True else: return self.left.insert(x) elif x > self.value: if self.right is None: self.right = makeNode5(x, x, x, x, x) return True else: return self.right.insert(x) return False def contains(self:"TreeNode5", x:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains2(self:"TreeNode5", x:int, x2:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool: if x < self.value: if self.left is None: return False else: return self.left.contains(x) elif x > self.value: if self.right is None: return False else: return self.right.contains(x) else: return True class Tree(object): root:TreeNode = None size:int = 0 def insert(self:"Tree", x:int) -> object: if self.root is None: self.root = makeNode(x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def contains(self:"Tree", x:int) -> bool: if self.root is None: return False else: return self.root.contains(x) class Tree2(object): root:TreeNode2 = None root2:TreeNode2 = None size:int = 0 size2:int = 0 def insert(self:"Tree2", x:int) -> object: if self.root is None: self.root = makeNode2(x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert2(self:"Tree2", x:int, x2:int) -> object: if self.root is None: self.root = makeNode2(x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def contains(self:"Tree2", x:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains2(self:"Tree2", x:int, x2:int) -> bool: if self.root is None: return False else: return self.root.contains(x) 
class Tree3(object): root:TreeNode3 = None root2:TreeNode3 = None root3:TreeNode3 = None size:int = 0 size2:int = 0 size3:int = 0 def insert(self:"Tree3", x:int) -> object: if self.root is None: self.root = makeNode3(x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert2(self:"Tree3", x:int, x2:int) -> object: if self.root is None: self.root = makeNode3(x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object: if self.root is None: self.root = makeNode3(x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def contains(self:"Tree3", x:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains2(self:"Tree3", x:int, x2:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool: if self.root is None: return False else: return self.root.contains(x) class Tree4(object): root:TreeNode4 = None root2:TreeNode4 = None root3:TreeNode4 = None root4:TreeNode4 = None size:int = 0 size2:int = 0 size3:int = 0 size4:int = 0 def insert(self:"Tree4", x:int) -> object: if self.root is None: self.root = makeNode4(x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert2(self:"Tree4", x:int, x2:int) -> object: if self.root is None: self.root = makeNode4(x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object: if self.root is None: self.root = makeNode4(x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object: if self.root is None: self.root = makeNode4(x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def contains(self:"Tree4", x:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains2(self:"Tree4", x:int, x2:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool: if self.root is None: return False else: return self.root.contains(x) class Tree5(object): root:TreeNode5 = None root2:TreeNode5 = None root3:TreeNode5 = None root4:TreeNode5 = None root5:TreeNode5 = None size:int = 0 size2:int = 0 size3:int = 0 size4:int = 0 size5:int = 0 def insert(self:"Tree5", x:int) -> object: if self.root is None: self.root = makeNode5(x, x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert2(self:"Tree5", x:int, x2:int) -> object: if self.root is None: self.root = makeNode5(x, x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object: if self.root is None: self.root = makeNode5(x, x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object: if self.root is None: self.root = makeNode5(x, x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = self.size + 1 def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object: if self.root is None: self.root = makeNode5(x, x, x, x, x) self.size = 1 else: if self.root.insert(x): self.size = 
self.size + 1 def contains(self:"Tree5", x:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains2(self:"Tree5", x:int, x2:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool: if self.root is None: return False else: return self.root.contains(x) def makeNode(x: int) -> TreeNode: b:TreeNode = None b = TreeNode() b.value = x return b def makeNode2(x: int, x2: int) -> TreeNode2: b:TreeNode2 = None b2:TreeNode2 = None b = TreeNode2() b.value = x return b def makeNode3(x: int, x2: int, x3: int) -> TreeNode3: b:TreeNode3 = None b2:TreeNode3 = None b3:TreeNode3 = None b = TreeNode3() b.value = x return b def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4: b:TreeNode4 = None b2:TreeNode4 = None b3:TreeNode4 = None b4:TreeNode4 = None b = TreeNode4() b.value = x return b def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5: b:TreeNode5 = None b2:TreeNode5 = None b3:TreeNode5 = None b4:TreeNode5 = None b5:TreeNode5 = None b = TreeNode5() b.value = x return b # Input parameters n:int = 100 n2:int = 100 n3:int = 100 n4:int = 100 n5:int = 100 c:int = 4 c2:int = 4 c3:int = 4 c4:int = 4 c5:int = 4 # Data t:Tree = None t2:Tree = None t3:Tree = None t4:Tree = None t5:Tree = None i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 i5:int = 0 k:int = 37813 k2:int = 37813 k3:int = 37813 k4:int = 37813 k5:int = 37813 # Crunch t = Tree() while i < n: t.insert(k) k = (k * 37813) % 37831 if i % c != 0: t.insert(i) i = i + 1 print(t.size) for i in [4, 8, 15, 16, 23, 42]: if t.contains(i): print(i)
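# A minimal, readable plain-Python sketch (not part of the benchmark above) of the
# insert/contains logic that the Tree/TreeNode classes exercise; the Node class and
# the sample values below are illustrative assumptions, not the benchmark's own API.
class Node:
    def __init__(self, value):
        self.value = value
        self.left = None    # smaller values go to the left subtree
        self.right = None   # larger values go to the right subtree

    def insert(self, x):
        if x < self.value:
            if self.left is None:
                self.left = Node(x)
                return True
            return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = Node(x)
                return True
            return self.right.insert(x)
        return False  # duplicates are not inserted

    def contains(self, x):
        if x < self.value:
            return self.left is not None and self.left.contains(x)
        if x > self.value:
            return self.right is not None and self.right.contains(x)
        return True


if __name__ == "__main__":
    root = Node(50)
    for v in (30, 70, 20, 42):
        root.insert(v)
    print([v for v in (4, 8, 15, 16, 23, 42) if root.contains(v)])  # prints [42]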
252a5265eda2101371d397eb500378a31d787fa2
2e356d3be3eb83ef89317a7804e8fa4567898d6f
/chapter1/code/metadata/extract_articles.py
fca044b690428e12f7d91aeeee034f30287dd60e
[ "MIT" ]
permissive
PacktPublishing/Advanced-Web-Scraping-with-Python
91069bbf925e142ee64e8c80ae97c28077def052
6624b71b2889a6fcfa3f080a6e15b979e582cce6
refs/heads/master
2021-07-09T10:29:35.394560
2021-01-21T07:12:34
2021-01-21T07:12:34
213,933,836
17
8
null
null
null
null
UTF-8
Python
false
false
751
py
#!/usr/bin/env python # -*- coding: utf-8 -*- import newspaper cnn_paper = newspaper.build('http://cnn.com') print('*****************************category urls************************************\n') for category in cnn_paper.category_urls(): print(category) print('*****************************url articles************************************\n') for article in cnn_paper.articles: print(article.url) print('*****************************download first article************************************\n') cnn_article = cnn_paper.articles[0] cnn_article.download() cnn_article.parse() #print(cnn_article.html) print(cnn_article.text) print(cnn_article.keywords) print(cnn_article.summary) print(cnn_article.authors) print(cnn_article.publish_date)
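# Note on the script above: with the newspaper3k package, `keywords` and `summary` are
# normally populated only after calling `nlp()` on a downloaded and parsed article, so the
# prints above may come back empty. A minimal, hedged sketch of that flow (placeholder URL):
from newspaper import Article

article = Article('http://cnn.com')  # placeholder URL, swap in a real article link
article.download()
article.parse()
article.nlp()  # fills in article.keywords and article.summary (needs nltk data installed)
print(article.keywords)
print(article.summary)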
940e980bebc322bec60c56ff452cc02f1fa66e97
f9033131dc4d66ede2c5c22fcaa4a0be5b682152
/SegmentTrees/Tasks/eolymp(2941).py
3ebde2e3290bba8960436b76b06836cf33e41562
[]
no_license
Invalid-coder/Data-Structures-and-algorithms
9bd755ce3d4eb11e605480db53302096c9874364
42c6eb8656e85b76f1c0043dcddc9c526ae12ba1
refs/heads/main
2023-04-29T08:40:34.661184
2021-05-19T10:57:37
2021-05-19T10:57:37
301,458,981
0
0
null
null
null
null
UTF-8
Python
false
false
2,112
py
#https://www.e-olymp.com/uk/submissions/7648758

from math import log2, ceil


class SegmentTree:
    ''' Segment tree supporting point updates and range-sum queries.'''
    def __init__(self, array):
        k = len(array)
        n = 1 << ceil(log2(k))
        self.items = n * [0] + array + (n - k) * [0]
        for i in range(n - 1, 0, -1):  # Fill in the values of the ancestor nodes
            self.items[i] = self.items[i * 2] + self.items[i * 2 + 1]
        self.size = n

    def update(self, i, item):
        ''' Replaces the array element at position i (zero-based) with item.'''
        i += self.size
        self.items[i] = item
        while i != 1:  # Until we reach the root
            i = i // 2  # Move to the parent index
            # Recompute its value
            self.items[i] = self.items[i * 2] + self.items[i * 2 + 1]

    def sum(self, left, right):
        ''' Returns the sum of the elements on the segment [left, right].'''
        left += self.size
        right += self.size
        result = 0
        while left <= right:
            if left % 2 == 1:  # If it is a right child
                result += self.items[left]
            if right % 2 == 0:  # If it is a left child
                result += self.items[right]
            left = (left + 1) // 2  # Parent of the node to the right
            right = (right - 1) // 2  # Parent of the node to the left
        return result


if __name__ == '__main__':
    with open('input.txt') as inp:
        n, q = map(int, inp.readline().split())
        array = list(map(int, inp.readline().split()))
        tree = SegmentTree(array)
        for _ in range(q):
            command = inp.readline().split()
            if command[0] == '=':
                tree.update(int(command[1]) - 1, int(command[2]))
            elif command[0] == '?':
                print(tree.sum(int(command[1]) - 1, int(command[2]) - 1))
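# A short in-memory usage sketch of the SegmentTree above (no input.txt required);
# the sample values are illustrative only.
tree = SegmentTree([5, 2, 4, 3, 1])
print(tree.sum(0, 4))   # 15: sum over the whole array
tree.update(2, 10)      # array becomes [5, 2, 10, 3, 1]
print(tree.sum(1, 3))   # 15: 2 + 10 + 3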
87519a17efdc5d4e72c93dfbdad14363392a71de
3146c4485faf26b663bd9db197057a2444e92602
/openldap_migration/export_opendj.py
8f9fa9733033183b5d2870778f3435e6bb7aa31d
[ "MIT" ]
permissive
TCBL/community-edition-setup
7ab06776a774f2e24179ee5d6245666aa921a779
1f91ea493ef8da0066030fb9ec72d2bc84a7dbc9
refs/heads/master
2020-03-30T20:23:20.389766
2018-10-02T19:18:53
2018-10-02T19:18:53
151,586,078
0
0
MIT
2018-10-05T11:44:25
2018-10-04T14:32:42
Python
UTF-8
Python
false
false
8,318
py
#!/usr/bin/python import traceback import sys import os import shutil import hashlib import getpass import tempfile import logging # Unix commands mkdir = '/bin/mkdir' cat = '/bin/cat' hostname = '/bin/hostname' grep = '/bin/grep' ldapsearch = "/opt/opendj/bin/ldapsearch" unzip = "/usr/bin/unzip" find = "/usr/bin/find" mkdir = "/bin/mkdir" log = "./export_opendj.log" logError = "./export_opendj.error" bu_folder = "./opendj_export" propertiesFn = "%s/setup.properties" % bu_folder # LDAP Stuff password_file = tempfile.mkstemp()[1] ldap_creds = ['-h', 'localhost', '-p', '1636', '-Z', '-X', '-D', '"cn=directory manager"', '-j', password_file] base_dns = ['ou=people', 'ou=groups', 'ou=attributes', 'ou=scopes', 'ou=clients', 'ou=scripts', 'ou=uma', 'ou=hosts', 'ou=u2f'] # configure logging logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s', filename='export_24.log', filemode='w') console = logging.StreamHandler() console.setLevel(logging.INFO) formatter = logging.Formatter('%(levelname)-8s %(message)s') console.setFormatter(formatter) logging.getLogger('').addHandler(console) def clean(s): return s.replace('@', '').replace('!', '').replace('.', '') def copyFile(fn, dir): parent_Dir = os.path.split(fn)[0] bu_dir = "%s/%s" % (bu_folder, parent_Dir) if not os.path.exists(bu_dir): runCommand([mkdir, "-p", bu_dir]) bu_fn = os.path.join(bu_dir, os.path.split(fn)[-1]) shutil.copyfile(fn, bu_fn) def getOrgInum(): args = [ldapsearch] + ldap_creds + ['-s', 'one', '-b', 'o=gluu', 'o=*', 'dn'] output = runCommand(args) return output.split(",")[0].split("o=")[-1] def getLdif(): logging.info('Creating backup of LDAP data') orgInum = getOrgInum() # Backup the data for basedn in base_dns: args = [ldapsearch] + ldap_creds + [ '-b', '%s,o=%s,o=gluu' % (basedn, orgInum), 'objectclass=*'] output = runCommand(args) ou = basedn.split("=")[-1] f = open("%s/ldif/%s.ldif" % (bu_folder, ou), 'w') f.write(output) f.close() # Backup the appliance config args = [ldapsearch] + ldap_creds + \ ['-b', 'ou=appliances,o=gluu', '-s', 'one', 'objectclass=*'] output = runCommand(args) f = open("%s/ldif/appliance.ldif" % bu_folder, 'w') f.write(output) f.close() # Backup the oxtrust config args = [ldapsearch] + ldap_creds + \ ['-b', 'ou=appliances,o=gluu', 'objectclass=oxTrustConfiguration'] output = runCommand(args) f = open("%s/ldif/oxtrust_config.ldif" % bu_folder, 'w') f.write(output) f.close() # Backup the oxauth config args = [ldapsearch] + ldap_creds + \ ['-b', 'ou=appliances,o=gluu', 'objectclass=oxAuthConfiguration'] output = runCommand(args) f = open("%s/ldif/oxauth_config.ldif" % bu_folder, 'w') f.write(output) f.close() # Backup the trust relationships args = [ldapsearch] + ldap_creds + ['-b', 'ou=appliances,o=gluu', 'objectclass=gluuSAMLconfig'] output = runCommand(args) f = open("%s/ldif/trust_relationships.ldif" % bu_folder, 'w') f.write(output) f.close() # Backup the org args = [ldapsearch] + ldap_creds + ['-s', 'base', '-b', 'o=%s,o=gluu' % orgInum, 'objectclass=*'] output = runCommand(args) f = open("%s/ldif/organization.ldif" % bu_folder, 'w') f.write(output) f.close() # Backup o=site args = [ldapsearch] + ldap_creds + ['-b', 'ou=people,o=site', '-s', 'one', 'objectclass=*'] output = runCommand(args) f = open("%s/ldif/site.ldif" % bu_folder, 'w') f.write(output) f.close() def runCommand(args, return_list=False): try: logging.debug("Running command : %s", " ".join(args)) output = None if return_list: output = os.popen(" ".join(args)).readlines() else: output = os.popen(" 
".join(args)).read().strip() return output except: logging.error("Error running command : %s", " ".join(args)) logging.debug(traceback.format_exc()) sys.exit(1) def getProp(prop): with open('/install/community-edition-setup/setup.properties.last', 'r') \ as sf: for line in sf: if "{0}=".format(prop) in line: return line.split('=')[-1].strip() def genProperties(): logging.info('Creating setup.properties backup file') props = {} props['ldapPass'] = runCommand([cat, password_file]) props['hostname'] = runCommand([hostname]) props['inumAppliance'] = runCommand( [grep, "^inum", "%s/ldif/appliance.ldif" % bu_folder] ).split("\n")[0].split(":")[-1].strip() props['inumApplianceFN'] = clean(props['inumAppliance']) props['inumOrg'] = getOrgInum() props['inumOrgFN'] = clean(props['inumOrg']) props['baseInum'] = props['inumOrg'][:21] props['encode_salt'] = runCommand( [cat, "/opt/tomcat/conf/salt"]).split("=")[-1].strip() props['oxauth_client_id'] = getProp('oxauth_client_id') props['scim_rs_client_id'] = getProp('scim_rs_client_id') props['scim_rp_client_id'] = getProp('scim_rp_client_id') props['version'] = getProp('githubBranchName').split('_')[-1] # As the certificates are copied over to the new installation, their pass # are required for accessing them and validating them props['httpdKeyPass'] = getProp('httpdKeyPass') props['shibJksPass'] = getProp('shibJksPass') props['asimbaJksPass'] = getProp('asimbaJksPass') # Preferences for installation of optional components installSaml = raw_input("\tIs Shibboleth SAML IDP installed? (Y/N):") props['installSaml'] = 'y' in installSaml.lower() props['installAsimba'] = os.path.isfile('/opt/tomcat/webapps/asimba.war') props['installCas'] = os.path.isfile('/opt/tomcat/webapps/cas.war') props['installOxAuthRP'] = os.path.isfile( '/opt/tomcat/webapps/oxauth-rp.war') f = open(propertiesFn, 'a') for key in props.keys(): # NOTE: old version of setup.py will interpret any string as True # Hence, store only the True values, the defaults are False if props[key]: f.write("%s=%s\n" % (key, props[key])) f.close() def hash_file(filename): # From http://www.programiz.com/python-programming/examples/hash-file h = hashlib.sha1() with open(filename, 'rb') as file: chunk = 0 while chunk != b'': chunk = file.read(1024) h.update(chunk) return h.hexdigest() def makeFolders(): folders = [bu_folder, "%s/ldif" % bu_folder] for folder in folders: try: if not os.path.exists(folder): runCommand([mkdir, '-p', folder]) except: logging.error("Error making folder: %s", folder) logging.debug(traceback.format_exc()) sys.exit(3) def prepareLdapPW(): ldap_pass = None # read LDAP pass from setup.properties with open('/install/community-edition-setup/setup.properties.last', 'r') \ as sfile: for line in sfile: if 'ldapPass=' in line: ldap_pass = line.split('=')[-1] # write it to the tmp file with open(password_file, 'w') as pfile: pfile.write(ldap_pass) # perform sample search sample = getOrgInum() if not sample: # get the password from the user if it fails ldap_pass = getpass.getpass("Enter LDAP Passsword: ") with open(password_file, 'w') as pfile: pfile.write(ldap_pass) def main(): prepareLdapPW() makeFolders() getLdif() genProperties() # remove the tempfile with the ldap password os.remove(password_file) if __name__ == "__main__": main()
311e5628fc15ce4639452642b0f5bc2cc980bb8d
b5ce6908490cfb8e6a1e1cbe4745d675122ddce0
/questions/single-element-in-a-sorted-array/Solution.py
3654674cf86e3e46733b7d40cc107476d78e5504
[ "MIT" ]
permissive
franklingu/leetcode-solutions
8895910f13208e1d8e604100d84c2dd35684cde4
7ad7e5c1c040510b7b7bd225ed4297054464dbc6
refs/heads/master
2023-01-09T01:34:08.097518
2023-01-02T02:05:35
2023-01-02T02:05:35
43,345,677
155
66
MIT
2020-10-02T03:41:36
2015-09-29T04:54:38
Python
UTF-8
Python
false
false
839
py
""" You are given a sorted array consisting of only integers where every element appears exactly twice, except for one element which appears exactly once. Find this single element that appears only once. Follow up: Your solution should run in O(log n) time and O(1) space.   Example 1: Input: nums = [1,1,2,3,3,4,4,8,8] Output: 2 Example 2: Input: nums = [3,3,7,7,10,11,11] Output: 10   Constraints: 1 <= nums.length <= 10^5 0 <= nums[i] <= 10^5 """ class Solution: def singleNonDuplicate(self, nums: List[int]) -> int: low = 0 high = len(nums) - 1 while low < high: mid = (low + high)//2 if mid%2 ==0: mid +=1 if nums[mid] == nums[mid - 1]: low = mid + 1 else: high = mid - 1 return nums[low]
b6c5a444d53fac0f7a74a29f4133549428f0157a
c3a0d8cc1e386717dffd93d0eb58bec752e26b0a
/test787-keras_block/main.py
9503c190eb670d02b58aab4d7b06af569f387d96
[]
no_license
matthiaswh/bit4
0ce0e385d889a30620426bc60aa47de0ecef21de
0633d7357d157b5f47c70091dc676dc2e06c1ae1
refs/heads/master
2022-11-10T07:44:28.706805
2020-06-21T13:12:26
2020-06-21T13:12:26
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,152
py
import keras import keras.backend as K import keras.layers as L import numpy as np from layers import LayerBlock, Module # Weight regularization works but batch normalization does not work! def main(): tcn = TCN(10, kernel_size=5, dilation_rates=[1, 2, 4, 8, 16]) model = keras.Sequential( [ L.InputLayer((100, 2)), tcn, L.Dense(1, activation="sigmoid"), ] ) model.summary() model.compile( loss="binary_crossentropy", optimizer=keras.optimizers.Adam(0.01), metrics=["accuracy", "binary_crossentropy"], ) X = np.random.uniform(-1, 1, size=(5000, 100, 2)) a = np.mean(X[:, :50, 0] + X[:, 50:, 1], axis=1) b = np.mean(X[:, :50, 1] + X[:, 50:, 0], axis=1) y = a < b model.fit(X, y, batch_size=100, epochs=10, validation_split=0.1) class TCN(Module): def __init__(self, encoding_dim, kernel_size, dilation_rates, **kwargs): super().__init__(**kwargs) self.frontend = L.Dense(encoding_dim, kernel_regularizer="l2", name="frontend") self.tcn_steps = LayerBlock(name="steps") for i, d in enumerate(dilation_rates): step = LayerBlock(residual=True, name=f"step{i + 1}") step.add( L.Conv1D( encoding_dim, kernel_size, dilation_rate=d, padding="same", activation="elu", kernel_constraint=keras.constraints.MaxNorm(1), ) ) self.tcn_steps.add(step) self.pool = L.GlobalAveragePooling1D(name="pool") def build(self, input_shape): self.frontend.build(input_shape) encoding_shape = self.frontend.compute_output_shape(input_shape) self.tcn_steps.build(encoding_shape) self.pool.build(encoding_shape) super().build(input_shape) def compute_output_shape(self, input_shape): return input_shape[0], self.frontend.units def call(self, x): return self.pool(self.tcn_steps(self.frontend(x))) if __name__ == "__main__": main()
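# A small side calculation (not part of the model above): the receptive field of a stack of
# dilated convolutions is roughly 1 + (kernel_size - 1) * sum(dilation_rates), so with
# kernel_size=5 and dilations [1, 2, 4, 8, 16] the TCN above can cover the full 100-step input.
kernel_size = 5
dilation_rates = [1, 2, 4, 8, 16]
receptive_field = 1 + (kernel_size - 1) * sum(dilation_rates)
print(receptive_field)  # 125, which is >= the 100 input timesteps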
3936237e41a796d8b7cea9c0aef0a060bba62c82
97d5efaf0e15c537d4380ae3b61b88ee3d8e84ab
/MiSeguroVirtualBackend/insurances/migrations/0013_userpolicy_adviser_cellphone.py
01562d2fd74b1024f2331ea628035c909f6ab024
[]
no_license
dmontoya1/mi-seguro-virtual
af49f0d4492264cea23b6d50a2a2b27c9816e843
6e14fb5e38b3a7192e532a46b842d6a2f80d5ea7
refs/heads/master
2023-05-06T07:43:16.335977
2019-04-03T17:57:32
2019-04-03T17:57:32
371,432,047
0
0
null
null
null
null
UTF-8
Python
false
false
490
py
# Generated by Django 2.0.6 on 2018-12-18 03:36 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('insurances', '0012_auto_20181206_2028'), ] operations = [ migrations.AddField( model_name='userpolicy', name='adviser_cellphone', field=models.CharField(blank=True, help_text='Agregar en caso de que aplique', max_length=255, verbose_name='Celular asesor'), ), ]
5bbb725cb6a7f41b724a108f31fc041271cc5ebc
41ea088695ed956ef8c6e34ace4d8ab19c8b4352
/XDG_CACHE_HOME/Microsoft/Python Language Server/stubs.v1/ebnjyiNMGOHuosJ_EE7SGKALWfhDqU3P4hGf55ouVTM=/_sparsetools.cpython-37m-x86_64-linux-gnu.pyi
d5f00fb6c5fb347fd16db47ae3c4ea852c2c1bd8
[]
no_license
ljbelenky/decline
d5c1d57fd927fa6a8ea99c1e08fedbeb83170d01
432ef82a68168e4ac8635a9386af2aa26cd73eef
refs/heads/master
2021-06-18T17:01:46.969491
2021-04-26T18:34:55
2021-04-26T18:34:55
195,559,200
0
0
null
null
null
null
UTF-8
Python
false
false
2,625
pyi
__doc__ = None __file__ = '/home/land/.local/lib/python3.7/site-packages/scipy/sparse/_sparsetools.cpython-37m-x86_64-linux-gnu.so' __name__ = 'scipy.sparse._sparsetools' __package__ = 'scipy.sparse' def bsr_diagonal(): pass def bsr_eldiv_bsr(): pass def bsr_elmul_bsr(): pass def bsr_ge_bsr(): pass def bsr_gt_bsr(): pass def bsr_le_bsr(): pass def bsr_lt_bsr(): pass def bsr_matmat_pass2(): pass def bsr_matvec(): pass def bsr_matvecs(): pass def bsr_maximum_bsr(): pass def bsr_minimum_bsr(): pass def bsr_minus_bsr(): pass def bsr_ne_bsr(): pass def bsr_plus_bsr(): pass def bsr_scale_columns(): pass def bsr_scale_rows(): pass def bsr_sort_indices(): pass def bsr_tocsr(): pass def bsr_transpose(): pass def coo_matvec(): pass def coo_tocsr(): pass def coo_todense(): pass def cs_graph_components(): pass def csc_diagonal(): pass def csc_eldiv_csc(): pass def csc_elmul_csc(): pass def csc_ge_csc(): pass def csc_gt_csc(): pass def csc_le_csc(): pass def csc_lt_csc(): pass def csc_matmat_pass1(): pass def csc_matmat_pass2(): pass def csc_matvec(): pass def csc_matvecs(): pass def csc_maximum_csc(): pass def csc_minimum_csc(): pass def csc_minus_csc(): pass def csc_ne_csc(): pass def csc_plus_csc(): pass def csc_tocsr(): pass def csr_column_index1(): pass def csr_column_index2(): pass def csr_count_blocks(): pass def csr_diagonal(): pass def csr_eldiv_csr(): pass def csr_eliminate_zeros(): pass def csr_elmul_csr(): pass def csr_ge_csr(): pass def csr_gt_csr(): pass def csr_has_canonical_format(): pass def csr_has_sorted_indices(): pass def csr_le_csr(): pass def csr_lt_csr(): pass def csr_matmat_pass1(): pass def csr_matmat_pass2(): pass def csr_matvec(): pass def csr_matvecs(): pass def csr_maximum_csr(): pass def csr_minimum_csr(): pass def csr_minus_csr(): pass def csr_ne_csr(): pass def csr_plus_csr(): pass def csr_row_index(): pass def csr_row_slice(): pass def csr_sample_offsets(): pass def csr_sample_values(): pass def csr_scale_columns(): pass def csr_scale_rows(): pass def csr_sort_indices(): pass def csr_sum_duplicates(): pass def csr_tobsr(): pass def csr_tocsc(): pass def csr_todense(): pass def dia_matvec(): pass def expandptr(): pass def get_csr_submatrix(): pass def test_throw_error(): pass
4b1eb52decc3b5781a44abbeb0ef755e29c9aa0e
c84a561927ff9c6712e521c3448531f4992f41fb
/AlgorithmicHeights/INV/inv.py
7999bdddb37a864f597dba8e6627368d857f4743
[]
no_license
Meng-Gen/rosalind
55c174005807d0fc8703e62f7358f4ed205f977d
3cf5e0ee1536e3e762ddd5354b8da4c8d378a640
refs/heads/master
2020-05-13T15:47:13.504360
2013-12-29T12:15:27
2013-12-29T12:15:27
15,453,371
3
2
null
null
null
null
UTF-8
Python
false
false
1,579
py
import sys

def read_dataset():
    lines = [line.strip() for line in sys.stdin.readlines()]
    n = int(lines[0])
    array = list(map(int, lines[1].split()))
    assert(n == len(array))
    return n, array

class CountingInversionProblem():
    def __init__(self, size, array):
        self.size = size
        self.array = [None] + array

    def solve(self):
        return self.count(1, self.size)

    def count(self, left, right):
        if left >= right:
            return 0
        middle = (left + right) // 2
        return self.count(left, middle) + self.count(middle + 1, right) + self.merge(left, middle, right)

    def merge(self, left, middle, right):
        m = middle - left + 1
        n = right - middle
        L = [None] + self.array[left:middle+1]
        R = [None] + self.array[middle+1:right+1]
        rv = 0
        i, j, k = 1, 1, left
        while i <= m and j <= n:
            if L[i] <= R[j]:
                self.array[k] = L[i]
                i += 1
                rv += (j - 1)
            else:
                self.array[k] = R[j]
                j += 1
            k += 1
        if i > m:
            for y in range(j, n + 1):
                self.array[middle + y] = R[y]
        else:
            for x in range(i, m + 1):
                self.array[right - m + x] = L[x]
            rv += (j - 1)*(m - i + 1)
        return rv

def main():
    n, array = read_dataset()
    problem = CountingInversionProblem(n, array)
    print(problem.solve())

if __name__ == '__main__':
    sys.exit(main())
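# A small self-contained check of the inversion counter above, bypassing stdin;
# [2, 1, 3, 5, 4] has exactly two inversions: (2, 1) and (5, 4).
problem = CountingInversionProblem(5, [2, 1, 3, 5, 4])
print(problem.solve())  # expected: 2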
f955924de40322655374d0ebcb1737d4c9f84630
db0e8aa3a92a30c9b1cc8da03725e951ff64f3f1
/app1/forms.py
744896064b952f02b93b044a8985999dd061a2d9
[ "BSD-3-Clause" ]
permissive
shrey-c/DataLeakageDjango
ffeef61caa347520747fc70cf3f7f8b84a9610cf
a827c5a09e5501921f9fb97b656755671238dd63
refs/heads/master
2022-11-30T03:30:12.313025
2020-07-12T06:47:44
2020-07-12T06:47:44
242,569,637
6
1
BSD-3-Clause
2022-11-22T05:20:22
2020-02-23T18:33:04
Python
UTF-8
Python
false
false
1,729
py
from django import forms from app1.models import Document, DetectorUpload class ChangepwdForm(forms.Form): def __init__(self, *args, **kwargs): super(ChangepwdForm, self).__init__(*args, **kwargs) self.fields['current'].widget.attrs = { 'class' : 'form-control', 'placeholder' : 'Current Password' } self.fields['new'].widget.attrs = { 'class' : 'form-control', 'placeholder' : 'New Password' } self.fields['reenter'].widget.attrs = { 'class' : 'form-control', 'placeholder' : 'Re-enter Password' } current = forms.CharField(max_length=50, widget=forms.PasswordInput) new = forms.CharField(max_length=50, widget=forms.PasswordInput) reenter = forms.CharField(max_length=50, widget=forms.PasswordInput) class DocumentForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(DocumentForm, self).__init__(*args, **kwargs) self.fields['title'].widget.attrs = { 'class': 'form-control', 'placeholder': 'title' } self.fields['description'].widget.attrs = { 'class': 'form-control', 'placeholder': 'description' } self.fields['accesslevel'].widget.attrs = { 'class': 'form-control', 'placeholder': 'accesslevel', } title = forms.CharField(max_length=50) description = forms.CharField(max_length=500) accesslevel = forms.CharField(max_length=50) document = forms.FileField() class Meta: model = Document fields = ('title','description', 'accesslevel', 'document') class DetectorUploadForm(forms.ModelForm): document = forms.FileField() class Meta: model = DetectorUpload fields = {'document'}
a717be30c5eafe7027a31daef6d7c4b751ab3056
7ef2308e51d1d5700fbd092177ee15e2a03ebdd8
/WorkLean/Python/Scrapy/testCrawler1_0/testCrawler1_0/settings.py
9a35f942f04500c15cbc5e210353e021980e8568
[]
no_license
STAWZW/STAWZW1.0
741002eb35c2883e5739fee8d14ff430e9622c01
a835ac27aba17f968116e321bd201b26c9fb3578
refs/heads/master
2020-07-21T20:21:59.753992
2019-09-26T09:21:28
2019-09-26T09:21:28
206,965,347
2
0
null
null
null
null
UTF-8
Python
false
false
8,037
py
# -*- coding: utf-8 -*- # import random # Scrapy settings for testCrawler1_0 project # # For simplicity, this file contains only settings considered important or # commonly used. You can find more settings consulting the documentation: # # https://doc.scrapy.org/en/latest/topics/settings.html # https://doc.scrapy.org/en/latest/topics/downloader-middleware.html # https://doc.scrapy.org/en/latest/topics/spider-middleware.html BOT_NAME = 'testCrawler1_0' SPIDER_MODULES = ['testCrawler1_0.spiders'] NEWSPIDER_MODULE = 'testCrawler1_0.spiders' # 用户自定义代理库 # USER_AGENT_LIST = [ # "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1", # "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11", # "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6", # "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6", # "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1", # "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5", # "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5", # "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3", # "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3", # "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3", # "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3", # "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3", # "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3", # "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3", # "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3", # "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3", # "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24", # "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24" # ] # USER_AGENT = random.choice(USER_AGENT_LIST) #每次运行爬虫会使用不同的用户代理,但每次运行中的请求都是不变的 # 用户自定义I代理P池 # 免费代理IP[西刺]是你的好选择!(手滑) 网址:https://www.xicidaili.com/wt IPPOOL_LIST = [ {"ipaddr": "124.16.75.212:8080"}, {"ipaddr": "101.231.234.38:8080"}, {"ipaddr": "218.64.69.79:8080"}, {"ipaddr": "144.123.70.252:9999"}, {"ipaddr": "113.121.21.199:9999"}, {"ipaddr": "171.35.161.147:9999"}, {"ipaddr": "27.204.84.42:9999"}, ] # 是否服从 robots.txt 规则,服从为Teur,不服从为False,服从规则有些网站是爬取不到的 ROBOTSTXT_OBEY = False # Configure maximum concurrent requests performed by Scrapy (default: 16) #CONCURRENT_REQUESTS = 32 # Configure a delay for requests for the same website (default: 0) # See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay # See also autothrottle settings and docs #DOWNLOAD_DELAY = 3 # The download delay setting will honor only one of: #CONCURRENT_REQUESTS_PER_DOMAIN = 16 #CONCURRENT_REQUESTS_PER_IP = 16 # Disable cookies (enabled by default) #COOKIES_ENABLED = False # Disable Telnet Console (enabled by 
default) #TELNETCONSOLE_ENABLED = False # Override the default request headers: #DEFAULT_REQUEST_HEADERS = { # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', # 'Accept-Language': 'en', #} # Enable or disable spider middlewares # See https://doc.scrapy.org/en/latest/topics/spider-middleware.html #SPIDER_MIDDLEWARES = { # 'testCrawler1_0.middlewares.Testcrawler10SpiderMiddleware': 543, #} # Enable or disable downloader middlewares # See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html DOWNLOADER_MIDDLEWARES = { # 'testCrawler1_0.middlewares.Testcrawler10DownloaderMiddleware': 543, # 自定义IP代理池中间件,优先级要高于HttpProxyMiddleware中间件 'testCrawler1_0.middlewares.IPPOOLS': 747, 'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 748, # 自定义用户代理池中间件,优先级要高于UserAgentMiddleware中间件 'testCrawler1_0.middlewares.USERAGENTS': 749, 'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': 750, # 这里要替换为自己的项目名称,重写的优先级一定要高(数字仅代表优先级,数字越小,优先级越高) 'scrapy_splash.SplashCookiesMiddleware': 744, 'scrapy_splash.SplashMiddleware': 745, 'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810, } HTTPCACHE_ENABLED = True HTTPCACHE_EXPIRATION_SECS = 0 HTTPCACHE_DIR = 'httpcache' SPLASH_URL = "http://192.168.99.100:8050/" # 自己安装的docker里的splash位置 DUPEFILTER_CLASS = "scrapy_splash.SplashAwareDupeFilter" HTTPCACHE_STORAGE = 'scrapy_splash.SplashAwareFSCacheStorage' # Enable or disable extensions # See https://doc.scrapy.org/en/latest/topics/extensions.html #EXTENSIONS = { # 'scrapy.extensions.telnet.TelnetConsole': None, #} # Configure item pipelines # See https://doc.scrapy.org/en/latest/topics/item-pipeline.html ITEM_PIPELINES = { 'testCrawler1_0.pipelines.Testcrawler10Pipeline': 201, 'testCrawler1_0.pipelines.Testcrawler10ImagePipeline': 200, } IMAGES_STORE = 'D:/GitRepository/STAWZW2.0/WorkLean/Python/Scrapy/core-scrapy-master/img/' # 图片存储路径 IMAGES_URLS_FIELD = "image_urls" # 对应item里面设定的字段,取到图片的url IMAGES_RESULT_FIELD = "image_path" # 30 days of delay for images expiration IMAGES_EXPIRES = 30 # # 图片缩略图 # IMAGES_THUMBS = { # 'small': (50, 50), # 'big': (270, 270), # } # # 图片过滤器,最小高度和宽度 # IMAGES_MIN_HEIGHT = 110 # IMAGES_MIN_WIDTH = 110 # Enable and configure the AutoThrottle extension (disabled by default) # See https://doc.scrapy.org/en/latest/topics/autothrottle.html #AUTOTHROTTLE_ENABLED = True # The initial download delay #AUTOTHROTTLE_START_DELAY = 5 # The maximum download delay to be set in case of high latencies #AUTOTHROTTLE_MAX_DELAY = 60 # The average number of requests Scrapy should be sending in parallel to # each remote server #AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0 # Enable showing throttling stats for every response received: #AUTOTHROTTLE_DEBUG = False # Enable and configure HTTP caching (disabled by default) # See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings #HTTPCACHE_ENABLED = True #HTTPCACHE_EXPIRATION_SECS = 0 #HTTPCACHE_DIR = 'httpcache' #HTTPCACHE_IGNORE_HTTP_CODES = [] # HTTPERROR_ALLOWED_CODES ——> HTTP请求允许的错误:[code] HTTPERROR_ALLOWED_CODES = [301] #HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage' # # 是否启用日志 # LOG_ENABLED=True # # 日志使用的编码 # LOG_ENCODING='utf-8' # # 日志文件(文件名) # LOG_FILE='testScrapyLog.log' # # 日志格式 # LOG_FORMAT='%(asctime)s [%(name)s] %(levelname)s: %(message)s' # # 日志时间格式 # LOG_DATEFORMAT='%Y-%m-%d %H:%M:%S' # # 日志级别 CRITICAL, ERROR, WARNING, INFO, DEBUG # LOG_LEVEL='DEBUG' # # 如果等于True,所有的标准输出(包括错误)都会重定向到日志,例如:print('hello') # 
LOG_STDOUT=True # # 如果等于True,日志仅仅包含根路径,False显示日志输出组件 # LOG_SHORT_NAMES=False
b03811a2a5a5661848fbf370e6bba4eeb45fd36a
584e9c42e6240b9facc866703a6f26b06773df94
/TwilioSendGrid/stressful_subject.py
e3f8e396652697024e58f215a4cea54ffaf77bc9
[]
no_license
anton-dovnar/checkio
48fbaf84c244b0fca7bed5cf7f34179cf850adf9
10aed757ec36f182871a03ed8c9e73319cc8824a
refs/heads/master
2023-03-24T16:23:39.524060
2021-03-12T13:07:04
2021-03-12T13:07:04
null
0
0
null
null
null
null
UTF-8
Python
false
false
756
py
#!/home/fode4cun/.local/share/virtualenvs/checkio-ufRDicT7/bin/checkio --domain=py run stressful-subject # # END_DESC import re def is_stressful(subj): """ recognize stressful subject """ if subj.isupper() or subj.endswith('!!!'): return True patterns = [r'(\b[help\!\-\.]{4,}\b)', r'(\b[asap\!\-\.]{4,}\b)', r'(\b[urgent\!\-\.]{4,}\b)'] for pattern in patterns: if re.search(pattern, subj, flags=re.IGNORECASE): return True return False if __name__ == '__main__': #These "asserts" are only for self-checking and not necessarily for auto-testing assert is_stressful("Hi") == False, "First" assert is_stressful("I neeed HELP") == True, "Second" print('Done! Go Check it!')
e12707c00339249c29f18cde9159c083556c9074
434fe69daa053ef68e11b029ebb04cde69b76ee8
/pysc2/bin/play_vs_agent.py
840a2e86b0a187314283069a5c088fbd234367b5
[ "Apache-2.0" ]
permissive
dorthrithil/pysc2
6b31e4f456015d3fc863cb44fb12b8034819f4a4
e3c561b20b78a57ed9cbcbb76157fcffb7b1bbb4
refs/heads/master
2020-03-19T02:04:10.123451
2018-08-01T07:08:48
2018-08-01T07:08:48
135,596,142
0
2
Apache-2.0
2018-08-08T09:59:25
2018-05-31T14:39:57
Python
UTF-8
Python
false
false
10,085
py
#!/usr/bin/python # Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Play as a human against an agent by setting up a LAN game. This needs to be called twice, once for the human, and once for the agent. The human plays on the host. There you run it as: $ python -m pysc2.bin.play_vs_agent --human --map <map> --remote <agent ip> And on the machine the agent plays on: $ python -m pysc2.bin.play_vs_agent --agent <import path> The `--remote` arg is used to create an SSH tunnel to the remote agent's machine, so can be dropped if it's running on the same machine. SC2 is limited to only allow LAN games on localhost, so we need to forward the ports between machines. SSH is used to do this with the `--remote` arg. If the agent is on the same machine as the host, this arg can be dropped. SSH doesn't forward UDP, so this also sets up a UDP proxy. As part of that it sets up a TCP server that is also used as a settings server. Note that you won't have an opportunity to give ssh a password, so you must use ssh keys for authentication. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import importlib from absl import logging import platform import sys import time from absl import app from absl import flags import portpicker from pysc2 import maps from pysc2 import run_configs from pysc2.env import lan_sc2_env from pysc2.env import run_loop from pysc2.env import sc2_env from pysc2.lib import renderer_human from s2clientprotocol import sc2api_pb2 as sc_pb FLAGS = flags.FLAGS flags.DEFINE_bool("render", platform.system() == "Linux", "Whether to render with pygame.") flags.DEFINE_bool("realtime", False, "Whether to run in realtime mode.") flags.DEFINE_string("agent", "pysc2.agents.random_agent.RandomAgent", "Which agent to run, as a python path to an Agent class.") flags.DEFINE_enum("agent_race", "random", sc2_env.Race._member_names_, # pylint: disable=protected-access "Agent's race.") flags.DEFINE_float("fps", 22.4, "Frames per second to run the game.") flags.DEFINE_integer("step_mul", 8, "Game steps per agent step.") flags.DEFINE_integer("feature_screen_size", 84, "Resolution for screen feature layers.") flags.DEFINE_integer("feature_minimap_size", 64, "Resolution for minimap feature layers.") flags.DEFINE_integer("rgb_screen_size", 256, "Resolution for rendered screen.") flags.DEFINE_integer("rgb_minimap_size", 128, "Resolution for rendered minimap.") flags.DEFINE_enum("action_space", "FEATURES", sc2_env.ActionSpace._member_names_, # pylint: disable=protected-access "Which action space to use. Needed if you take both feature " "and rgb observations.") flags.DEFINE_bool("use_feature_units", False, "Whether to include feature units.") flags.DEFINE_enum("user_race", "random", sc2_env.Race._member_names_, # pylint: disable=protected-access "User's race.") flags.DEFINE_string("host", "127.0.0.1", "Game Host. Can be 127.0.0.1 or ::1") flags.DEFINE_integer( "config_port", 14380, "Where to set/find the config port. 
The host starts a tcp server to share " "the config with the client, and to proxy udp traffic if played over an " "ssh tunnel. This sets that port, and is also the start of the range of " "ports used for LAN play.") flags.DEFINE_string("remote", None, "Where to set up the ssh tunnels to the client.") flags.DEFINE_string("map", None, "Name of a map to use to play.") flags.DEFINE_bool("human", False, "Whether to host a game as a human.") def main(unused_argv): if FLAGS.human: human() else: agent() def agent(): """Run the agent, connecting to a (remote) host started independently.""" agent_module, agent_name = FLAGS.agent.rsplit(".", 1) agent_cls = getattr(importlib.import_module(agent_module), agent_name) logging.info("Starting agent:") with lan_sc2_env.LanSC2Env( host=FLAGS.host, config_port=FLAGS.config_port, race=sc2_env.Race[FLAGS.agent_race], step_mul=FLAGS.step_mul, agent_interface_format=sc2_env.parse_agent_interface_format( feature_screen=FLAGS.feature_screen_size, feature_minimap=FLAGS.feature_minimap_size, rgb_screen=FLAGS.rgb_screen_size, rgb_minimap=FLAGS.rgb_minimap_size, action_space=FLAGS.action_space, use_feature_units=FLAGS.use_feature_units), visualize=FLAGS.render) as env: agents = [agent_cls()] logging.info("Connected, starting run_loop.") try: run_loop.run_loop(agents, env) except lan_sc2_env.RestartException: pass logging.info("Done.") def human(): """Run a host which expects one player to connect remotely.""" run_config = run_configs.get() map_inst = maps.get(FLAGS.map) if not FLAGS.rgb_screen_size or not FLAGS.rgb_minimap_size: logging.info("Use --rgb_screen_size and --rgb_minimap_size if you want rgb " "observations.") ports = [FLAGS.config_port + p for p in range(5)] # tcp + 2 * num_players if not all(portpicker.is_port_free(p) for p in ports): sys.exit("Need 5 free ports after the config port.") proc = None ssh_proc = None tcp_conn = None udp_sock = None try: proc = run_config.start(extra_ports=ports[1:], timeout_seconds=300, host=FLAGS.host, window_loc=(50, 50)) tcp_port = ports[0] settings = { "remote": FLAGS.remote, "game_version": proc.version.game_version, "realtime": FLAGS.realtime, "map_name": map_inst.name, "map_path": map_inst.path, "map_data": map_inst.data(run_config), "ports": { "server": {"game": ports[1], "base": ports[2]}, "client": {"game": ports[3], "base": ports[4]}, } } create = sc_pb.RequestCreateGame( realtime=settings["realtime"], local_map=sc_pb.LocalMap(map_path=settings["map_path"])) create.player_setup.add(type=sc_pb.Participant) create.player_setup.add(type=sc_pb.Participant) controller = proc.controller controller.save_map(settings["map_path"], settings["map_data"]) controller.create_game(create) if FLAGS.remote: ssh_proc = lan_sc2_env.forward_ports( FLAGS.remote, proc.host, [settings["ports"]["client"]["base"]], [tcp_port, settings["ports"]["server"]["base"]]) print("-" * 80) print("Join: play_vs_agent --host %s --config_port %s" % (proc.host, tcp_port)) print("-" * 80) tcp_conn = lan_sc2_env.tcp_server( lan_sc2_env.Addr(proc.host, tcp_port), settings) if FLAGS.remote: udp_sock = lan_sc2_env.udp_server( lan_sc2_env.Addr(proc.host, settings["ports"]["client"]["game"])) lan_sc2_env.daemon_thread( lan_sc2_env.tcp_to_udp, (tcp_conn, udp_sock, lan_sc2_env.Addr(proc.host, settings["ports"]["server"]["game"]))) lan_sc2_env.daemon_thread(lan_sc2_env.udp_to_tcp, (udp_sock, tcp_conn)) join = sc_pb.RequestJoinGame() join.shared_port = 0 # unused join.server_ports.game_port = settings["ports"]["server"]["game"] join.server_ports.base_port = 
settings["ports"]["server"]["base"] join.client_ports.add(game_port=settings["ports"]["client"]["game"], base_port=settings["ports"]["client"]["base"]) join.race = sc2_env.Race[FLAGS.user_race] if FLAGS.render: join.options.raw = True join.options.score = True if FLAGS.feature_screen_size and FLAGS.feature_minimap_size: fl = join.options.feature_layer fl.width = 24 fl.resolution.x = FLAGS.feature_screen_size fl.resolution.y = FLAGS.feature_screen_size fl.minimap_resolution.x = FLAGS.feature_minimap_size fl.minimap_resolution.y = FLAGS.feature_minimap_size if FLAGS.rgb_screen_size and FLAGS.rgb_minimap_size: join.options.render.resolution.x = FLAGS.rgb_screen_size join.options.render.resolution.y = FLAGS.rgb_screen_size join.options.render.minimap_resolution.x = FLAGS.rgb_minimap_size join.options.render.minimap_resolution.y = FLAGS.rgb_minimap_size controller.join_game(join) if FLAGS.render: renderer = renderer_human.RendererHuman( fps=FLAGS.fps, render_feature_grid=False) renderer.run(run_configs.get(), controller, max_episodes=1) else: # Still step forward so the Mac/Windows renderer works. try: while True: frame_start_time = time.time() if not FLAGS.realtime: controller.step() obs = controller.observe() if obs.player_result: break time.sleep(max(0, frame_start_time - time.time() + 1 / FLAGS.fps)) except KeyboardInterrupt: pass finally: if tcp_conn: tcp_conn.close() if proc: proc.close() if udp_sock: udp_sock.close() if ssh_proc: ssh_proc.terminate() for _ in range(5): if ssh_proc.poll() is not None: break time.sleep(1) if ssh_proc.poll() is None: ssh_proc.kill() ssh_proc.wait() def entry_point(): # Needed so setup.py scripts work. app.run(main) if __name__ == "__main__": app.run(main)
c6756abdb37e1f8b52dd5b35b3118afb8bc40f58
ab825ee0326e98d115b6dc02bbda02b302787d46
/応用編/41_csvファイルの読み書き/モジュール/01_CSVファイルの書き込み.py
bb00d85a58a09d9f502ac4e2f14bf4e26a8d34d9
[]
no_license
holothuria/python_study
295dd7c30a566b5a9688b9196e25bf6e065401a0
7e98090e64d646d23a4189e0efd68c2905b78d04
refs/heads/master
2020-03-23T20:04:38.900368
2019-03-05T12:47:53
2019-03-05T12:47:53
142,019,995
0
0
null
null
null
null
UTF-8
Python
false
false
336
py
import csv csv_file = open('./python.csv', 'w', newline='') writer = csv.writer(csv_file) row = ('python', '-', 'izm', '1') writer.writerow(row) rows = [] rows.append(('python', '-', 'izm', '2')) rows.append(('python', '-', 'izm', '3')) rows.append(('p,y,t,h,o,n', '-', 'i,z,m', '4')) writer.writerows(rows) csv_file.close()
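# A complementary sketch: reading the rows back with csv.reader (same file name as the
# writer above assumes); each row comes back as a list of strings.
import csv

with open('./python.csv', newline='') as csv_file:
    for row in csv.reader(csv_file):
        print(row)  # e.g. ['python', '-', 'izm', '1']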
992f5347a46a745fa991942a9bdb04ea5c918b52
b3d86713ed58e0b7fe3c1191324e36659c0d9d78
/DataScience/ch10/evaluation_data.py
94d29f7149b5f9ebae13989f4ba23eb44c469612
[]
no_license
Kose-i/machine_learning_tutorial
3d6cb30a20d65c66aa6efcba0e693de75791507a
def223fecb459ad1a6e7f9f36b3d733a89efd378
refs/heads/master
2021-07-03T10:37:26.809388
2020-07-27T12:53:19
2020-07-27T12:53:19
174,057,143
0
0
null
null
null
null
UTF-8
Python
false
false
1,989
py
import numpy as np
import numpy.random as random
import scipy as sp
from pandas import Series, DataFrame
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib as mpl
import seaborn as sns
sns.set()

import sklearn
from sklearn.datasets import load_breast_cancer
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.metrics import confusion_matrix

cancer = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(cancer.data, cancer.target, test_size=0.5, random_state=66)

tree = DecisionTreeClassifier(criterion='entropy', max_depth=3, random_state=0)
scores = cross_val_score(tree, cancer.data, cancer.target, cv=5)
print('Cross validation scores: {}'.format(scores))
print('Cross validation scores: {:.3f}+-{:.3f}'.format(scores.mean(), scores.std()))

# AUC / ROC
from sklearn.linear_model import LogisticRegression

model = LogisticRegression(random_state=0)
model.fit(X_train, y_train)
results = pd.DataFrame(model.predict_proba(X_test), columns=cancer.target_names)
results.head()

rates = {}
for threshold in np.linspace(0.01, 0.99, num=50):
    labels = results['benign'].map(lambda x: 1 if x > threshold else 0)
    m = confusion_matrix(y_test, labels)
    rates[threshold] = {'false positive rate': m[0, 1] / m[0, :].sum(),
                        'true positive rate': m[1, 1] / m[1, :].sum()}
pd.DataFrame(rates).T.plot.scatter('false positive rate', 'true positive rate')

from sklearn import svm
from sklearn.metrics import roc_curve, auc

model = svm.SVC(kernel='linear', probability=True, random_state=0)
model.fit(X_train, y_train)
y_pred = model.predict_proba(X_test)[:, 1]
fpr, tpr, thresholds = roc_curve(y_test, y_pred)
roc_auc = auc(fpr, tpr)  # store the score separately so the auc() function is not shadowed

plt.plot(fpr, tpr, color='red', label='ROC curve (area=%.3f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='black', linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.title('Receiver operating characteristic')
plt.legend(loc="best")
plt.show()
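# As a cross-check on the ROC block above, scikit-learn can compute the AUC directly
# from the predicted probabilities; this minimal sketch reuses y_test and y_pred from above.
from sklearn.metrics import roc_auc_score

print('AUC via roc_auc_score: {:.3f}'.format(roc_auc_score(y_test, y_pred)))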
e86714e61dde836eeef750f7a800b133850db443
acad69f0abe162eea0cb13cbe15bfd88f6da08b4
/down-stream-tasks/mmdetection/mmdet/models/losses/ghm_loss.py
bc5eb774eab3c7515868c266182760c92061c911
[ "Apache-2.0" ]
permissive
zhangzjn/EMO
69afcac53800d8b9a390f1214e178e2ca4da3b24
141afbdbce04683790f0699f256327ec420be442
refs/heads/main
2023-08-27T19:04:23.313676
2023-08-15T04:09:55
2023-08-15T04:09:55
584,987,542
139
9
null
null
null
null
UTF-8
Python
false
false
8,136
py
# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn import torch.nn.functional as F from ..builder import LOSSES from .utils import weight_reduce_loss def _expand_onehot_labels(labels, label_weights, label_channels): bin_labels = labels.new_full((labels.size(0), label_channels), 0) inds = torch.nonzero( (labels >= 0) & (labels < label_channels), as_tuple=False).squeeze() if inds.numel() > 0: bin_labels[inds, labels[inds]] = 1 bin_label_weights = label_weights.view(-1, 1).expand( label_weights.size(0), label_channels) return bin_labels, bin_label_weights # TODO: code refactoring to make it consistent with other losses @LOSSES.register_module() class GHMC(nn.Module): """GHM Classification Loss. Details of the theorem can be viewed in the paper `Gradient Harmonized Single-stage Detector <https://arxiv.org/abs/1811.05181>`_. Args: bins (int): Number of the unit regions for distribution calculation. momentum (float): The parameter for moving average. use_sigmoid (bool): Can only be true for BCE based loss now. loss_weight (float): The weight of the total GHM-C loss. reduction (str): Options are "none", "mean" and "sum". Defaults to "mean" """ def __init__(self, bins=10, momentum=0, use_sigmoid=True, loss_weight=1.0, reduction='mean'): super(GHMC, self).__init__() self.bins = bins self.momentum = momentum edges = torch.arange(bins + 1).float() / bins self.register_buffer('edges', edges) self.edges[-1] += 1e-6 if momentum > 0: acc_sum = torch.zeros(bins) self.register_buffer('acc_sum', acc_sum) self.use_sigmoid = use_sigmoid if not self.use_sigmoid: raise NotImplementedError self.loss_weight = loss_weight self.reduction = reduction def forward(self, pred, target, label_weight, reduction_override=None, **kwargs): """Calculate the GHM-C loss. Args: pred (float tensor of size [batch_num, class_num]): The direct prediction of classification fc layer. target (float tensor of size [batch_num, class_num]): Binary class target for each sample. label_weight (float tensor of size [batch_num, class_num]): the value is 1 if the sample is valid and 0 if ignored. reduction_override (str, optional): The reduction method used to override the original reduction method of the loss. Defaults to None. Returns: The gradient harmonized loss. """ assert reduction_override in (None, 'none', 'mean', 'sum') reduction = ( reduction_override if reduction_override else self.reduction) # the target should be binary class label if pred.dim() != target.dim(): target, label_weight = _expand_onehot_labels( target, label_weight, pred.size(-1)) target, label_weight = target.float(), label_weight.float() edges = self.edges mmt = self.momentum weights = torch.zeros_like(pred) # gradient length g = torch.abs(pred.sigmoid().detach() - target) valid = label_weight > 0 tot = max(valid.float().sum().item(), 1.0) n = 0 # n valid bins for i in range(self.bins): inds = (g >= edges[i]) & (g < edges[i + 1]) & valid num_in_bin = inds.sum().item() if num_in_bin > 0: if mmt > 0: self.acc_sum[i] = mmt * self.acc_sum[i] \ + (1 - mmt) * num_in_bin weights[inds] = tot / self.acc_sum[i] else: weights[inds] = tot / num_in_bin n += 1 if n > 0: weights = weights / n loss = F.binary_cross_entropy_with_logits( pred, target, reduction='none') loss = weight_reduce_loss( loss, weights, reduction=reduction, avg_factor=tot) return loss * self.loss_weight # TODO: code refactoring to make it consistent with other losses @LOSSES.register_module() class GHMR(nn.Module): """GHM Regression Loss. 
Details of the theorem can be viewed in the paper `Gradient Harmonized Single-stage Detector <https://arxiv.org/abs/1811.05181>`_. Args: mu (float): The parameter for the Authentic Smooth L1 loss. bins (int): Number of the unit regions for distribution calculation. momentum (float): The parameter for moving average. loss_weight (float): The weight of the total GHM-R loss. reduction (str): Options are "none", "mean" and "sum". Defaults to "mean" """ def __init__(self, mu=0.02, bins=10, momentum=0, loss_weight=1.0, reduction='mean'): super(GHMR, self).__init__() self.mu = mu self.bins = bins edges = torch.arange(bins + 1).float() / bins self.register_buffer('edges', edges) self.edges[-1] = 1e3 self.momentum = momentum if momentum > 0: acc_sum = torch.zeros(bins) self.register_buffer('acc_sum', acc_sum) self.loss_weight = loss_weight self.reduction = reduction # TODO: support reduction parameter def forward(self, pred, target, label_weight, avg_factor=None, reduction_override=None): """Calculate the GHM-R loss. Args: pred (float tensor of size [batch_num, 4 (* class_num)]): The prediction of box regression layer. Channel number can be 4 or 4 * class_num depending on whether it is class-agnostic. target (float tensor of size [batch_num, 4 (* class_num)]): The target regression values with the same size of pred. label_weight (float tensor of size [batch_num, 4 (* class_num)]): The weight of each sample, 0 if ignored. reduction_override (str, optional): The reduction method used to override the original reduction method of the loss. Defaults to None. Returns: The gradient harmonized loss. """ assert reduction_override in (None, 'none', 'mean', 'sum') reduction = ( reduction_override if reduction_override else self.reduction) mu = self.mu edges = self.edges mmt = self.momentum # ASL1 loss diff = pred - target loss = torch.sqrt(diff * diff + mu * mu) - mu # gradient length g = torch.abs(diff / torch.sqrt(mu * mu + diff * diff)).detach() weights = torch.zeros_like(g) valid = label_weight > 0 tot = max(label_weight.float().sum().item(), 1.0) n = 0 # n: valid bins for i in range(self.bins): inds = (g >= edges[i]) & (g < edges[i + 1]) & valid num_in_bin = inds.sum().item() if num_in_bin > 0: n += 1 if mmt > 0: self.acc_sum[i] = mmt * self.acc_sum[i] \ + (1 - mmt) * num_in_bin weights[inds] = tot / self.acc_sum[i] else: weights[inds] = tot / num_in_bin if n > 0: weights /= n loss = weight_reduce_loss( loss, weights, reduction=reduction, avg_factor=tot) return loss * self.loss_weight
e7a6d17a22f5e9f8531f0d8e0c576cff70c8e1eb
3474fd7e1ccd8dd4e0b4da5c67d89694c69ce73c
/batch3/outputs/bbn_yhe.py
2d75b05e4272c8e13788d9378f35e7fd0e971249
[]
no_license
mishakb/ISiTGR
149e1235ed2fff6ee2452b53c23dbe46f5dcf17e
db4f6fed47162de6347b62b3f8ae832e4ffbfdf0
refs/heads/master
2023-01-16T02:42:31.787483
2021-03-12T04:39:18
2021-03-12T04:39:18
176,977,260
27
7
null
2023-01-02T15:19:33
2019-03-21T15:48:31
HTML
UTF-8
Python
false
false
4,578
py
import planckStyle as s import numpy as np import matplotlib.pyplot as plt import sys sys.path.insert(0, r'c:\work\dist\git\camb') from camb.bbn import BBN_table_interpolator BBNstandard = BBN_table_interpolator('PArthENoPE_880.2_standard.dat') # BBN theoretical error sigma_yp_theo = 0.0003 # resolution of the theoretical BBN curve (number of omega_b values) num_ob = 50 # omegab range in the plot ob_min = 0.019 ob_max = 0.025 # yhe range in the plot yp_min = 0.175 yp_max = 0.28 # helium data: Aver et al. 2015 aver_mean = 0.2449 aver_sigma = 0.004 # helium data: Serenelli and Basu 2010 sere_minus = 0.294 sere_plus = yp_max sere_b = np.zeros(2, dtype='float') sere_y1 = np.zeros(2, dtype='float') sere_y2 = np.zeros(2, dtype='float') sere_b[0] = ob_min sere_b[1] = ob_max sere_y1[0] = sere_minus sere_y1[1] = sere_minus sere_y2[0] = sere_plus sere_y2[1] = sere_plus labels = [s.planckall, s.planckall + "+lensing+BAO"] datatag = [s.defdata_all, s.defdata_all + '_lensing_BAO'] ########### ombh2 -Yhe ############# g = s.getSinglePlotter() colors = g.settings.solid_colors[3:0:-1] del colors[1] bbn_b = np.arange(ob_min, ob_max + 0.1, (ob_max - ob_min) / num_ob) bbn_y = np.array([BBNstandard.Y_p(x, 0) for x in bbn_b]) bbn_y1 = bbn_y - sigma_yp_theo bbn_y2 = bbn_y + sigma_yp_theo g.add_y_bands(aver_mean, aver_sigma) # plt.fill_between(sere_b, sere_y1, yp_max, alpha=0.07, color='gray') # plt.plot(sere_b, sere_y1, alpha=0.2, color='gray', linestyle='-') plt.text(0.0193, 0.249, "Aver et al. (2015)", fontsize=7.) # plt.text(0.0183, 0.325, "Excluded by Serenelli \& Basu (2010)", fontsize=6.5) bbn_y1 = bbn_y - 2 * sigma_yp_theo bbn_y2 = bbn_y + 2 * sigma_yp_theo plt.fill_between(bbn_b, bbn_y1, bbn_y2, alpha=0.4, color='green', lw=0, zorder=10) bbn_y1 = bbn_y - sigma_yp_theo bbn_y2 = bbn_y + sigma_yp_theo plt.fill_between(bbn_b, bbn_y1, bbn_y2, alpha=0.9, color='green', lw=0, zorder=11) # plt.plot(bbn_b, bbn_y1, color='green', linestyle='solid') # plt.plot(bbn_b, bbn_y2, color='green', linestyle='solid') roots = [g.getRoot('yhe', d) for d in datatag] g.settings.legend_fontsize = 8 g.plot_2d(roots, 'omegabh2', 'YpBBN', filled=True, lims=[ob_min + 0.0001, ob_max, yp_min, yp_max]) g.add_legend(labels, legend_loc='lower left', colored_text=False) # plt.gca().set_yticks([0.2, 0.25, 0.3]) plt.gca().annotate('Standard BBN', xy=(0.0242, 0.249), xycoords='data', xytext=(-35, -30), textcoords='offset points', arrowprops=dict(arrowstyle="->", connectionstyle="arc3,rad=.2"), fontsize=8. ) g.export() ########### Neff -Yhe ############# g = s.getSinglePlotter() N_min = 0.01 N_max = 5 Neff = np.arange(N_min, N_max + 0.1, 0.1) Nrange = [N_min, N_max] g.add_y_bands(aver_mean, aver_sigma) plt.fill_between(Nrange, Neff[-1], sere_y1, alpha=0.07, color='gray') plt.plot(Nrange, sere_y1, alpha=0.2, color='gray', linestyle='-') plt.text(0.17, 0.242, "Aver et al. 
(2015)", fontsize=6) plt.text(0.17, 0.337, "Excluded by Serenelli \& Basu (2010)", fontsize=6) roots = [g.getRoot('nnu_yhe', d) for d in datatag] # roots += ['base_nnu_yhe_' + s.defdata_all + '_Aver15'] g.plot_2d(roots, 'nnu', 'YpBBN', filled=True, lims=[0, N_max, yp_min, yp_max]) g.add_2d_contours('base_nnu_yhe_' + s.defdata_all + '_Aver15_post_BAO_lensing', 'nnu', 'YpBBN', filled=False) ombh2mean = 0.0224 bbn_y = np.array([BBNstandard.Y_p(ombh2mean, x - 3.046) for x in Neff]) bbn_y1 = bbn_y - 2 * sigma_yp_theo bbn_y2 = bbn_y + 2 * sigma_yp_theo plt.fill_between(Neff, bbn_y1, bbn_y2, alpha=0.4, color='green', lw=0) bbn_y1 = bbn_y - sigma_yp_theo bbn_y2 = bbn_y + sigma_yp_theo plt.fill_between(Neff, bbn_y1, bbn_y2, alpha=0.9, color='green', lw=0) # plt.plot(Neff, bbn_y1, color='green', linestyle='solid') # plt.plot(Neff, bbn_y2, color='green', linestyle='solid') labels = labels[:1] + ['+lensing+BAO'] g.add_legend(labels, legend_loc='lower left', colored_text=True, fontsize=8) g.add_x_marker(3.046) plt.gca().set_yticks([0.15, 0.2, 0.25, 0.3, 0.35]) # g.rotate_yticklabels() plt.gca().annotate('Standard BBN\n' + r'($\Omega_{\rm b} h^2=0.0224$)', xy=(4.5, 0.262), xycoords='data', xytext=(-44, 30), textcoords='offset points', arrowprops=dict(arrowstyle="->", connectionstyle="arc3,rad=.2"), fontsize=6.5 ) g.export(tag='neff')
048d84b86b4c0b2d6195aab0d20755095d6863f5
b545bc57f3359a42b034078e3acb3e4d0c77a971
/src/azure-firewall/azext_firewall/aaz/latest/network/firewall/policy/_update.py
5e4c29b86003176bc8eddc08a81d5d110e491b50
[ "LicenseRef-scancode-generic-cla", "MIT" ]
permissive
ShichaoQiu/azure-cli-extensions
d91672b3f7bf2ffae4f1072830e99632b66cf754
8134c01681963387a496b5d4627527a5ed044e19
refs/heads/main
2023-08-24T09:09:55.689202
2023-08-15T06:08:35
2023-08-15T06:08:35
230,201,126
0
1
MIT
2020-12-11T07:14:51
2019-12-26T05:33:04
Python
UTF-8
Python
false
false
33,631
py
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # # Code generated by aaz-dev-tools # -------------------------------------------------------------------------------------------- # pylint: skip-file # flake8: noqa from azure.cli.core.aaz import * @register_command( "network firewall policy update", ) class Update(AAZCommand): """Update an Azure firewall policy. """ _aaz_info = { "version": "2022-01-01", "resources": [ ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/firewallpolicies/{}", "2022-01-01"], ] } AZ_SUPPORT_NO_WAIT = True AZ_SUPPORT_GENERIC_UPDATE = True def _handler(self, command_args): super()._handler(command_args) return self.build_lro_poller(self._execute_operations, self._output) _args_schema = None @classmethod def _build_arguments_schema(cls, *args, **kwargs): if cls._args_schema is not None: return cls._args_schema cls._args_schema = super()._build_arguments_schema(*args, **kwargs) # define Arg Group "" _args_schema = cls._args_schema _args_schema.name = AAZStrArg( options=["-n", "--name"], help="The name of the Firewall Policy.", required=True, id_part="name", ) _args_schema.resource_group = AAZResourceGroupNameArg( required=True, ) _args_schema.sku = AAZStrArg( options=["--sku"], help="SKU of Firewall policy.", is_preview=True, nullable=True, enum={"Basic": "Basic", "Premium": "Premium", "Standard": "Standard"}, ) _args_schema.sql = AAZBoolArg( options=["--sql"], help="A flag to indicate if SQL Redirect traffic filtering is enabled.", is_preview=True, nullable=True, ) _args_schema.threat_intel_mode = AAZStrArg( options=["--threat-intel-mode"], help="The operation mode for Threat Intelligence.", nullable=True, enum={"Alert": "Alert", "Deny": "Deny", "Off": "Off"}, ) _args_schema.tags = AAZDictArg( options=["--tags"], help="Space-separated tags: key[=value] [key[=value] ...]. Use \"\" to clear existing tags.", nullable=True, ) tags = cls._args_schema.tags tags.Element = AAZStrArg( nullable=True, ) # define Arg Group "DNS" _args_schema = cls._args_schema _args_schema.enable_dns_proxy = AAZBoolArg( options=["--enable-dns-proxy"], arg_group="DNS", help="Enable DNS Proxy.", nullable=True, ) _args_schema.dns_servers = AAZListArg( options=["--dns-servers"], arg_group="DNS", help="Space-separated list of DNS server IP addresses.", nullable=True, ) dns_servers = cls._args_schema.dns_servers dns_servers.Element = AAZStrArg( nullable=True, ) # define Arg Group "DnsSettings" # define Arg Group "Identity Instance" _args_schema = cls._args_schema _args_schema.identity_type = AAZStrArg( options=["--identity-type"], arg_group="Identity Instance", help="The type of identity used for the resource. The type 'SystemAssigned, UserAssigned' includes both an implicitly created identity and a set of user assigned identities. The type 'None' will remove any identities from the virtual machine.", nullable=True, enum={"None": "None", "SystemAssigned": "SystemAssigned", "SystemAssigned, UserAssigned": "SystemAssigned, UserAssigned", "UserAssigned": "UserAssigned"}, ) _args_schema.user_assigned_identities = AAZDictArg( options=["--user-assigned-identities"], arg_group="Identity Instance", help="The list of user identities associated with resource. 
The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.", nullable=True, ) user_assigned_identities = cls._args_schema.user_assigned_identities user_assigned_identities.Element = AAZObjectArg( nullable=True, blank={}, ) # define Arg Group "IntrusionDetection" # define Arg Group "Intrustion Detection" _args_schema = cls._args_schema _args_schema.idps_mode = AAZStrArg( options=["--idps-mode"], arg_group="Intrustion Detection", help="IDPS mode.", is_preview=True, nullable=True, enum={"Alert": "Alert", "Deny": "Deny", "Off": "Off"}, ) # define Arg Group "Parameters" # define Arg Group "Properties" # define Arg Group "Snat" _args_schema = cls._args_schema _args_schema.auto_learn_private_ranges = AAZStrArg( options=["--learn-ranges", "--auto-learn-private-ranges"], arg_group="Snat", help="The operation mode for automatically learning private ranges to not be SNAT", nullable=True, enum={"Disabled": "Disabled", "Enabled": "Enabled"}, ) _args_schema.private_ranges = AAZListArg( options=["--private-ranges"], arg_group="Snat", help="List of private IP addresses/IP address ranges to not be SNAT.", nullable=True, ) private_ranges = cls._args_schema.private_ranges private_ranges.Element = AAZStrArg( nullable=True, ) # define Arg Group "TLS Inspection" _args_schema = cls._args_schema _args_schema.key_vault_secret_id = AAZStrArg( options=["--key-vault-secret-id"], arg_group="TLS Inspection", help="Secret Id of (base-64 encoded unencrypted pfx) Secret or Certificate object stored in KeyVault.", is_preview=True, nullable=True, ) _args_schema.cert_name = AAZStrArg( options=["--cert-name"], arg_group="TLS Inspection", help="Name of the CA certificate.", is_preview=True, nullable=True, ) # define Arg Group "Threat Intel Allowlist" _args_schema = cls._args_schema _args_schema.fqdns = AAZListArg( options=["--fqdns"], arg_group="Threat Intel Allowlist", help="Space-separated list of FQDNs.", nullable=True, ) _args_schema.ip_addresses = AAZListArg( options=["--ip-addresses"], arg_group="Threat Intel Allowlist", help="Space-separated list of IPv4 addresses.", nullable=True, ) fqdns = cls._args_schema.fqdns fqdns.Element = AAZStrArg( nullable=True, ) ip_addresses = cls._args_schema.ip_addresses ip_addresses.Element = AAZStrArg( nullable=True, ) return cls._args_schema _args_sub_resource_update = None @classmethod def _build_args_sub_resource_update(cls, _schema): if cls._args_sub_resource_update is not None: _schema.id = cls._args_sub_resource_update.id return cls._args_sub_resource_update = AAZObjectArg( nullable=True, ) sub_resource_update = cls._args_sub_resource_update sub_resource_update.id = AAZStrArg( options=["id"], help="Resource ID.", nullable=True, ) _schema.id = cls._args_sub_resource_update.id def _execute_operations(self): self.pre_operations() self.FirewallPoliciesGet(ctx=self.ctx)() self.pre_instance_update(self.ctx.vars.instance) self.InstanceUpdateByJson(ctx=self.ctx)() self.InstanceUpdateByGeneric(ctx=self.ctx)() self.post_instance_update(self.ctx.vars.instance) yield self.FirewallPoliciesCreateOrUpdate(ctx=self.ctx)() self.post_operations() @register_callback def pre_operations(self): pass @register_callback def post_operations(self): pass @register_callback def pre_instance_update(self, instance): pass @register_callback def post_instance_update(self, instance): pass def _output(self, *args, **kwargs): result = 
self.deserialize_output(self.ctx.vars.instance, client_flatten=True) return result class FirewallPoliciesGet(AAZHttpOperation): CLIENT_TYPE = "MgmtClient" def __call__(self, *args, **kwargs): request = self.make_request() session = self.client.send_request(request=request, stream=False, **kwargs) if session.http_response.status_code in [200]: return self.on_200(session) return self.on_error(session.http_response) @property def url(self): return self.client.format_url( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}", **self.url_parameters ) @property def method(self): return "GET" @property def error_format(self): return "ODataV4Format" @property def url_parameters(self): parameters = { **self.serialize_url_param( "firewallPolicyName", self.ctx.args.name, required=True, ), **self.serialize_url_param( "resourceGroupName", self.ctx.args.resource_group, required=True, ), **self.serialize_url_param( "subscriptionId", self.ctx.subscription_id, required=True, ), } return parameters @property def query_parameters(self): parameters = { **self.serialize_query_param( "api-version", "2022-01-01", required=True, ), } return parameters @property def header_parameters(self): parameters = { **self.serialize_header_param( "Accept", "application/json", ), } return parameters def on_200(self, session): data = self.deserialize_http_content(session) self.ctx.set_var( "instance", data, schema_builder=self._build_schema_on_200 ) _schema_on_200 = None @classmethod def _build_schema_on_200(cls): if cls._schema_on_200 is not None: return cls._schema_on_200 cls._schema_on_200 = AAZObjectType() _UpdateHelper._build_schema_firewall_policy_read(cls._schema_on_200) return cls._schema_on_200 class FirewallPoliciesCreateOrUpdate(AAZHttpOperation): CLIENT_TYPE = "MgmtClient" def __call__(self, *args, **kwargs): request = self.make_request() session = self.client.send_request(request=request, stream=False, **kwargs) if session.http_response.status_code in [202]: return self.client.build_lro_polling( self.ctx.args.no_wait, session, self.on_200_201, self.on_error, lro_options={"final-state-via": "azure-async-operation"}, path_format_arguments=self.url_parameters, ) if session.http_response.status_code in [200, 201]: return self.client.build_lro_polling( self.ctx.args.no_wait, session, self.on_200_201, self.on_error, lro_options={"final-state-via": "azure-async-operation"}, path_format_arguments=self.url_parameters, ) return self.on_error(session.http_response) @property def url(self): return self.client.format_url( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}", **self.url_parameters ) @property def method(self): return "PUT" @property def error_format(self): return "ODataV4Format" @property def url_parameters(self): parameters = { **self.serialize_url_param( "firewallPolicyName", self.ctx.args.name, required=True, ), **self.serialize_url_param( "resourceGroupName", self.ctx.args.resource_group, required=True, ), **self.serialize_url_param( "subscriptionId", self.ctx.subscription_id, required=True, ), } return parameters @property def query_parameters(self): parameters = { **self.serialize_query_param( "api-version", "2022-01-01", required=True, ), } return parameters @property def header_parameters(self): parameters = { **self.serialize_header_param( "Content-Type", "application/json", ), **self.serialize_header_param( "Accept", "application/json", ), } 
return parameters @property def content(self): _content_value, _builder = self.new_content_builder( self.ctx.args, value=self.ctx.vars.instance, ) return self.serialize_content(_content_value) def on_200_201(self, session): data = self.deserialize_http_content(session) self.ctx.set_var( "instance", data, schema_builder=self._build_schema_on_200_201 ) _schema_on_200_201 = None @classmethod def _build_schema_on_200_201(cls): if cls._schema_on_200_201 is not None: return cls._schema_on_200_201 cls._schema_on_200_201 = AAZObjectType() _UpdateHelper._build_schema_firewall_policy_read(cls._schema_on_200_201) return cls._schema_on_200_201 class InstanceUpdateByJson(AAZJsonInstanceUpdateOperation): def __call__(self, *args, **kwargs): self._update_instance(self.ctx.vars.instance) def _update_instance(self, instance): _instance_value, _builder = self.new_content_builder( self.ctx.args, value=instance, typ=AAZObjectType ) _builder.set_prop("identity", AAZObjectType) _builder.set_prop("properties", AAZObjectType, typ_kwargs={"flags": {"client_flatten": True}}) _builder.set_prop("tags", AAZDictType, ".tags") identity = _builder.get(".identity") if identity is not None: identity.set_prop("type", AAZStrType, ".identity_type") identity.set_prop("userAssignedIdentities", AAZDictType, ".user_assigned_identities") user_assigned_identities = _builder.get(".identity.userAssignedIdentities") if user_assigned_identities is not None: user_assigned_identities.set_elements(AAZObjectType, ".") properties = _builder.get(".properties") if properties is not None: properties.set_prop("basePolicy", AAZObjectType) properties.set_prop("dnsSettings", AAZObjectType) properties.set_prop("intrusionDetection", AAZObjectType) properties.set_prop("sku", AAZObjectType) properties.set_prop("snat", AAZObjectType) properties.set_prop("sql", AAZObjectType) properties.set_prop("threatIntelMode", AAZStrType, ".threat_intel_mode") properties.set_prop("threatIntelWhitelist", AAZObjectType) properties.set_prop("transportSecurity", AAZObjectType) dns_settings = _builder.get(".properties.dnsSettings") if dns_settings is not None: dns_settings.set_prop("enableProxy", AAZBoolType, ".enable_dns_proxy") dns_settings.set_prop("servers", AAZListType, ".dns_servers") servers = _builder.get(".properties.dnsSettings.servers") if servers is not None: servers.set_elements(AAZStrType, ".") intrusion_detection = _builder.get(".properties.intrusionDetection") if intrusion_detection is not None: intrusion_detection.set_prop("mode", AAZStrType, ".idps_mode") sku = _builder.get(".properties.sku") if sku is not None: sku.set_prop("tier", AAZStrType, ".sku") snat = _builder.get(".properties.snat") if snat is not None: snat.set_prop("autoLearnPrivateRanges", AAZStrType, ".auto_learn_private_ranges") snat.set_prop("privateRanges", AAZListType, ".private_ranges") private_ranges = _builder.get(".properties.snat.privateRanges") if private_ranges is not None: private_ranges.set_elements(AAZStrType, ".") sql = _builder.get(".properties.sql") if sql is not None: sql.set_prop("allowSqlRedirect", AAZBoolType, ".sql") threat_intel_whitelist = _builder.get(".properties.threatIntelWhitelist") if threat_intel_whitelist is not None: threat_intel_whitelist.set_prop("fqdns", AAZListType, ".fqdns") threat_intel_whitelist.set_prop("ipAddresses", AAZListType, ".ip_addresses") fqdns = _builder.get(".properties.threatIntelWhitelist.fqdns") if fqdns is not None: fqdns.set_elements(AAZStrType, ".") ip_addresses = _builder.get(".properties.threatIntelWhitelist.ipAddresses") if 
ip_addresses is not None: ip_addresses.set_elements(AAZStrType, ".") transport_security = _builder.get(".properties.transportSecurity") if transport_security is not None: transport_security.set_prop("certificateAuthority", AAZObjectType) certificate_authority = _builder.get(".properties.transportSecurity.certificateAuthority") if certificate_authority is not None: certificate_authority.set_prop("keyVaultSecretId", AAZStrType, ".key_vault_secret_id") certificate_authority.set_prop("name", AAZStrType, ".cert_name") tags = _builder.get(".tags") if tags is not None: tags.set_elements(AAZStrType, ".") return _instance_value class InstanceUpdateByGeneric(AAZGenericInstanceUpdateOperation): def __call__(self, *args, **kwargs): self._update_instance_by_generic( self.ctx.vars.instance, self.ctx.generic_update_args ) class _UpdateHelper: """Helper class for Update""" @classmethod def _build_schema_sub_resource_update(cls, _builder): if _builder is None: return _builder.set_prop("id", AAZStrType, ".id") _schema_firewall_policy_read = None @classmethod def _build_schema_firewall_policy_read(cls, _schema): if cls._schema_firewall_policy_read is not None: _schema.etag = cls._schema_firewall_policy_read.etag _schema.id = cls._schema_firewall_policy_read.id _schema.identity = cls._schema_firewall_policy_read.identity _schema.location = cls._schema_firewall_policy_read.location _schema.name = cls._schema_firewall_policy_read.name _schema.properties = cls._schema_firewall_policy_read.properties _schema.tags = cls._schema_firewall_policy_read.tags _schema.type = cls._schema_firewall_policy_read.type return cls._schema_firewall_policy_read = _schema_firewall_policy_read = AAZObjectType() firewall_policy_read = _schema_firewall_policy_read firewall_policy_read.etag = AAZStrType( flags={"read_only": True}, ) firewall_policy_read.id = AAZStrType() firewall_policy_read.identity = AAZObjectType() firewall_policy_read.location = AAZStrType() firewall_policy_read.name = AAZStrType( flags={"read_only": True}, ) firewall_policy_read.properties = AAZObjectType( flags={"client_flatten": True}, ) firewall_policy_read.tags = AAZDictType() firewall_policy_read.type = AAZStrType( flags={"read_only": True}, ) identity = _schema_firewall_policy_read.identity identity.principal_id = AAZStrType( serialized_name="principalId", flags={"read_only": True}, ) identity.tenant_id = AAZStrType( serialized_name="tenantId", flags={"read_only": True}, ) identity.type = AAZStrType() identity.user_assigned_identities = AAZDictType( serialized_name="userAssignedIdentities", ) user_assigned_identities = _schema_firewall_policy_read.identity.user_assigned_identities user_assigned_identities.Element = AAZObjectType() _element = _schema_firewall_policy_read.identity.user_assigned_identities.Element _element.client_id = AAZStrType( serialized_name="clientId", flags={"read_only": True}, ) _element.principal_id = AAZStrType( serialized_name="principalId", flags={"read_only": True}, ) properties = _schema_firewall_policy_read.properties properties.base_policy = AAZObjectType( serialized_name="basePolicy", ) cls._build_schema_sub_resource_read(properties.base_policy) properties.child_policies = AAZListType( serialized_name="childPolicies", flags={"read_only": True}, ) properties.dns_settings = AAZObjectType( serialized_name="dnsSettings", ) properties.explicit_proxy = AAZObjectType( serialized_name="explicitProxy", ) properties.firewalls = AAZListType( flags={"read_only": True}, ) properties.insights = AAZObjectType() properties.intrusion_detection = 
AAZObjectType( serialized_name="intrusionDetection", ) properties.provisioning_state = AAZStrType( serialized_name="provisioningState", flags={"read_only": True}, ) properties.rule_collection_groups = AAZListType( serialized_name="ruleCollectionGroups", flags={"read_only": True}, ) properties.sku = AAZObjectType() properties.snat = AAZObjectType() properties.sql = AAZObjectType() properties.threat_intel_mode = AAZStrType( serialized_name="threatIntelMode", ) properties.threat_intel_whitelist = AAZObjectType( serialized_name="threatIntelWhitelist", ) properties.transport_security = AAZObjectType( serialized_name="transportSecurity", ) child_policies = _schema_firewall_policy_read.properties.child_policies child_policies.Element = AAZObjectType() cls._build_schema_sub_resource_read(child_policies.Element) dns_settings = _schema_firewall_policy_read.properties.dns_settings dns_settings.enable_proxy = AAZBoolType( serialized_name="enableProxy", ) dns_settings.require_proxy_for_network_rules = AAZBoolType( serialized_name="requireProxyForNetworkRules", nullable=True, ) dns_settings.servers = AAZListType() servers = _schema_firewall_policy_read.properties.dns_settings.servers servers.Element = AAZStrType() explicit_proxy = _schema_firewall_policy_read.properties.explicit_proxy explicit_proxy.enable_explicit_proxy = AAZBoolType( serialized_name="enableExplicitProxy", nullable=True, ) explicit_proxy.enable_pac_file = AAZBoolType( serialized_name="enablePacFile", nullable=True, ) explicit_proxy.http_port = AAZIntType( serialized_name="httpPort", ) explicit_proxy.https_port = AAZIntType( serialized_name="httpsPort", ) explicit_proxy.pac_file = AAZStrType( serialized_name="pacFile", ) explicit_proxy.pac_file_port = AAZIntType( serialized_name="pacFilePort", ) firewalls = _schema_firewall_policy_read.properties.firewalls firewalls.Element = AAZObjectType() cls._build_schema_sub_resource_read(firewalls.Element) insights = _schema_firewall_policy_read.properties.insights insights.is_enabled = AAZBoolType( serialized_name="isEnabled", ) insights.log_analytics_resources = AAZObjectType( serialized_name="logAnalyticsResources", ) insights.retention_days = AAZIntType( serialized_name="retentionDays", ) log_analytics_resources = _schema_firewall_policy_read.properties.insights.log_analytics_resources log_analytics_resources.default_workspace_id = AAZObjectType( serialized_name="defaultWorkspaceId", ) cls._build_schema_sub_resource_read(log_analytics_resources.default_workspace_id) log_analytics_resources.workspaces = AAZListType() workspaces = _schema_firewall_policy_read.properties.insights.log_analytics_resources.workspaces workspaces.Element = AAZObjectType() _element = _schema_firewall_policy_read.properties.insights.log_analytics_resources.workspaces.Element _element.region = AAZStrType() _element.workspace_id = AAZObjectType( serialized_name="workspaceId", ) cls._build_schema_sub_resource_read(_element.workspace_id) intrusion_detection = _schema_firewall_policy_read.properties.intrusion_detection intrusion_detection.configuration = AAZObjectType() intrusion_detection.mode = AAZStrType() configuration = _schema_firewall_policy_read.properties.intrusion_detection.configuration configuration.bypass_traffic_settings = AAZListType( serialized_name="bypassTrafficSettings", ) configuration.private_ranges = AAZListType( serialized_name="privateRanges", ) configuration.signature_overrides = AAZListType( serialized_name="signatureOverrides", ) bypass_traffic_settings = 
_schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings bypass_traffic_settings.Element = AAZObjectType() _element = _schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings.Element _element.description = AAZStrType() _element.destination_addresses = AAZListType( serialized_name="destinationAddresses", ) _element.destination_ip_groups = AAZListType( serialized_name="destinationIpGroups", ) _element.destination_ports = AAZListType( serialized_name="destinationPorts", ) _element.name = AAZStrType() _element.protocol = AAZStrType() _element.source_addresses = AAZListType( serialized_name="sourceAddresses", ) _element.source_ip_groups = AAZListType( serialized_name="sourceIpGroups", ) destination_addresses = _schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings.Element.destination_addresses destination_addresses.Element = AAZStrType() destination_ip_groups = _schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings.Element.destination_ip_groups destination_ip_groups.Element = AAZStrType() destination_ports = _schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings.Element.destination_ports destination_ports.Element = AAZStrType() source_addresses = _schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings.Element.source_addresses source_addresses.Element = AAZStrType() source_ip_groups = _schema_firewall_policy_read.properties.intrusion_detection.configuration.bypass_traffic_settings.Element.source_ip_groups source_ip_groups.Element = AAZStrType() private_ranges = _schema_firewall_policy_read.properties.intrusion_detection.configuration.private_ranges private_ranges.Element = AAZStrType() signature_overrides = _schema_firewall_policy_read.properties.intrusion_detection.configuration.signature_overrides signature_overrides.Element = AAZObjectType() _element = _schema_firewall_policy_read.properties.intrusion_detection.configuration.signature_overrides.Element _element.id = AAZStrType() _element.mode = AAZStrType() rule_collection_groups = _schema_firewall_policy_read.properties.rule_collection_groups rule_collection_groups.Element = AAZObjectType() cls._build_schema_sub_resource_read(rule_collection_groups.Element) sku = _schema_firewall_policy_read.properties.sku sku.tier = AAZStrType() snat = _schema_firewall_policy_read.properties.snat snat.auto_learn_private_ranges = AAZStrType( serialized_name="autoLearnPrivateRanges", ) snat.private_ranges = AAZListType( serialized_name="privateRanges", ) private_ranges = _schema_firewall_policy_read.properties.snat.private_ranges private_ranges.Element = AAZStrType() sql = _schema_firewall_policy_read.properties.sql sql.allow_sql_redirect = AAZBoolType( serialized_name="allowSqlRedirect", ) threat_intel_whitelist = _schema_firewall_policy_read.properties.threat_intel_whitelist threat_intel_whitelist.fqdns = AAZListType() threat_intel_whitelist.ip_addresses = AAZListType( serialized_name="ipAddresses", ) fqdns = _schema_firewall_policy_read.properties.threat_intel_whitelist.fqdns fqdns.Element = AAZStrType() ip_addresses = _schema_firewall_policy_read.properties.threat_intel_whitelist.ip_addresses ip_addresses.Element = AAZStrType() transport_security = _schema_firewall_policy_read.properties.transport_security transport_security.certificate_authority = AAZObjectType( serialized_name="certificateAuthority", ) 
certificate_authority = _schema_firewall_policy_read.properties.transport_security.certificate_authority certificate_authority.key_vault_secret_id = AAZStrType( serialized_name="keyVaultSecretId", ) certificate_authority.name = AAZStrType() tags = _schema_firewall_policy_read.tags tags.Element = AAZStrType() _schema.etag = cls._schema_firewall_policy_read.etag _schema.id = cls._schema_firewall_policy_read.id _schema.identity = cls._schema_firewall_policy_read.identity _schema.location = cls._schema_firewall_policy_read.location _schema.name = cls._schema_firewall_policy_read.name _schema.properties = cls._schema_firewall_policy_read.properties _schema.tags = cls._schema_firewall_policy_read.tags _schema.type = cls._schema_firewall_policy_read.type _schema_sub_resource_read = None @classmethod def _build_schema_sub_resource_read(cls, _schema): if cls._schema_sub_resource_read is not None: _schema.id = cls._schema_sub_resource_read.id return cls._schema_sub_resource_read = _schema_sub_resource_read = AAZObjectType() sub_resource_read = _schema_sub_resource_read sub_resource_read.id = AAZStrType() _schema.id = cls._schema_sub_resource_read.id __all__ = ["Update"]
f7246f5b593196ab8c42ab3791fb27a636fa9877
2836c3caf8ca332635640a27254a345afd449081
/nws/FFE/dump_text.py
64ee6d4d5a352d51b778840d80848c276b2e4fec
[ "Apache-2.0", "MIT" ]
permissive
akrherz/DEV
27cf1bac978a0d6bbfba1851b90d2495a3bdcd66
3b1ef5841b25365d9b256467e774f35c28866961
refs/heads/main
2023-08-30T10:02:52.750739
2023-08-29T03:08:01
2023-08-29T03:08:01
65,409,757
2
0
MIT
2023-09-12T03:06:07
2016-08-10T19:16:28
Jupyter Notebook
UTF-8
Python
false
false
1,179
py
"""Dump text from database.""" from pyiem.util import get_dbconn def main(): """Go Main Go.""" pgconn = get_dbconn("postgis") cursor = pgconn.cursor() cursor.execute( "WITH data as (" "SELECT wfo, eventid, issue at time zone 'UTC' as issue, report, " "expire at time zone 'UTC' as expire, " "svs, row_number() OVER (PARTITION by wfo, eventid, " "extract(year from issue) ORDER by length(svs) DESC) from " "warnings where phenomena = 'FF' and significance = 'W' and " "is_emergency) " "SELECT * from data WHERE row_number = 1 ORDER by issue, wfo, eventid" ) done = [] for row in cursor: key = f"{row[0]}_{row[1]}_{row[2].year}" if key in done: continue done.append(key) i = 0 with open(f"FFE_Text/{key}_{i}.txt", "w") as fh: fh.write(row[3]) for prod in ("" if row[5] is None else row[5]).split("__"): if prod.strip() == "": continue i += 1 with open(f"FFE_Text/{key}_{i}.txt", "w") as fh: fh.write(prod) if __name__ == "__main__": main()
b22dd85529c83aa6600650aa488ecfa81392e566
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
/python/python_15719.py
68b4f869fb8d6e083ba4f1619ed178f0c4fa912c
[]
no_license
AK-1121/code_extraction
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
5297a4a3aab3bb37efa24a89636935da04a1f8b6
refs/heads/master
2020-05-23T08:04:11.789141
2015-10-22T19:19:40
2015-10-22T19:19:40
null
0
0
null
null
null
null
UTF-8
Python
false
false
68
py
# PyGame, Surface not showing
_display.blit(_active_surface, (h,w))
cf173c4188039fb9f7c03d8041bab18213f9fedf
2dd0082221239fef0e0894c852f70f1eaeb62b9e
/Assignments/pete/python/lab12/lab12-guess_the_number-v5.py
2c9c150c8bd62d806effabdefe672c22f7b08fbf
[]
no_license
pjz987/2019-10-28-fullstack-night
03097cf3dc24aeec0c326044bb0fc99385fbc333
4c643013de73f08d7503d62ec602d6a5c80ffa7e
refs/heads/master
2022-11-11T19:40:00.296645
2020-06-25T16:14:47
2020-06-25T16:14:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,139
py
'''
lab12-guess_the_number-v5.py
V5
Swap the user with the computer: the user will pick a number,
and the computer will make random guesses until they get it right.
'''

import random
import time

user_num = int(input("Welcome to Guess the Number v5.\nIn this program, computer guess you!\nPlease enter a number between 1 and 10: "))

while True:
    if user_num not in list(range(1, 11)):
        user_num = int(input(f"No cheating computer now. {user_num} is not a number between 1 and 10. Please enter a new number: "))
    else:
        break

guesses = []
count = 0

while True:
    time.sleep(1)
    guess = random.randint(1, 10)
    if guess == user_num:
        print(f"Computer guessed your number {user_num}.\n")
        time.sleep(1)
        while True:
            print("CONGRATULATION: COMPUTER GUESS YOU!" * count)
            count = count + 1
            time.sleep(.25)
    else:
        if guess in guesses:
            print(f"Computer guess {guess} again. Computer can never be too sure.")
        else:
            print(f"Computer guessed {guess}. Computer wrong. Computer try again.")
        guesses.append(guess)
ecad71b97f40fd1c4027e616ed6efc3e283cbe34
2442d073434d463cede4a79ae8f9fd31c62174f8
/object-oriented-programming/composition/address.py
fec0ad8bea031d381431f42455a4d6f1d84773c9
[]
no_license
grbalmeida/hello-python
3630d75cfdde15223dc1c3a714fd562f6cda0505
4d9ddf2f7d104fdbc3aed2c88e50af19a39c1b63
refs/heads/master
2020-07-10T10:04:38.982256
2020-02-26T00:37:36
2020-02-26T00:37:36
204,237,527
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
class Address:
    def __init__(self, city, state):
        self.city = city
        self.state = state

    def __del__(self):
        print(f'{self.city}/{self.state} have been deleted')
3fc82b879ed14d4f2cb2742b09238700a3f0c64f
54e23ae20b7351f1e5db325b13cc9a106b3e581a
/transformer/layers.py
723c431493e82ba286dc994ed9bec9d9c528fbf9
[ "Apache-2.0" ]
permissive
okehkim/End-to-End-Speech-Recognition-Models
191755c7fdab23ad61280279e200c9757824c64b
7b4695bbc778e4d2c92470b56e2479c8d81d0079
refs/heads/main
2023-01-30T02:11:57.860303
2020-11-28T16:53:02
2020-11-28T16:53:02
316,779,547
0
0
Apache-2.0
2020-11-28T16:52:27
2020-11-28T16:52:27
null
UTF-8
Python
false
false
4,293
py
# -*- coding: utf-8 -*-
# Soohwan Kim @ https://github.com/sooftware/
# This source code is licensed under the Apache 2.0 License license found in the
# LICENSE file in the root directory of this source tree.

import torch.nn as nn
from torch import Tensor
from typing import Tuple, Optional, Any
from transformer.sublayers import AddNorm, PositionWiseFeedForwardNet
from attention import MultiHeadAttention


class SpeechTransformerEncoderLayer(nn.Module):
    """
    EncoderLayer is made up of self-attention and feedforward network.
    This standard encoder layer is based on the paper "Attention Is All You Need".

    Args:
        d_model: dimension of model (default: 512)
        num_heads: number of attention heads (default: 8)
        d_ff: dimension of feed forward network (default: 2048)
        dropout_p: probability of dropout (default: 0.3)
        ffnet_style: style of feed forward network [ff, conv] (default: ff)
    """

    def __init__(
            self,
            d_model: int = 512,            # dimension of model
            num_heads: int = 8,            # number of attention heads
            d_ff: int = 2048,              # dimension of feed forward network
            dropout_p: float = 0.3,        # probability of dropout
            ffnet_style: str = 'ff'        # style of feed forward network
    ) -> None:
        super(SpeechTransformerEncoderLayer, self).__init__()
        self.self_attention = AddNorm(MultiHeadAttention(d_model, num_heads), d_model)
        self.feed_forward = AddNorm(PositionWiseFeedForwardNet(d_model, d_ff, dropout_p, ffnet_style), d_model)

    def forward(
            self,
            inputs: Tensor,                        # B x T_input x D
            non_pad_mask: Optional[Any] = None,    # B x T_input
            self_attn_mask: Optional[Any] = None   # B x T_input x T_output
    ) -> Tuple[Tensor, Tensor]:
        output, attn = self.self_attention(inputs, inputs, inputs, self_attn_mask)
        output *= non_pad_mask

        output = self.feed_forward(output)
        output *= non_pad_mask

        return output, attn


class SpeechTransformerDecoderLayer(nn.Module):
    """
    DecoderLayer is made up of self-attention, multi-head attention and feedforward network.
    This standard decoder layer is based on the paper "Attention Is All You Need".

    Args:
        d_model: dimension of model (default: 512)
        num_heads: number of attention heads (default: 8)
        d_ff: dimension of feed forward network (default: 2048)
        dropout_p: probability of dropout (default: 0.3)
        ffnet_style: style of feed forward network [ff, conv] (default: ff)
    """

    def __init__(
            self,
            d_model: int = 512,            # dimension of model
            num_heads: int = 8,            # number of attention heads
            d_ff: int = 2048,              # dimension of feed forward network
            dropout_p: float = 0.3,        # probability of dropout
            ffnet_style: str = 'ff'        # style of feed forward network
    ) -> None:
        super(SpeechTransformerDecoderLayer, self).__init__()
        self.self_attention = AddNorm(MultiHeadAttention(d_model, num_heads), d_model)
        self.memory_attention = AddNorm(MultiHeadAttention(d_model, num_heads), d_model)
        self.feed_forward = AddNorm(PositionWiseFeedForwardNet(d_model, d_ff, dropout_p, ffnet_style), d_model)

    def forward(
            self,
            inputs: Tensor,                        # B x T_input
            memory: Tensor,                        # B x T_input x D_model
            non_pad_mask: Optional[Any] = None,    # B x T_input
            self_attn_mask: Optional[Any] = None,  # B x T_input x T_input
            memory_mask: Optional[Any] = None      # B x T_input x T_output
    ) -> Tuple[Tensor, Tensor, Tensor]:
        output, self_attn = self.self_attention(inputs, inputs, inputs, self_attn_mask)
        output *= non_pad_mask

        output, memory_attn = self.memory_attention(output, memory, memory, memory_mask)
        output *= non_pad_mask

        output = self.feed_forward(output)
        output *= non_pad_mask

        return output, self_attn, memory_attn
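The shape comments in the forward signatures are the only documentation of the expected masks. Below is a small sketch of one common way to build them from utterance lengths, assuming a (B, T, 1) float non-padding mask (so that it can broadcast against the (B, T, D) output in `output *= non_pad_mask`) and a boolean (B, T, T) key-padding mask; the helper names are illustrative and are not defined anywhere in this record:

import torch

def make_non_pad_mask(lengths, max_len):
    # (B, T, 1): 1.0 at real frames, 0.0 at padding, broadcastable over (B, T, D)
    ids = torch.arange(max_len).unsqueeze(0)          # (1, T)
    mask = (ids < lengths.unsqueeze(1)).float()       # (B, T)
    return mask.unsqueeze(-1)                         # (B, T, 1)

def make_self_attn_pad_mask(lengths, max_len):
    # (B, T, T): True where the attended-to (key) position is padding
    ids = torch.arange(max_len).unsqueeze(0)
    pad = ids >= lengths.unsqueeze(1)                  # (B, T)
    return pad.unsqueeze(1).expand(-1, max_len, -1)    # (B, T, T)

lengths = torch.tensor([5, 3])                              # two utterances in a batch
non_pad_mask = make_non_pad_mask(lengths, max_len=5)        # torch.Size([2, 5, 1])
self_attn_mask = make_self_attn_pad_mask(lengths, max_len=5)  # torch.Size([2, 5, 5])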
232acacec4a343733eb00b2811848b81ae867e9f
e3c8f786d09e311d6ea1cab50edde040bf1ea988
/Incident-Response/Tools/dfirtrack/dfirtrack_main/tests/ip/test_ip_views.py
099cf7ffc43e424cec6cb803e502f9a1d4a4f205
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
foss2cyber/Incident-Playbook
d1add8aec6e28a19e515754c6ce2e524d67f368e
a379a134c0c5af14df4ed2afa066c1626506b754
refs/heads/main
2023-06-07T09:16:27.876561
2021-07-07T03:48:54
2021-07-07T03:48:54
384,988,036
1
0
MIT
2021-07-11T15:45:31
2021-07-11T15:45:31
null
UTF-8
Python
false
false
4,562
py
from django.contrib.auth.models import User from django.test import TestCase from dfirtrack_main.models import Ip import urllib.parse class IpViewTestCase(TestCase): """ ip view tests """ @classmethod def setUpTestData(cls): # create object Ip.objects.create(ip_ip='127.0.0.1') # create user User.objects.create_user(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') def test_ip_list_not_logged_in(self): """ test list view """ # create url destination = '/login/?next=' + urllib.parse.quote('/ip/', safe='') # get response response = self.client.get('/ip/', follow=True) # compare self.assertRedirects(response, destination, status_code=302, target_status_code=200) def test_ip_list_logged_in(self): """ test list view """ # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # get response response = self.client.get('/ip/') # compare self.assertEqual(response.status_code, 200) def test_ip_list_template(self): """ test list view """ # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # get response response = self.client.get('/ip/') # compare self.assertTemplateUsed(response, 'dfirtrack_main/ip/ip_list.html') def test_ip_list_get_user_context(self): """ test list view """ # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # get response response = self.client.get('/ip/') # compare self.assertEqual(str(response.context['user']), 'testuser_ip') def test_ip_list_redirect(self): """ test list view """ # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # create url destination = urllib.parse.quote('/ip/', safe='/') # get response response = self.client.get('/ip', follow=True) # compare self.assertRedirects(response, destination, status_code=301, target_status_code=200) def test_ip_detail_not_logged_in(self): """ test detail view """ # get object ip_1 = Ip.objects.get(ip_ip='127.0.0.1') # create url destination = '/login/?next=' + urllib.parse.quote('/ip/' + str(ip_1.ip_id) + '/', safe='') # get response response = self.client.get('/ip/' + str(ip_1.ip_id) + '/', follow=True) # compare self.assertRedirects(response, destination, status_code=302, target_status_code=200) def test_ip_detail_logged_in(self): """ test detail view """ # get object ip_1 = Ip.objects.get(ip_ip='127.0.0.1') # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # get response response = self.client.get('/ip/' + str(ip_1.ip_id) + '/') # compare self.assertEqual(response.status_code, 200) def test_ip_detail_template(self): """ test detail view """ # get object ip_1 = Ip.objects.get(ip_ip='127.0.0.1') # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # get response response = self.client.get('/ip/' + str(ip_1.ip_id) + '/') # compare self.assertTemplateUsed(response, 'dfirtrack_main/ip/ip_detail.html') def test_ip_detail_get_user_context(self): """ test detail view """ # get object ip_1 = Ip.objects.get(ip_ip='127.0.0.1') # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # get response response = self.client.get('/ip/' + str(ip_1.ip_id) + '/') # compare self.assertEqual(str(response.context['user']), 'testuser_ip') def test_ip_detail_redirect(self): """ test detail view """ # get object ip_1 = Ip.objects.get(ip_ip='127.0.0.1') # login testuser self.client.login(username='testuser_ip', password='pRs9Ap7oc9W0yjLfnP2Y') # create url destination = 
urllib.parse.quote('/ip/' + str(ip_1.ip_id) + '/', safe='/') # get response response = self.client.get('/ip/' + str(ip_1.ip_id), follow=True) # compare self.assertRedirects(response, destination, status_code=301, target_status_code=200)
1bb2970dbed9a9c8f76d2ed9a6d205330e6218ef
b3858bf912bcdeb6fdf23646d94d2b9cd6e7900a
/Candy Race.py
63c7d4b1575660a3662429db52355fb201d39a0d
[]
no_license
Programmer-Admin/binarysearch-editorials
eedf9e253e85324030260d44e798b0ca13645e63
12815fe3803cf5392ccfaadd38c7f50e882275c1
refs/heads/main
2023-02-06T04:59:25.279318
2020-12-26T20:45:34
2020-12-26T20:45:34
null
0
0
null
null
null
null
UTF-8
Python
false
false
378
py
""" Candy Race Apparently you can solve this in 1ms, but here's a memoized recursive solution for your soul. """ from functools import lru_cache class Solution: def solve(self, candies): @lru_cache(None) def dfs(i,j): if j<i: return 0 return max(candies[i]-dfs(i+1, j), candies[j]-dfs(i,j-1)) return dfs(0, len(candies)-1)>0
6eab525395ba4dccbc34669ccc2adea80f44a930
98e821fe72b711b5d05dbaa7018541a643950291
/edrnsite/collaborations/browser/groupspace.py
ef62ca89f22e62fa55a445b1d324247c5276f794
[ "Apache-2.0" ]
permissive
EDRN/edrnsite.collaborations
e6b0a71a13a6171b9e48de3b8b39979ebb969504
2344b3fe2f60e1079823c688968329010d3c67d5
refs/heads/master
2021-01-18T21:09:51.188644
2018-09-05T14:58:42
2018-09-05T14:58:42
20,818,565
0
0
null
null
null
null
UTF-8
Python
false
false
444
py
# encoding: utf-8
# Copyright 2012 California Institute of Technology. ALL RIGHTS
# RESERVED. U.S. Government Sponsorship acknowledged.

'''EDRN Site Collaborations: group space view
'''

from Products.Five.browser import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile

class GroupSpaceView(BrowserView):
    '''Default view for a Group Space.'''
    index = ViewPageTemplateFile('templates/groupspace.pt')
dbd85ec4828daa9922789e8a37df4ddb2a4a6b71
e0f4db1f56bce425a1fe53796461b4b77f9f8c72
/src/profiles/migrations/0002_auto_20180820_1928.py
bba3be33fd8426a98ccba6dbec71351398b86fac
[]
no_license
AhmedBafadal/My-Picks
a105feacb8d649ce10cee71d6c4308360e221d94
21a9143119f9933dcebd53c2fd252a2160ab0e58
refs/heads/master
2020-03-26T09:50:05.059463
2018-08-29T18:54:17
2018-08-29T18:54:17
144,767,334
0
0
null
null
null
null
UTF-8
Python
false
false
756
py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-08-20 18:28
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('profiles', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='followers',
            field=models.ManyToManyField(blank=True, related_name='is_follower', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='profile',
            name='following',
            field=models.ManyToManyField(blank=True, related_name='is_following', to=settings.AUTH_USER_MODEL),
        ),
    ]
554020e53113b860b8d0bbd5c0664b43fa6ae211
42a7b34bce1d2968079c6ea034d4e3f7bb5802ad
/ex51/gothonweb/bin/app.py
4cbf3709fa4bf1a85bea1cd83292927e281bb187
[]
no_license
linpan/LPTHW
45c9f11265b5e1ffe0387a56cec192fa12c6c4d5
227bfee3098e8ecb5f07ffc3a0b8e64a853106ce
refs/heads/master
2021-04-26T13:42:56.859644
2014-12-18T15:21:14
2014-12-18T15:21:14
null
0
0
null
null
null
null
UTF-8
Python
false
false
527
py
#! /usr/bin/env python
#coding:utf-8
import web

urls = (
    '/hello','Index'
)

app = web.application(urls,globals())
render = web.template.render('/usr/local/LPTHW/ex51/gothonweb/templates/',base="layout")

class Index(object):
    def GET(self):
        return render.hello_form()

    def POST(self):
        form = web.input(name="Nobody",greet="Hello")
        greeting = "%s,%s" % (form.greet,form.name)
        return render.index(greeting = greeting)

if __name__ == '__main__':
    app.run()
a8ee7e66a35c09736719e1aa3b92405d4f0be455
3b11dc40c7d772fffeb4d8683e5c9791c41f6454
/addons/product/wizard/product_price_list.py
ae6ce2aea495f5f257f7a39ca139f811bc6bf4f2
[]
no_license
Jacky-odoo/Ecobank
b986352abac9416ab00008a4abaec2b1f1a1f262
5c501bd03a22421f47c76380004bf3d62292f79d
refs/heads/main
2023-03-09T18:10:45.058530
2021-02-25T14:11:12
2021-02-25T14:11:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,082
py
# -*- coding: utf-8 -*-
# Part of Byte. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models


class product_price_list(models.TransientModel):
    _name = 'product.price_list'
    _description = 'Price List'

    price_list = fields.Many2one('product.pricelist', 'PriceList', required=True)
    qty1 = fields.Integer('Quantity-1', default=1)
    qty2 = fields.Integer('Quantity-2', default=5)
    qty3 = fields.Integer('Quantity-3', default=10)
    qty4 = fields.Integer('Quantity-4', default=0)
    qty5 = fields.Integer('Quantity-5', default=0)

    @api.multi
    def print_report(self):
        """
        To get the date and print the report
        @return : return report
        """
        datas = {'ids': self.env.context.get('active_ids', [])}
        res = self.read(['price_list', 'qty1', 'qty2', 'qty3', 'qty4', 'qty5'])
        res = res and res[0] or {}
        res['price_list'] = res['price_list'][0]
        datas['form'] = res
        return self.env['report'].get_action([], 'product.report_pricelist', data=datas)
3c459ecdeef9297b64145fdb0850bea22bf0034c
ac1bbabc7c1b3149711c416dd8b5f5969a0dbd04
/Programming Basics/conditional_statements/even_odd.py
a2b6079b0ca9b1515287833ff45acf73255d5d9c
[]
no_license
AssiaHristova/SoftUni-Software-Engineering
9e904221e50cad5b6c7953c81bc8b3b23c1e8d24
d4910098ed5aa19770d30a7d9cdf49f9aeaea165
refs/heads/main
2023-07-04T04:47:00.524677
2021-08-08T23:31:51
2021-08-08T23:31:51
324,847,727
1
0
null
2021-08-08T23:31:52
2020-12-27T20:58:01
Python
UTF-8
Python
false
false
75
py
a = int(input())
if a % 2 == 0:
    print('even')
else:
    print('odd')
ca7b25409a21a310db6153351cc71d886ecd96ad
66c7b0da6ee27ddce0943945503cdecf199f77a2
/hucrl/agent/tests/test_mpc_agent.py
a0b361bc8c294920b8fd24c0a2626d18525bcba2
[ "MIT" ]
permissive
tzahishimkin/extended-hucrl
07609f9e9f9436121bcc64ff3190c966183a2cd9
c144aeecba5f35ccfb4ec943d29d7092c0fa20e3
refs/heads/master
2023-07-09T22:57:28.682494
2021-08-24T08:50:16
2021-08-24T08:50:16
383,819,908
0
0
null
null
null
null
UTF-8
Python
false
false
3,789
py
import copy
import os

import pytest
from rllib.agent import MPCAgent
from rllib.algorithms.mpc import CEMShooting, MPPIShooting, RandomShooting
from rllib.dataset.experience_replay import ExperienceReplay
from rllib.environment import GymEnvironment
from rllib.model.environment_model import EnvironmentModel
from rllib.util.training.agent_training import evaluate_agent

SEED = 0
MAX_ITER = 5
ENVIRONMENT = "VContinuous-CartPole-v0"

env = GymEnvironment(ENVIRONMENT, SEED)
env_model = copy.deepcopy(env)
env_model.reset()
dynamical_model = EnvironmentModel(env_model)
reward_model = EnvironmentModel(env_model, model_kind="rewards")
termination = EnvironmentModel(env_model, model_kind="termination")

GAMMA = 0.99
HORIZON = 5
NUM_ITER = 5
NUM_SAMPLES = 50
NUM_ELITES = 5
KAPPA = 1.0
BETAS = [0.2, 0.8, 0]

memory = ExperienceReplay(max_len=2000, num_steps=1)
value_function = None


@pytest.fixture(params=["random_shooting", "cem_shooting", "mppi_shooting"])
def solver(request):
    return request.param


@pytest.fixture(params=[True, False])
def warm_start(request):
    return request.param


@pytest.fixture(params=["mean", "zero", "constant"])
def default_action(request):
    return request.param


@pytest.fixture(params=[1])
def num_cpu(request):
    return request.param


def get_solver(solver_, warm_start_, num_cpu_, default_action_):
    if solver_ == "random_shooting":
        mpc_solver = RandomShooting(
            dynamical_model=dynamical_model,
            reward_model=reward_model,
            horizon=HORIZON,
            gamma=1.0,
            num_samples=NUM_SAMPLES,
            num_elites=NUM_ELITES,
            termination=termination,
            terminal_reward=value_function,
            warm_start=warm_start_,
            default_action=default_action_,
            num_cpu=num_cpu_,
        )
    elif solver_ == "cem_shooting":
        mpc_solver = CEMShooting(
            dynamical_model=dynamical_model,
            reward_model=reward_model,
            horizon=HORIZON,
            gamma=1.0,
            num_iter=NUM_ITER,
            num_samples=NUM_SAMPLES,
            num_elites=NUM_ELITES,
            termination=termination,
            terminal_reward=value_function,
            warm_start=warm_start_,
            default_action=default_action_,
            num_cpu=num_cpu_,
        )
    elif solver_ == "mppi_shooting":
        mpc_solver = MPPIShooting(
            dynamical_model=dynamical_model,
            reward_model=reward_model,
            horizon=HORIZON,
            gamma=1.0,
            num_iter=NUM_ITER,
            kappa=KAPPA,
            filter_coefficients=BETAS,
            num_samples=NUM_SAMPLES,
            termination=termination,
            terminal_reward=value_function,
            warm_start=warm_start_,
            default_action=default_action_,
            num_cpu=num_cpu_,
        )
    else:
        raise NotImplementedError
    return mpc_solver


def test_mpc_solvers(solver, num_cpu):
    if num_cpu > 1 and "CI" in os.environ:
        return
    mpc_solver = get_solver(solver, True, num_cpu, "mean")
    agent = MPCAgent(mpc_solver=mpc_solver)
    evaluate_agent(
        agent, environment=env, num_episodes=1, max_steps=MAX_ITER, render=False
    )


def test_mpc_warm_start(solver, warm_start):
    mpc_solver = get_solver(solver, warm_start, 1, "mean")
    agent = MPCAgent(mpc_solver=mpc_solver)
    evaluate_agent(
        agent, environment=env, num_episodes=1, max_steps=MAX_ITER, render=False
    )


def test_mpc_default_action(solver, default_action):
    mpc_solver = get_solver(solver, True, 1, default_action)
    agent = MPCAgent(mpc_solver=mpc_solver)
    evaluate_agent(
        agent, environment=env, num_episodes=1, max_steps=MAX_ITER, render=False
    )
71c8a815e9abdcce979147da4c74c9bf207a05b2
ef54d37f8a3303013ca7469871a320d303957ed7
/robo4.2/4.2/lib/python2.7/site-packages/RoboGalaxyLibrary/netconf/ncclient/operations/subscribe.py
f5ed796c96dd96bd4dc9b0046bf76d272b5e79c7
[]
no_license
richa92/Jenkin_Regression_Testing
d18badfcf16bda682dfe7bcbbd66f54a9a27a58d
24a74926170cbdfafa47e972644e2fe5b627d8ff
refs/heads/master
2020-07-12T10:01:59.099137
2019-08-27T12:14:53
2019-08-27T12:14:53
null
0
0
null
null
null
null
UTF-8
Python
false
false
693
py
# Copyright 2009 Shikhar Bhushan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO

class Notification:
    pass

class CreateSubscription:
    pass

class NotificationListener:
    pass
c079fd6ce0e69dc89478dbf7d7e8926315ba7e7d
94838674ffd175df6194437c1ccc3f90ab409d6c
/pillowV3/log/2018-12-30 14:46:01.690040
d11e65881713fa69fa27304303d67f21dd294fcc
[]
no_license
WojciechKoz/MyFirstNeuralNetwork
4fdb3140d8f02257599d005638598f78055c1ac8
3cd032aba80ecd71edb0286724ae9ba565b75a81
refs/heads/master
2020-04-02T03:02:48.680433
2020-02-29T17:57:43
2020-02-29T17:57:43
153,943,121
0
0
null
null
null
null
UTF-8
Python
false
false
307,917
690040
#!/usr/bin/env python3 # -*- coding: utf8 -*- from __future__ import print_function # new print() on python2 from datetime import datetime import sys import numpy as np from mnist import MNIST # Display full arrays np.set_printoptions(threshold=np.inf) mndata = MNIST('./data') images_full, labels_full = mndata.load_training() images = [] labels = [] # dynamic arguments batch_size = int(sys.argv[1]) size_1 = int(sys.argv[2]) size_2 = int(sys.argv[3]) batch_training_size = int(sys.argv[4]) data_part = 5 # only one fifth of the whole dataset to speed up training for i in range(len(labels_full) // batch_size // data_part): images.append(images_full[i*batch_size : (i+1)*batch_size]) labels.append(labels_full[i*batch_size : (i+1)*batch_size]) def sigmoid_prime(x): return np.exp(-x) / ((np.exp(-x) + 1) ** 2) def sigmoid(x): return 1 / (1 + np.exp(-x)) # nowe, przyda się? def relu(x): return np.maximum(x, x * 0.01) def relu_prime(x): if x >= 0: return 1 # ej nie jest tak xd # a jak xd? type(x) == no.ndarray # no x to macierz xd # np.exp jest przeładowane ale jakakoleiwk funkcja to chyba nie # to co foreach ? :( # właśnie nie wiem, a co z gpu? # to miało być szybsze a nie xd # mamy duzo mozliwosci zmian ale nie na raz trzeba ustalic jakos # hm TODO gpu TODO wincyj procent TODO gui gotowe # xd # tamto myliło hah # to co najpierw? :p # ssh daje wglad do basha tylko tak ? # nie, to jest taki fajny programik, byobu # i ten pasek na dole też jest z byobu # on udostepnia tylko basha ? # tak, ale basha multiplayer xd # szkoda że 2 kursorow nie ma # hm return 0.01 # chyba tak xd nikt nie widzial xd # ale x to macierz :p # ale to jest przeciazone i jak jest funkcja od macierzy to bierze po kolei kazdy element # w sumie # zobacze na drugiej karcie xd #X = np.array([[0, 0], # [0, 1], # [1, 0], # [1, 1]]) #X = np.array(images) y = [] for batch in labels: y.append([]) for label in batch: y[-1].append([1.0 if i == label else 0.0 for i in range(10)]) y = np.array(y) #y = np.array([[0], # [1], # [1], # [0]]) np.random.seed(1) LEN = len(labels) SIZES = [ 784, size_1, size_2, 10 ] syn0 = 2 * np.random.random((SIZES[0], SIZES[1])) - 1 syn1 = 2 * np.random.random((SIZES[1], SIZES[2])) - 1 syn2 = 2 * np.random.random((SIZES[2], SIZES[3])) - 1 # biases for respective layers b0 = 2 * np.random.random((1, SIZES[1])) - 1 b1 = 2 * np.random.random((1, SIZES[2])) - 1 b2 = 2 * np.random.random((1, SIZES[3])) - 1 for i, batch in list(enumerate(images)): X = np.array(batch) print("x:") print(np.shape(X)) print("======================= BATCH {} =======================".format(i)) error = 1 j = 0 while j < batch_training_size: l0 = X l1 = sigmoid(np.dot(l0, syn0) + b0) l2 = sigmoid(np.dot(l1, syn1) + b1) l3 = sigmoid(np.dot(l2, syn2) + b2) l3_error = (y[i] - l3)#** 2 error = np.mean(np.abs(l3_error)) j += 1 if j % 20 == 0: print(("[%d] error: " % j) + str(error)) l3_delta = l3_error * sigmoid_prime(l3) l2_error = l3_delta.dot(syn2.T) l2_delta = l2_error * sigmoid_prime(l2) l1_error = l2_delta.dot(syn1.T) l1_delta = l1_error * sigmoid_prime(l1) syn2 += l2.T.dot(l3_delta) syn1 += l1.T.dot(l2_delta) syn0 += l0.T.dot(l1_delta) b0 += l1_delta.mean(axis=0) b1 += l2_delta.mean(axis=0) b2 += l3_delta.mean(axis=0) def predict(data): l0 = [data] l1 = sigmoid(np.dot(l0, syn0) + b0) l2 = sigmoid(np.dot(l1, syn1) + b1) l3 = sigmoid(np.dot(l2, syn2) + b2) return np.argmax(l3) print("Output after training: ") print(l3) for i, el in enumerate(l3): print(labels[0][i], "=", np.argmax(el), " predictions: ", el) testing_images, testing_labels = 
mndata.load_testing() correct = 0.0 for i, (image, label) in enumerate(zip(testing_images, testing_labels)): prediction = predict(image) if label == prediction: correct += 1.0 correct_rate = correct / (i + 1.0) print("{} = {} (correct {}%)".format(label, prediction, 100 * correct_rate)) with open('log/' + str(datetime.now()), 'a') as f: with open(__file__, 'r') as myself: print(myself.read(), file=f) print("", file=f) print("#### answers:", file=f) print("argv =", sys.argv, file=f) print("correct_rate =", correct_rate, file=f) print("SIZES =", SIZES, file=f) print("syn0 =", syn0, file=f) print("syn1 =", syn1, file=f) print("syn2 =", syn2, file=f) print("b0 =", b0, file=f) print("b1 =", b1, file=f) print("b2 =", b2, file=f) #### answers: argv = ['./main.py', '62', '22', '30', '24'] correct_rate = 0.3595 SIZES = [784, 22, 30, 10] syn0 = [[-1.65955991e-01 4.40648987e-01 -9.99771250e-01 -3.95334855e-01 -7.06488218e-01 -8.15322810e-01 -6.27479577e-01 -3.08878546e-01 -2.06465052e-01 7.76334680e-02 -1.61610971e-01 3.70439001e-01 -5.91095501e-01 7.56234873e-01 -9.45224814e-01 3.40935020e-01 -1.65390395e-01 1.17379657e-01 -7.19226123e-01 -6.03797022e-01 6.01489137e-01 9.36523151e-01] [-3.73151644e-01 3.84645231e-01 7.52778305e-01 7.89213327e-01 -8.29911577e-01 -9.21890434e-01 -6.60339161e-01 7.56285007e-01 -8.03306332e-01 -1.57784750e-01 9.15779060e-01 6.63305699e-02 3.83754228e-01 -3.68968738e-01 3.73001855e-01 6.69251344e-01 -9.63423445e-01 5.00288630e-01 9.77722178e-01 4.96331309e-01 -4.39112016e-01 5.78558657e-01] [-7.93547987e-01 -1.04212948e-01 8.17191006e-01 -4.12771703e-01 -4.24449323e-01 -7.39942856e-01 -9.61266084e-01 3.57671066e-01 -5.76743768e-01 -4.68906681e-01 -1.68536814e-02 -8.93274910e-01 1.48235211e-01 -7.06542850e-01 1.78611074e-01 3.99516720e-01 -7.95331142e-01 -1.71888024e-01 3.88800315e-01 -1.71641461e-01 -9.00093082e-01 7.17928118e-02] [ 3.27589290e-01 2.97782241e-02 8.89189512e-01 1.73110081e-01 8.06803831e-01 -7.25050592e-01 -7.21447305e-01 6.14782577e-01 -2.04646326e-01 -6.69291606e-01 8.55017161e-01 -3.04468281e-01 5.01624206e-01 4.51995971e-01 7.66612182e-01 2.47344414e-01 5.01884868e-01 -3.02203316e-01 -4.60144216e-01 7.91772436e-01 -1.43817620e-01 9.29680094e-01] [ 3.26882996e-01 2.43391440e-01 -7.70508054e-01 8.98978517e-01 -1.00175733e-01 1.56779229e-01 -1.83726394e-01 -5.25946040e-01 8.06759041e-01 1.47358973e-01 -9.94259346e-01 2.34289827e-01 -3.46710196e-01 5.41162045e-02 7.71884199e-01 -2.85460480e-01 8.17070302e-01 2.46720232e-01 -9.68357514e-01 8.58874467e-01 3.81793835e-01 9.94645701e-01] [-6.55318983e-01 -7.25728501e-01 8.65190926e-01 3.93636323e-01 -8.67999655e-01 5.10926105e-01 5.07752377e-01 8.46049071e-01 4.23049517e-01 -7.51458076e-01 -9.60239732e-01 -9.47578026e-01 -9.43387024e-01 -5.07577865e-01 7.20055897e-01 7.76621287e-02 1.05643957e-01 6.84061785e-01 -7.51653370e-01 -4.41632642e-01 1.71518543e-01 9.39191497e-01] [ 1.22060439e-01 -9.62705421e-01 6.01265345e-01 -5.34051452e-01 6.14210391e-01 -2.24278712e-01 7.27083709e-01 4.94243285e-01 1.12480468e-01 -7.27089549e-01 -8.80164621e-01 -7.57313089e-01 -9.10896243e-01 -7.85011742e-01 -5.48581323e-01 4.25977961e-01 1.19433964e-01 -9.74888040e-01 -8.56051441e-01 9.34552660e-01 1.36200924e-01 -5.93413531e-01] [-4.95348511e-01 4.87651708e-01 -6.09141038e-01 1.62717855e-01 9.40039978e-01 6.93657603e-01 -5.20304482e-01 -1.24605715e-02 2.39911437e-01 6.57961799e-01 -6.86417211e-01 -9.62847596e-01 -8.59955713e-01 -2.73097781e-02 2.12658923e-01 1.37702874e-01 -3.65275181e-01 9.77232309e-01 1.59490438e-01 
-2.39717655e-01 1.01896438e-01 4.90668862e-01] [ 3.38465787e-01 -4.70160885e-01 -8.67330331e-01 -2.59831604e-01 2.59435014e-01 -5.79651980e-01 5.05511107e-01 -8.66927037e-01 -4.79369803e-01 6.09509127e-01 -6.13131435e-01 2.78921762e-01 4.93406182e-02 8.49615941e-01 -4.73406459e-01 -8.68077819e-01 4.70131927e-01 5.44356059e-01 8.15631705e-01 8.63944138e-01 -9.72096854e-01 -5.31275828e-01] [ 2.33556714e-01 8.98032641e-01 9.00352238e-01 1.13306376e-01 8.31212700e-01 2.83132418e-01 -2.19984572e-01 -2.80186658e-02 2.08620966e-01 9.90958430e-02 8.52362853e-01 8.37466871e-01 -2.10248774e-01 9.26525057e-01 -6.52088667e-01 -7.47340961e-01 -7.29841684e-01 1.13243314e-02 -9.56950389e-01 8.95940422e-01 6.54230942e-01 -9.69962039e-01] [-6.47607489e-01 -3.35872851e-01 -7.38006310e-01 6.18981384e-01 -3.10526695e-01 8.80214965e-01 1.64028360e-01 7.57663969e-01 6.89468891e-01 8.10784637e-01 -8.02394684e-02 9.26936320e-02 5.97207182e-01 -4.28562297e-01 -1.94929548e-02 1.98220615e-01 -9.68933449e-01 1.86962816e-01 -1.32647302e-01 6.14721058e-01 -3.69510394e-01 7.85777417e-01] [ 1.55714431e-01 -6.31979597e-01 5.75858468e-01 2.24062354e-01 -8.92181456e-01 -1.59612640e-01 3.58137673e-01 8.37203556e-01 -9.99195950e-01 9.53518298e-01 -2.46839371e-01 9.47567077e-01 2.09432202e-01 6.57691616e-01 1.49423009e-01 2.56152397e-01 -4.28847437e-01 1.73666681e-01 5.00043527e-01 7.16627673e-01 5.10164377e-01 3.96114497e-01] [ 7.28958860e-01 -3.54638006e-01 3.41577582e-01 -9.82521272e-02 -2.35794496e-01 -1.78377300e-01 -1.97040833e-01 -3.65232108e-01 2.43838736e-01 -1.39505458e-01 9.47604156e-01 3.55601783e-01 -6.02860223e-01 -1.46597981e-01 -3.13307520e-01 5.95277608e-01 7.59996577e-01 8.07683912e-01 3.25439625e-01 -4.59583476e-01 -4.95266597e-01 7.09795885e-01] [ 5.54292926e-02 6.04322168e-01 1.44977034e-01 4.66285051e-01 3.80232549e-02 5.41767821e-01 1.37715981e-01 -6.85802428e-02 -3.14622184e-01 -8.63581303e-01 -2.44151641e-01 -8.40747845e-01 9.65634227e-01 -6.36774297e-01 6.23717395e-01 7.49923290e-01 3.76826505e-01 1.38988825e-01 -6.78057126e-01 -6.62399545e-02 -3.09655898e-01 -5.49920084e-01] [ 1.85023738e-01 -3.75460325e-01 8.32611107e-01 8.19271050e-01 -4.85763412e-01 -7.78217399e-01 -6.14074536e-01 -8.31658642e-04 4.57171336e-01 -5.83611123e-01 -5.03932883e-01 7.03343750e-01 -1.68302563e-01 2.33370134e-01 -5.32667722e-01 -7.96065481e-01 3.17140339e-02 -4.57180259e-02 -6.94656712e-01 2.43612463e-01 8.80202376e-02 3.08274694e-01] [-7.10908920e-01 5.03055634e-01 -5.55901720e-01 3.87036487e-02 5.70592056e-01 -9.55339144e-01 -3.51275081e-01 7.45844753e-01 6.89419215e-01 7.68811852e-02 7.33216548e-01 8.99611983e-01 6.52813995e-01 7.08230888e-01 -8.02513196e-01 3.02608665e-01 4.07033976e-01 2.20481625e-01 5.99230523e-01 -9.30857560e-01 5.40477469e-01 4.63457201e-01] [-4.80603213e-01 -4.85861402e-01 2.64606635e-01 -3.09405077e-01 5.93177356e-01 -1.07707536e-01 5.65498830e-01 9.80943567e-01 -3.99503321e-01 -7.13988343e-01 8.02616873e-01 8.31187578e-02 9.49480742e-01 2.73208800e-01 9.87826049e-01 9.21416083e-02 5.28518678e-02 -7.29144194e-01 -2.88589658e-01 -9.47562865e-01 -6.79209641e-01 4.91274385e-01] [-9.39200620e-01 -2.66913806e-01 7.24692506e-01 3.85355435e-01 3.81884284e-01 -6.22726398e-01 -1.16191439e-01 1.63154815e-01 9.79503415e-01 -5.92187550e-01 -5.04534196e-01 -4.75653832e-01 5.00344827e-01 -8.60493451e-02 -8.86141123e-01 1.70324812e-02 -5.76079671e-01 5.97208490e-01 -4.05337237e-01 -9.44787976e-01 1.86864899e-01 6.87680858e-01] [-2.37967752e-01 4.99716621e-01 2.22829566e-02 8.19036099e-02 9.18868642e-01 
6.07921783e-01 -9.35353867e-01 4.18774502e-01 -6.99970369e-02 8.95097883e-01 -5.57134531e-01 -4.65855961e-01 -8.37052070e-01 -1.42762343e-01 -7.81962472e-01 2.67573521e-01 6.05926475e-01 3.93600992e-01 5.32422762e-01 -3.15091760e-01 6.91702966e-01 -1.42462450e-01] [ 6.48019741e-01 2.52992317e-01 -7.13153903e-01 -8.43226200e-01 -9.63334714e-01 -8.66550005e-01 -8.28323726e-02 -7.73316154e-01 -9.44433302e-01 5.09722963e-01 -2.10299039e-01 4.93876991e-01 -9.51903465e-02 -9.98265060e-02 -4.38549866e-02 -5.19921469e-02 6.06326684e-01 -1.95214960e-01 8.09372321e-01 -9.25877904e-01 5.47748685e-01 -7.48717238e-01] [ 2.37027134e-01 -9.79271477e-01 7.72545652e-02 -9.93964087e-01 9.02387571e-01 8.10804067e-01 5.91933884e-01 8.30548640e-01 -7.08883538e-01 -6.84539860e-01 -6.24736654e-01 2.44991805e-01 8.11618992e-01 9.79910357e-01 4.22244918e-01 4.63600818e-01 8.18586409e-01 -1.98252535e-01 -5.00298640e-01 -6.53139658e-01 -7.61085899e-01 6.25221176e-01] [-7.06415253e-01 -4.71405035e-01 6.38178357e-01 -3.78825496e-01 9.64834899e-01 -4.66722596e-01 6.73066899e-02 -3.71065978e-01 8.21545662e-01 -2.66886712e-01 -1.32815345e-01 2.45853846e-02 8.77772955e-01 -9.38101987e-01 4.33757327e-01 7.82037909e-01 -9.45425553e-01 4.41024945e-02 -3.48020376e-01 7.18978642e-01 1.17033102e-01 3.80455736e-01] [-9.42930001e-02 2.56618075e-01 -4.19806297e-01 -9.81302844e-01 1.53511870e-01 -3.77111572e-01 3.45351970e-02 8.32811706e-01 -1.47050423e-01 -5.05207927e-01 -2.57412477e-01 8.63722233e-01 8.73736763e-01 6.88659897e-01 8.40413029e-01 -5.44199420e-01 -8.25035581e-01 -5.45380527e-01 -3.71246768e-01 -6.50468247e-01 2.14188324e-01 -1.72827170e-01] [ 6.32703024e-01 -6.29739203e-01 4.03753060e-01 -5.19288750e-01 1.48438178e-01 -3.02024806e-01 -8.86071201e-01 -5.42372658e-01 3.28205111e-01 -5.49981328e-03 3.80319681e-02 -6.50559700e-01 1.41431703e-01 9.93506850e-01 6.33670218e-01 1.88745248e-01 9.51978137e-01 8.03125169e-01 1.91215867e-01 -9.35147349e-01 -8.12845808e-01 -8.69256570e-01] [-9.65337026e-02 -2.49130334e-01 9.50700069e-01 -6.64033414e-01 9.45575184e-01 5.34949738e-01 6.48475679e-01 2.65231634e-01 3.37465540e-01 -4.62353330e-02 -9.73727286e-01 -2.93987829e-01 -1.58563970e-02 4.60182422e-01 -6.27433145e-02 -8.51901678e-02 -7.24674518e-01 -9.78222532e-01 5.16556521e-01 -3.60094324e-01 9.68766900e-01 -5.59531548e-01] [-3.22583949e-01 4.77922713e-02 5.09782914e-01 -7.22844322e-02 -7.50354914e-01 -3.74997243e-01 9.03833940e-03 3.47698016e-01 5.40299913e-01 -7.39328438e-01 -9.54169737e-01 3.81646444e-02 6.19977421e-01 -9.74792466e-01 3.44939689e-01 3.73616453e-01 -1.01506493e-01 8.29577373e-01 2.88722170e-01 -9.89520325e-01 -3.11431090e-02 7.18635612e-01] [ 6.60799140e-01 2.98308394e-01 3.47396848e-01 1.56999160e-01 -4.51760450e-01 1.21059981e-01 3.43459570e-01 -2.95140740e-01 7.11656735e-01 -6.09925028e-01 4.94641621e-01 -4.20794508e-01 5.47598574e-01 -1.44525341e-01 6.15396818e-01 -2.92930275e-01 -5.72613525e-01 5.34569017e-01 -3.82716105e-01 4.66490135e-01 4.88946306e-01 -5.57206598e-01] [-5.71775726e-01 -6.02104153e-01 -7.14963324e-01 -2.45834802e-01 -9.46744231e-01 -7.78159262e-01 3.49128048e-01 5.99553074e-01 -8.38940946e-01 -5.36595379e-01 -5.84748676e-01 8.34667126e-01 4.22629036e-01 1.07769222e-01 -3.90964024e-01 6.69708095e-01 -1.29388085e-01 8.46912430e-01 4.12103609e-01 -4.39373841e-02 -7.47579793e-01 9.52087101e-01] [-6.80332699e-01 -5.94795750e-01 -1.37636490e-01 -1.91596188e-01 -7.06497038e-01 4.58637839e-01 -6.22509866e-01 2.87791289e-01 5.08611901e-01 -5.78535216e-01 2.01908496e-01 4.97856750e-01 
2.76437421e-01 1.94254606e-01 -4.09035429e-01 4.63212942e-01 8.90616880e-01 -1.48877219e-01 5.64363634e-01 -8.87717921e-01 6.70543205e-01 -6.15499966e-01] [-2.09806262e-01 -3.99837908e-01 -8.39792712e-01 8.09262006e-01 -2.59691645e-01 6.13948770e-02 -1.17674682e-02 -7.35677716e-01 -5.87091882e-01 -8.47622382e-01 1.58433999e-02 -4.76900896e-01 -2.85876782e-01 -7.83869343e-01 5.75103679e-01 -7.86832246e-01 9.71417647e-01 -6.45677671e-01 1.44810225e-01 -9.10309331e-01 5.74232579e-01 -6.20788104e-01] [ 5.58079568e-02 4.80155086e-01 -7.00137030e-01 1.02174348e-01 -5.66765583e-01 5.18392099e-01 4.45830387e-01 -6.46901931e-01 7.23933115e-01 -9.60449801e-01 7.20473995e-01 1.17807622e-01 -1.93559056e-01 5.17493862e-01 4.33858003e-01 9.74652350e-01 -4.43829903e-01 -9.92412655e-01 8.67805217e-01 7.15794209e-01 4.57701755e-01 3.33775658e-02] [ 4.13912490e-01 5.61059114e-01 -2.50248113e-01 5.40645051e-01 5.01248638e-01 2.26422423e-01 -1.96268152e-01 3.94616039e-01 -9.93774284e-01 5.49793293e-01 7.92833205e-01 -5.21368585e-01 -7.58465631e-01 -5.59432024e-01 -3.95806537e-01 7.66057017e-01 8.63328605e-02 -4.26576701e-01 -7.23290620e-01 -4.19711074e-01 2.27742179e-01 -3.51722940e-01] [-8.52796366e-02 -1.11765786e-01 6.56270721e-01 -1.47303692e-01 -3.08602358e-01 3.49943210e-01 -5.57035889e-01 -6.55083521e-02 -3.70468625e-01 2.53711204e-01 7.54720949e-01 -1.04622000e-01 5.68914838e-01 -8.60685989e-02 3.12458663e-01 -7.36318050e-01 -1.34036986e-01 8.18623977e-01 2.10958002e-01 5.33549174e-01 9.40121619e-03 -3.88875034e-03] [ 6.85799680e-01 -8.64386131e-01 1.46544543e-01 8.85525151e-01 3.57200963e-02 -6.11068381e-01 6.95878785e-01 -4.96721715e-01 4.01452073e-01 8.05218808e-02 8.97672577e-01 2.48673405e-01 6.75955924e-01 -9.84134248e-01 9.78680112e-01 -8.44570859e-01 -3.55740973e-01 8.92304791e-01 -9.82121795e-01 6.45460011e-01 7.22423277e-01 -1.20338372e-01] [-4.88509612e-01 6.05379039e-01 -4.42759911e-02 -7.31322783e-01 8.55697986e-01 7.91939934e-01 -1.69097000e-02 7.13404993e-01 -1.62843948e-01 3.66929800e-01 -2.04018721e-01 1.14840349e-02 -6.20896594e-01 9.29977848e-01 -4.11568624e-01 -7.93080888e-01 -7.11369200e-01 -9.71815412e-01 4.31891399e-01 1.28996640e-01 5.89156702e-01 1.41598466e-02] [-6.10886905e+03 -5.79019057e+02 -6.40566294e+03 1.18767811e+03 -1.01342742e+04 -1.86240499e+03 -2.15603135e+03 -2.10711258e+03 -8.65660720e+03 2.03802936e+03 5.99608730e+02 2.00042826e+03 -1.05805693e+03 -1.15980188e+03 -2.73319938e+03 -1.67291309e+04 1.25475483e+03 -1.30464506e+03 -1.66037679e+03 -1.29579817e+03 -5.78944000e+02 -8.98583064e+02] [-2.07844579e+04 -1.97073735e+03 -2.17941673e+04 4.04168720e+03 -3.44778184e+04 -6.33356185e+03 -7.33354595e+03 -7.16537606e+03 -2.94480398e+04 6.93442411e+03 2.04064938e+03 6.80294152e+03 -3.59835342e+03 -3.94803962e+03 -9.29960653e+03 -5.69107799e+04 4.26655933e+03 -4.43970858e+03 -5.64782203e+03 -4.41041367e+03 -1.96821154e+03 -3.05628880e+03] [-2.90362517e+04 -3.23539320e+03 -3.11380704e+04 4.92765450e+03 -4.84417886e+04 -8.55386725e+03 -1.08665505e+04 -1.02008967e+04 -4.16683053e+04 9.38923583e+03 2.54299249e+03 9.38942521e+03 -4.99157708e+03 -5.90374980e+03 -1.33009481e+04 -7.82782998e+04 5.48631547e+03 -6.50660331e+03 -8.54395299e+03 -5.96592672e+03 -2.76329449e+03 -4.24368174e+03] [-8.26758447e+03 1.65882517e+04 -1.49306419e+04 -1.26716194e+04 -2.30965193e+04 6.41323511e+03 3.47227189e+03 -1.07091387e+04 -1.65547187e+04 3.27722388e+03 2.81782718e+04 5.75023101e+03 2.70228407e+04 1.82241095e+04 -1.51810703e+04 -3.66371703e+04 -5.85993165e+03 1.35850381e+04 
-2.05744699e+04 1.54083332e+04 2.69523003e+04 2.37891464e+04] [ 5.16110543e+03 3.65583311e+04 2.10762096e+03 7.24860847e+03 -9.95766772e+03 1.75890349e+04 2.22366001e+04 3.12644123e+03 -2.59561876e+03 2.22116418e+04 4.83208237e+04 2.39499323e+04 4.33058081e+04 3.60052448e+04 -1.45680729e+03 -5.83324706e+04 1.28269578e+04 3.19492402e+04 -3.55852557e+03 2.82949181e+04 4.41198831e+04 4.06003160e+04] [-4.07688934e+03 1.77587811e+04 -3.84227587e+03 1.49217638e+04 -1.48238716e+04 7.07105521e+03 9.81185782e+03 5.03204679e+03 -1.04906380e+04 2.22798115e+04 2.33968147e+04 2.19434030e+04 1.56643304e+04 1.42210251e+04 7.30082967e+02 -5.72669261e+04 1.77933086e+04 1.37590788e+04 6.06462661e+03 1.05439134e+04 1.72386322e+04 1.56289532e+04] [ 1.54574511e+04 3.87253657e+04 2.08168112e+04 2.65690183e+04 2.27614061e+04 1.56077087e+04 2.52063637e+04 2.02434204e+04 1.83063410e+04 3.54813230e+04 3.93777556e+04 3.24337218e+04 2.94542764e+04 3.12011774e+04 1.11929068e+04 -3.35397098e+04 3.19327571e+04 2.92794972e+04 2.30104914e+04 1.86591680e+04 3.24138764e+04 3.21754534e+04] [ 1.42570859e+04 4.68386216e+04 4.03106210e+04 4.62503233e+04 5.96190191e+04 1.23359957e+03 2.07986093e+04 3.48205009e+04 3.09233569e+04 3.59087930e+04 2.63818322e+04 2.80828342e+04 1.79809478e+04 4.47499325e+04 2.28033219e+04 -2.60669840e+04 4.03230104e+04 3.48262769e+04 4.87481424e+04 -8.34407855e+03 3.39083373e+04 4.61739295e+04] [ 1.51177057e+04 5.93878816e+04 4.50460933e+04 6.73385512e+04 7.31718439e+04 -1.92695170e+03 2.45474713e+04 5.23300708e+04 3.27470199e+04 5.44047139e+04 3.45919931e+04 4.27440615e+04 1.80582667e+04 5.60247590e+04 2.97352916e+04 -4.96976288e+04 6.40829290e+04 4.84800732e+04 7.42766529e+04 -1.18336846e+04 4.42765342e+04 6.45736295e+04] [-1.56226980e+04 1.44504076e+04 -2.06435071e+04 4.63179566e+04 -6.73887224e+02 -1.55858371e+04 6.75960787e+03 2.35601965e+04 -2.20044255e+04 6.76132380e+04 1.99490305e+04 5.57000301e+04 -1.83428829e+04 -5.32054395e+03 -4.45651477e+03 -7.24278603e+04 7.84861359e+04 1.44161427e+04 4.56510144e+04 -1.23148310e+04 4.65991545e+03 5.81904776e+03] [ 3.89322019e+04 8.03371086e+04 3.30005976e+04 2.95962509e+04 2.96450438e+04 2.65517379e+04 6.50422056e+04 2.10195842e+04 3.54278049e+04 6.57922278e+04 1.15757883e+05 6.26566784e+04 7.90593590e+04 6.86703894e+04 3.52272156e+02 -6.88931540e+04 5.62307776e+04 7.02321915e+04 1.35925039e+04 4.84761526e+04 8.05122182e+04 7.33897036e+04] [ 6.18454968e+04 1.18926090e+05 5.29411950e+04 -8.29052898e+04 4.54849974e+04 4.42352071e+04 8.76586341e+04 -1.02028656e+04 6.26565808e+04 3.73502048e+04 2.07436666e+05 3.41122037e+04 1.39743948e+05 1.07486371e+05 -5.59005717e+04 5.60972198e+03 1.27152696e+04 9.34158618e+04 -4.12791020e+04 7.70694287e+04 1.26077697e+05 1.05682726e+05] [ 4.39655204e+04 5.91223119e+04 3.20697954e+04 -8.52997912e+04 1.27613305e+04 3.79410914e+04 4.52402990e+04 -2.01278207e+04 3.71621504e+04 -4.17131842e+03 1.14196952e+05 -1.61261835e+03 8.73070140e+04 5.72237333e+04 -3.83659354e+04 4.90001618e+04 -2.67688959e+04 4.19016041e+04 -5.49553337e+04 5.32633653e+04 7.05763791e+04 4.95783822e+04] [ 2.81592409e+04 4.15453689e+04 2.25673546e+04 -2.90059730e+04 1.60310185e+04 2.48571476e+04 2.89123581e+04 -1.04982438e+03 2.38143462e+04 1.22514180e+04 7.02043982e+04 1.13135313e+04 4.74068276e+04 3.38569726e+04 -1.31336730e+04 6.49917066e+03 -3.59874554e+02 2.90446873e+04 -1.67542281e+04 3.09018950e+04 4.11910618e+04 3.08301367e+04] [ 1.24579662e+04 1.97312510e+04 1.11590511e+04 6.86493517e+02 1.04019247e+04 1.09698956e+04 1.41173559e+04 5.61198468e+03 
1.14871364e+04 1.20051512e+04 2.86925326e+04 1.11882921e+04 1.94178571e+04 1.60754181e+04 7.09667726e+02 -6.89930389e+03 7.91710806e+03 1.51346815e+04 2.14047505e+03 1.33413127e+04 1.83398759e+04 1.57387044e+04] [ 2.54760562e+03 9.12845441e+03 4.71630620e+02 -1.25776610e+04 2.46922294e+03 2.18927539e+03 4.29907081e+03 -2.88617506e+03 3.02268467e+03 5.35722640e+03 1.90908777e+04 4.44066142e+03 8.67977953e+03 6.10658658e+03 -1.19586619e+04 1.44024588e+03 2.47970493e+03 5.54284925e+03 -5.84918297e+03 4.25935063e+03 7.60675473e+03 6.72188852e+03] [-4.01182093e+03 3.73165702e+03 -7.21263304e+03 -2.53069159e+04 -4.20397579e+03 -3.95312683e+03 -2.33022093e+03 -1.09786544e+04 -3.68050208e+03 -1.93036584e+02 1.71619867e+04 -1.43590909e+03 3.29535582e+03 -1.43587777e+02 -2.33606813e+04 5.44360770e+03 -3.50953987e+03 -8.31302605e+02 -1.49925860e+04 -1.72800693e+03 2.08403078e+03 8.70448198e+02] [-1.02525482e-01 4.08626797e-01 3.63290675e-01 3.94297058e-01 2.37201485e-01 -6.98038533e-01 5.21604913e-01 5.62091644e-01 8.08205972e-01 -5.32462615e-01 -6.46642214e-01 -2.17801754e-01 -3.58870692e-01 6.30953858e-01 2.27051799e-01 5.20003505e-01 -1.44669801e-01 -8.01118874e-01 -7.69929976e-01 -2.53185737e-01 -6.12304465e-01 6.41492997e-01] [ 1.99272017e-01 3.77690518e-01 -1.77800774e-02 -8.23652638e-01 -5.29844727e-01 -7.67958382e-02 -6.02816994e-01 -9.49047528e-01 4.58795397e-01 4.49833494e-01 -3.39216507e-01 6.86988252e-01 -1.43115048e-01 7.29372290e-01 3.14130849e-01 1.62071315e-01 -5.98545024e-01 5.90932210e-02 7.88864837e-01 -3.90012048e-01 7.41891218e-01 8.17490546e-01] [-3.40310875e-01 3.66148733e-01 7.98441899e-01 -8.48606236e-01 7.57175726e-01 -6.18321273e-01 6.99537820e-01 3.34237577e-01 -3.11321609e-01 -6.97248860e-01 2.70741923e-01 6.95576087e-01 6.43698750e-01 2.56479194e-01 9.12603020e-01 1.79846254e-01 -6.04334431e-01 -1.41338555e-01 -3.26508003e-01 9.83890024e-01 -2.39527008e-01 9.85401747e-01] [ 3.76085015e-02 -6.55440597e-01 -8.50851857e-01 -2.59388612e-01 -7.53162280e-01 2.69037433e-01 -1.72160309e-01 9.81831265e-01 8.59911247e-01 -7.01527935e-01 -2.10235475e-01 -7.68405781e-02 1.21897510e-01 5.60727047e-01 -2.56121819e-02 -1.60012896e-01 -4.76000591e-01 8.21612278e-01 -9.55456977e-01 6.42243796e-01 -6.23063201e-01 3.71513798e-01] [-2.89581221e-01 9.48425256e-01 -7.54455741e-01 -6.24860215e-01 7.78884951e-01 1.66812629e-01 -3.81507231e-01 -9.98471229e-01 -5.44804523e-01 -7.09192732e-01 -5.93132351e-01 7.92645114e-01 7.46188757e-01 4.00578875e-01 -5.90046477e-02 6.54272005e-01 -8.34720583e-03 -2.73022633e-01 -4.48793794e-01 8.49481627e-01 -2.26021531e-01 -1.42382531e-02] [-4.91123795e-01 7.69933038e-01 -2.33473086e-01 -4.04850569e-01 4.35189924e-01 -6.18260114e-01 -7.63614741e-01 6.73995564e-01 4.88271843e-01 1.81041095e-01 -5.14216850e-01 2.46494290e-01 2.76710641e-01 -3.44861112e-01 -8.65021314e-01 7.61077195e-01 -8.00865379e-02 5.27745436e-01 -4.92222758e-01 1.82774365e-01 -1.42409679e-01 -2.35798715e-01] [-7.46573232e-01 -5.11466674e-01 -8.41316834e-01 -3.94283391e-01 4.83409600e-01 2.30031450e-01 3.44822198e-01 -9.83233841e-01 3.56753945e-01 6.36138109e-03 -5.38183099e-01 -6.50206982e-01 -6.30034069e-01 6.88520010e-01 9.65179579e-01 8.27479250e-01 -3.05261159e-01 5.60449159e-01 9.29091814e-02 6.32884795e-01 -3.25460976e-02 1.79902597e-01] [ 5.74526895e-01 -7.99530714e-01 7.40142337e-02 -2.68033307e-01 2.08613687e-01 9.17605031e-01 -2.14354110e-01 -2.14054004e-01 5.81273987e-01 2.90156798e-01 -2.12231984e-01 3.77910096e-01 -1.91996782e-01 -7.27773692e-01 -9.87134627e-02 -3.31161215e-01 
-5.64062140e-01 8.51549603e-01 3.79278798e-01 1.97641781e-01 4.93314944e-01 8.39421235e-02] [ 5.39810804e+03 5.26526411e+03 6.61127037e+03 6.85431001e+01 7.03159453e+03 1.70308061e+03 5.08998965e+03 1.09918379e+03 7.11709442e+03 -7.18688769e+01 4.80685090e+03 6.66748794e+01 4.84412375e+03 5.14889150e+03 1.83457576e+03 4.16716880e+03 5.57287991e+01 4.82023680e+03 1.22567623e+03 2.74423141e+03 5.10328704e+03 4.81834219e+03] [ 1.03790645e+03 1.00109806e+03 1.26003112e+03 -2.96125086e+02 1.24293390e+03 1.41656830e+02 9.57789980e+02 -9.85821706e+00 1.39013646e+03 -3.27437515e+02 8.67803089e+02 -2.93617977e+02 8.83490175e+02 9.69517359e+02 1.99608288e+02 1.24244716e+03 -2.68600645e+02 8.81423702e+02 2.12843983e+01 4.00321806e+02 9.47477627e+02 8.79533985e+02] [ 2.37262058e+03 2.57842008e+03 2.37471713e+03 -3.53831675e+03 2.39795902e+03 2.81655237e+02 2.11335575e+03 -1.61849968e+03 3.34432058e+03 -2.65290231e+03 2.59269214e+03 -2.29166389e+03 3.25942843e+03 3.12615268e+03 -1.07756023e+03 5.77793029e+03 -2.90044053e+03 2.47167215e+03 -2.42610854e+03 1.64688919e+03 3.29215982e+03 3.22058518e+03] [-2.57104446e+03 1.47935220e+04 -2.06612939e+04 3.03160494e+04 -2.35906740e+04 -1.33254222e+03 -7.38175290e+03 -9.47053778e+02 -2.34650859e+04 3.95244523e+04 3.91146849e+04 3.61950310e+04 1.31637848e+04 5.32756727e+03 -1.25654129e+04 -7.09708222e+04 3.26584169e+04 1.22964378e+04 -1.56706347e+03 5.97540612e+03 1.62507306e+04 2.09586544e+04] [ 6.33035674e+03 9.87387225e+04 1.84953264e+04 -2.60245729e+04 1.90593245e+04 3.14917136e+04 4.87900238e+04 5.42924387e+03 2.61800272e+04 7.14141353e+04 1.27543278e+05 6.46117026e+04 8.38154719e+04 6.94961962e+04 -2.43662729e+04 -6.57610272e+04 5.23327505e+04 6.81900913e+04 5.43554128e+03 4.28798845e+04 9.06177753e+04 7.54782344e+04] [ 2.88519455e+04 3.04163877e+05 8.16235847e+04 -2.65707278e+05 8.01893999e+04 6.46794533e+04 1.39641984e+05 -9.09005207e+04 1.22273422e+05 1.03265957e+05 4.04935213e+05 8.86646788e+04 3.01318542e+05 2.23909353e+05 -1.78862929e+05 -2.38537207e+04 1.58728718e+04 1.89625898e+05 -1.34562234e+05 1.15786504e+05 2.98427083e+05 2.24249533e+05] [ 9.02816954e+04 4.39734922e+05 1.30659590e+05 -4.45899111e+05 1.06238082e+05 1.36386502e+05 2.23590285e+05 -1.44296691e+05 1.88334583e+05 8.04819644e+04 6.48059554e+05 7.13260159e+04 4.68773164e+05 3.34405880e+05 -2.47079153e+05 2.60039931e+04 -3.98731019e+04 2.93927083e+05 -2.53097265e+05 2.16954726e+05 4.40982071e+05 3.43566097e+05] [ 5.27487541e+04 4.91676475e+05 7.83411238e+04 -4.60540250e+05 7.33245112e+04 9.76378421e+04 1.97314629e+05 -1.72603628e+05 1.38801359e+05 1.45324670e+05 7.59264123e+05 1.23239581e+05 5.23787741e+05 3.77895354e+05 -3.57989672e+05 -1.22677654e+05 4.42642353e+03 3.07565354e+05 -2.78802795e+05 2.01643851e+05 4.92571054e+05 4.11820011e+05] [ 1.98858137e+05 6.73258279e+05 2.68042602e+05 -7.71249808e+04 3.07418811e+05 1.65411007e+05 3.85329022e+05 8.88879005e+04 2.75248457e+05 4.39999147e+05 8.19466081e+05 3.84950670e+05 5.88772874e+05 5.18743410e+05 -1.32259228e+05 -3.72237929e+05 3.35864573e+05 4.65625616e+05 6.70330719e+04 2.81885894e+05 6.08325967e+05 5.48189227e+05] [ 3.41057930e+05 9.01664183e+05 4.65939501e+05 -1.06618299e+05 5.31196171e+05 2.70944717e+05 5.16287340e+05 1.52802564e+05 5.28143477e+05 4.79984027e+05 9.75106560e+05 4.27438707e+05 8.29942792e+05 7.43584565e+05 -6.44100053e+04 -1.82335164e+05 3.69233391e+05 6.31546316e+05 9.58176777e+04 4.18737620e+05 8.46250520e+05 7.69874502e+05] [ 7.06418027e+05 1.27652926e+06 8.85914725e+05 2.96203146e+05 9.55248856e+05 
5.16786995e+05 9.08318069e+05 4.71432763e+05 9.32273643e+05 7.59631835e+05 1.33482147e+06 6.88185290e+05 1.14502626e+06 1.12333705e+06 2.73725206e+05 -2.64008224e+05 6.78525570e+05 1.03753218e+06 4.33230600e+05 6.88338916e+05 1.18905754e+06 1.15869527e+06] [ 9.52095003e+05 1.90915082e+06 1.23707481e+06 1.68359027e+05 1.33446768e+06 5.89216251e+05 1.33580173e+06 5.09429604e+05 1.30193566e+06 1.00324447e+06 2.06561553e+06 8.65679632e+05 1.68957037e+06 1.62580490e+06 1.59382914e+05 -4.26506358e+05 8.45047638e+05 1.52747860e+06 4.18986044e+05 8.96249634e+05 1.75671647e+06 1.70248168e+06] [ 1.71397007e+06 2.85976246e+06 2.07341878e+06 7.01558596e+04 2.06527707e+06 1.15242307e+06 2.20802555e+06 7.47192980e+05 2.24646917e+06 1.33553662e+06 3.13410559e+06 1.20785625e+06 2.77663637e+06 2.54736632e+06 3.96975913e+05 -2.50958458e+04 1.03972866e+06 2.37364418e+06 4.85762357e+05 1.66293497e+06 2.76480478e+06 2.57611793e+06] [ 1.83187000e+06 2.94279172e+06 2.21601956e+06 -3.40232690e+05 2.22279344e+06 1.21102303e+06 2.27133988e+06 6.17035152e+05 2.48833984e+06 1.10447650e+06 3.22771460e+06 1.00439030e+06 2.95033864e+06 2.64543408e+06 2.77081190e+05 6.38636103e+05 7.91640443e+05 2.41657719e+06 2.47263802e+05 1.80113802e+06 2.89226089e+06 2.64500507e+06] [ 1.41936237e+06 2.30241568e+06 1.54523025e+06 -7.25744535e+05 1.44330055e+06 9.72698750e+05 1.72160019e+06 1.38623710e+05 1.85222287e+06 7.15581905e+05 2.86143425e+06 6.74160919e+05 2.59927237e+06 2.13767363e+06 -1.69504884e+05 7.63303827e+05 3.03379076e+05 1.85897074e+06 -3.77123188e+05 1.56409723e+06 2.43000379e+06 2.13585525e+06] [ 9.43900061e+05 1.61946437e+06 1.01083204e+06 -8.26322446e+05 1.00106608e+06 6.86752671e+05 1.14068219e+06 -4.21153043e+04 1.29663971e+06 4.15228265e+05 2.12193968e+06 3.82944488e+05 1.86088179e+06 1.46829993e+06 -3.28089231e+05 7.75882736e+05 7.54750637e+04 1.25249200e+06 -4.64955911e+05 1.11080179e+06 1.70725233e+06 1.46905293e+06] [ 3.55999062e+05 8.94302344e+05 4.59394514e+05 -9.47792119e+05 6.26427087e+05 3.10411019e+05 4.65288557e+05 -1.82667476e+05 7.50582118e+05 8.13314294e+04 1.19345181e+06 4.99227837e+04 9.63684184e+05 7.17650997e+05 -4.98733026e+05 8.63912761e+05 -9.31794520e+04 6.00368779e+05 -4.34117065e+05 5.36212683e+05 8.85575073e+05 7.30022165e+05] [ 8.42814411e+04 4.62954311e+05 1.67527863e+05 -7.22807939e+05 2.68754649e+05 1.18380358e+05 1.51829064e+05 -2.34232234e+05 3.54168237e+05 1.65751568e+04 6.73444873e+05 -3.94764807e+03 4.98453155e+05 3.42431121e+05 -4.52215087e+05 4.99937104e+05 -1.05451462e+05 2.65080144e+05 -3.81918830e+05 2.30155095e+05 4.44172531e+05 3.36656924e+05] [ 4.06893214e+04 2.16558999e+05 6.97851090e+04 -1.99336585e+05 1.10598213e+05 4.16249665e+04 9.20253317e+04 -7.18961540e+04 1.31450035e+05 7.11589826e+04 3.10906960e+05 5.44058012e+04 2.11834420e+05 1.65888641e+05 -1.64844437e+05 4.89832139e+04 1.16306032e+04 1.31323542e+05 -1.19307043e+05 8.61157452e+04 2.02335310e+05 1.70005831e+05] [ 4.88907143e+04 7.16390262e+04 5.35588137e+04 7.45197354e+03 6.10780399e+04 4.23783887e+04 5.85250795e+04 2.72132416e+04 6.26491372e+04 4.78526626e+04 8.58145723e+04 4.44344646e+04 7.11974134e+04 6.55738542e+04 1.16028059e+04 1.14791497e+04 3.94432268e+04 6.21730527e+04 2.15898573e+04 5.04086169e+04 7.00835741e+04 6.58288837e+04] [ 2.15726400e+04 2.31033105e+04 2.29342353e+04 1.76564864e+04 2.32877146e+04 1.84566427e+04 2.29918182e+04 1.78588396e+04 2.32014198e+04 1.84650464e+04 2.35733499e+04 1.83503789e+04 2.34968016e+04 2.29706123e+04 1.76076712e+04 9.32238050e+02 1.81096518e+04 2.31875277e+04 
1.77574234e+04 2.04565547e+04 2.32750349e+04 2.30123299e+04] [-4.41523016e-02 1.09036997e-01 7.50807725e-01 8.98528868e-01 2.78369512e-01 4.97040640e-01 -2.96750187e-01 8.36056638e-01 6.73504100e-01 1.04111762e-01 7.35254419e-01 6.99743763e-01 6.16822713e-01 -7.38614351e-01 7.20051496e-02 8.01566229e-01 5.41663661e-01 -1.58573602e-01 9.55760787e-01 -4.98453687e-01 -3.32779542e-01 9.08401015e-01] [ 6.17430696e-01 8.00680009e-01 -9.76820821e-01 -2.40471134e-01 -5.28956087e-01 -4.08129896e-01 4.75990993e-01 4.48169690e-01 -7.64011796e-02 -2.69451078e-01 1.69383226e-01 6.74155093e-02 1.01159118e-01 2.30954142e-01 -2.08703455e-01 5.43436965e-01 -4.97593633e-01 3.11521536e-01 9.20794296e-01 -9.92890479e-01 4.46722089e-01 -7.82009932e-01] [ 8.75749833e-02 2.53832033e-01 -5.14105748e-01 8.27687750e-01 2.44625057e-01 -5.36610617e-01 5.40264375e-01 -7.88985840e-01 1.10450637e-01 -8.10087860e-01 1.61968145e-01 -4.94039775e-01 2.98933616e-01 4.31653906e-01 4.83712291e-02 3.15042815e-01 6.26587691e-01 6.15024543e-01 2.39363984e-01 -9.77950758e-02 -1.84100022e-01 7.49495240e-01] [ 4.05863464e-01 -6.30129955e-01 1.31464293e-01 8.17616065e-01 -6.98620016e-01 6.91035351e-01 6.40404933e-01 -1.31779740e-03 -9.24832297e-01 -7.04994989e-01 8.45277389e-01 -9.11690408e-01 -2.55002467e-01 2.38079269e-01 -6.06103922e-01 9.62159514e-01 -6.88049534e-01 7.93583903e-02 2.36022649e-01 -5.38020103e-01 -6.73820216e-02 -1.79829417e-02] [ 5.33724753e-01 2.68561506e-01 -6.48998939e-02 3.33885086e-01 -9.32451470e-02 5.14231827e-02 3.17136083e-01 -6.37609236e-01 5.76099102e-01 2.05730675e-01 9.60665770e-01 -5.55233613e-01 -6.74670517e-02 7.91283605e-01 2.02866819e-01 4.51917692e-01 -1.72659672e-01 3.68619244e-01 8.92110060e-01 -5.99627869e-01 6.14730528e-01 9.39076024e-01] [-9.70862536e-01 -5.69580827e-01 -7.65246159e-01 5.34847035e-01 -8.37566828e-01 9.96827117e-01 -2.20775968e-01 8.10405532e-01 -6.22844793e-01 -1.18255600e-01 6.89623638e-01 1.90539561e-01 -7.56838066e-02 2.52994935e-02 1.47771054e-01 -1.19361585e-01 -6.12882085e-01 -7.72068477e-01 -6.97544145e-01 -8.95018072e-01 1.17352822e-01 6.95442028e-01] [-2.81761322e-01 -8.49378255e-01 -1.48551176e-01 2.04015677e-01 -1.49193069e-01 6.21442467e-01 8.83772492e-01 7.39821547e-01 4.07998997e-01 -5.94021504e-02 3.65396086e-01 -9.73163379e-01 9.64956237e-01 -6.55845336e-01 8.12520792e-01 6.14219803e-01 6.00279369e-01 -4.62127884e-01 -5.61692388e-01 -1.42398614e-01 6.98742201e-01 -9.92407151e-02] [-4.62695667e+03 -1.98538686e+03 -7.15699938e+03 2.98476032e+03 -7.22600344e+03 -6.66818494e+03 -4.80135068e+03 -5.20952243e+03 -7.25682094e+03 1.46178978e+03 3.64618895e+03 8.62338015e+02 -2.59185382e+03 -2.65843914e+03 -7.56830618e+03 -1.09717420e+04 3.75268541e+01 -3.49917649e+03 -6.37349053e+03 -5.60909535e+03 -1.61840329e+03 -9.50684434e+02] [ 8.12161835e+02 1.09368954e+04 -4.64128164e+03 1.52673126e+04 -2.55874229e+03 -1.12679537e+04 2.93552774e+03 -6.61191510e+03 -3.87857862e+03 1.22509824e+04 2.44642977e+04 1.09441325e+04 6.14591443e+03 7.66777605e+03 -1.29203788e+04 -2.68810659e+04 8.35423287e+03 5.86098655e+03 -8.15416000e+03 -5.77302939e+03 1.07781679e+04 1.22352779e+04] [ 1.10044554e+05 1.20628548e+05 1.12256935e+05 1.19275408e+05 1.13821946e+05 8.48450353e+04 1.23159617e+05 9.16841565e+04 1.10850074e+05 1.04555218e+05 1.28336346e+05 1.05334995e+05 1.18932808e+05 1.20838375e+05 8.83412357e+04 -4.29810847e+04 1.01675177e+05 1.19516684e+05 9.27122744e+04 9.49436928e+04 1.24557948e+05 1.23944499e+05] [ 2.73560338e+05 3.13631419e+05 2.82063372e+05 2.83988048e+05 
2.71591083e+05 2.17809272e+05 2.92205125e+05 2.40321525e+05 2.82701349e+05 2.78856477e+05 3.29544176e+05 2.79187655e+05 3.06630679e+05 2.96173071e+05 2.31171506e+05 -1.05539221e+05 2.67627681e+05 2.95980127e+05 2.56203608e+05 2.40059220e+05 3.18333797e+05 3.04458670e+05] [ 4.71863177e+05 7.67405939e+05 5.44924315e+05 3.46135361e+05 5.61056486e+05 4.22974791e+05 6.26548235e+05 4.00549627e+05 5.76625980e+05 5.79679675e+05 8.27998261e+05 5.72200769e+05 7.05987683e+05 6.70637696e+05 3.00557696e+05 -1.87890006e+05 5.24834106e+05 6.52512436e+05 4.31798834e+05 5.02707998e+05 7.50863799e+05 6.84572493e+05] [ 7.89713861e+05 1.52069108e+06 8.69223164e+05 3.88648512e+05 9.71014698e+05 7.37121033e+05 1.12241429e+06 5.93696732e+05 9.54655552e+05 1.14025439e+06 1.75470176e+06 1.11803994e+06 1.35706423e+06 1.22469850e+06 3.03441453e+05 -4.30384778e+05 1.01348333e+06 1.24978615e+06 6.29175084e+05 9.63408184e+05 1.47115751e+06 1.31990075e+06] [ 1.16696365e+06 2.36028472e+06 1.27019459e+06 8.06891957e+05 1.43378686e+06 1.10052717e+06 1.68767959e+06 9.78769155e+05 1.38211216e+06 1.84456497e+06 2.75021133e+06 1.81247894e+06 2.08888447e+06 1.90373421e+06 5.06491601e+05 -1.09430811e+06 1.67532693e+06 1.93499095e+06 1.04067602e+06 1.46988566e+06 2.27186940e+06 2.10301338e+06] [ 1.71077451e+06 3.32814652e+06 1.88973231e+06 1.41052977e+06 2.16680444e+06 1.57962929e+06 2.40798346e+06 1.53856185e+06 2.02216919e+06 2.69153857e+06 3.82508296e+06 2.62017460e+06 2.94533617e+06 2.75759428e+06 8.61266417e+05 -1.58862135e+06 2.47594632e+06 2.74597246e+06 1.63840078e+06 2.06083577e+06 3.18894329e+06 3.01866083e+06] [ 2.16948647e+06 4.08452238e+06 2.53074158e+06 2.27132278e+06 2.98649345e+06 1.87628023e+06 2.96543071e+06 2.17710305e+06 2.60468101e+06 3.49013028e+06 4.38192281e+06 3.32320632e+06 3.40618197e+06 3.39724263e+06 1.36426921e+06 -2.20342301e+06 3.29015001e+06 3.34157696e+06 2.44222538e+06 2.35315983e+06 3.76723004e+06 3.67637373e+06] [ 2.59266421e+06 5.34788908e+06 3.21436626e+06 2.83613542e+06 3.88920139e+06 2.22931674e+06 3.65911652e+06 2.75726049e+06 3.39357307e+06 4.47070174e+06 5.61472524e+06 4.21603391e+06 4.39309794e+06 4.40073141e+06 1.59360272e+06 -2.91163601e+06 4.21617823e+06 4.23549568e+06 3.15909116e+06 2.85133841e+06 4.90532067e+06 4.78673532e+06] [ 3.34040885e+06 6.62127758e+06 4.13100816e+06 2.96965309e+06 4.92933949e+06 2.68556906e+06 4.60568818e+06 3.11790519e+06 4.38530938e+06 5.14593915e+06 7.04715793e+06 4.79448328e+06 5.51666950e+06 5.48826165e+06 1.76263240e+06 -3.02998723e+06 4.82897370e+06 5.31696516e+06 3.46498219e+06 3.51455368e+06 6.07892506e+06 5.93580634e+06] [ 4.13270257e+06 8.36644220e+06 5.28979502e+06 2.10846390e+06 6.09774330e+06 3.19127830e+06 5.74248191e+06 3.12127560e+06 5.68573550e+06 5.55038631e+06 9.08594538e+06 5.08671306e+06 7.23955787e+06 7.05235532e+06 1.46008017e+06 -2.54823927e+06 4.96702273e+06 6.62108221e+06 3.19413732e+06 4.25253959e+06 7.76434039e+06 7.47643474e+06] [ 4.69608139e+06 9.44044373e+06 6.07042129e+06 6.60246543e+05 6.68519467e+06 3.52967463e+06 6.51690843e+06 2.64933987e+06 6.66865108e+06 5.36651561e+06 1.05196513e+07 4.87408994e+06 8.54008516e+06 8.02774308e+06 8.90383844e+05 -1.19935148e+06 4.44912431e+06 7.40400939e+06 2.37943508e+06 4.83068489e+06 8.84789270e+06 8.30276079e+06] [ 4.63669702e+06 9.26559491e+06 5.93548089e+06 -2.01568517e+06 6.36464950e+06 3.40631326e+06 6.30348700e+06 1.39400922e+06 6.87847551e+06 3.82215566e+06 1.07697742e+07 3.43872034e+06 9.01349312e+06 7.94887444e+06 -2.58534833e+05 1.59138626e+06 2.68857418e+06 7.11097545e+06 
5.05395716e+05 4.97657897e+06 8.92914437e+06 8.08646257e+06] [ 3.45247949e+06 7.88203918e+06 4.39600819e+06 -4.79512100e+06 4.60237189e+06 2.62178625e+06 4.89249967e+06 -4.69865021e+05 5.58716770e+06 1.99202681e+06 1.03222458e+07 1.78127604e+06 8.23512231e+06 6.56002525e+06 -2.13698494e+06 3.43904300e+06 6.38205496e+05 5.57981899e+06 -1.96154926e+06 4.21261087e+06 7.72881155e+06 6.58711084e+06] [ 2.28451637e+06 5.82268819e+06 3.02895864e+06 -5.08205052e+06 3.21483596e+06 1.91134010e+06 3.40908111e+06 -9.13662931e+05 4.11027870e+06 8.24516438e+05 7.99210870e+06 7.04297853e+05 6.19959491e+06 4.71105370e+06 -2.34659403e+06 3.78839998e+06 -3.10184616e+05 3.87714794e+06 -2.31680729e+06 3.03814065e+06 5.69925672e+06 4.67956121e+06] [ 1.29643485e+06 3.39253741e+06 1.72100145e+06 -3.81535508e+06 1.93673378e+06 1.15751074e+06 1.97631956e+06 -7.82383754e+05 2.46387201e+06 6.48830599e+03 4.80078478e+06 -2.73328112e+04 3.69081719e+06 2.71400600e+06 -1.66451947e+06 3.09922698e+06 -6.42649618e+05 2.24656730e+06 -1.76239737e+06 1.84323959e+06 3.36177939e+06 2.70826487e+06] [ 7.33799334e+05 1.90080469e+06 1.07625215e+06 -2.53454488e+06 1.22708049e+06 6.36326064e+05 1.13259997e+06 -5.41586179e+05 1.56253217e+06 -2.28702947e+05 2.59169217e+06 -2.50819390e+05 2.07505896e+06 1.51814902e+06 -9.90980519e+05 2.26420481e+06 -5.80534131e+05 1.23061358e+06 -1.09552953e+06 1.00520877e+06 1.86256951e+06 1.46029798e+06] [ 3.35101789e+05 9.16422573e+05 5.29258022e+05 -1.42243023e+06 5.70664602e+05 2.55796011e+05 5.22402633e+05 -3.44931172e+05 7.51840135e+05 -2.40166364e+05 1.23900993e+06 -2.63040875e+05 9.92670323e+05 7.11316647e+05 -5.41636706e+05 1.21361078e+06 -4.29806691e+05 5.42871485e+05 -6.32980974e+05 4.19866918e+05 8.64111114e+05 6.74376164e+05] [ 1.12067521e+05 2.67334859e+05 1.68268319e+05 -4.33912421e+05 1.55764973e+05 8.23035095e+04 1.63080643e+05 -1.10409789e+05 2.21265840e+05 -9.15794276e+04 3.77815368e+05 -9.65316224e+04 3.18304005e+05 2.21511122e+05 -1.61825420e+05 3.25202053e+05 -1.51724965e+05 1.63760415e+05 -2.17572696e+05 1.42064229e+05 2.70378518e+05 2.07095027e+05] [ 2.01184556e+04 5.61952499e+04 4.27545304e+04 -1.58050002e+05 4.33892544e+04 1.10904832e+04 2.82741540e+04 -4.45320405e+04 6.42477234e+04 -4.56330888e+04 8.29912721e+04 -4.52857147e+04 6.62805829e+04 4.43067097e+04 -6.59149635e+04 1.19980216e+05 -6.03594242e+04 2.90078912e+04 -7.32303393e+04 2.42848649e+04 5.43126894e+04 3.57906190e+04] [-8.14039436e-01 4.62702082e-01 1.37799436e+00 -1.00370063e+00 -6.07463193e-01 7.31723733e-01 4.21682367e-01 -5.36372194e-01 1.36289959e+00 6.02870877e-01 -3.92045803e-02 -8.22640928e-01 6.65617718e-01 9.82699292e-01 -4.81690058e-01 -2.49208354e-02 -1.06512816e+00 -3.12695318e-01 3.59562599e-01 -7.52399222e-01 -2.72020525e-02 6.20632747e-01] [-9.69821385e-01 -9.35611192e-01 -4.24948120e-01 -3.26753639e-01 -2.75567789e-01 6.14381151e-01 -9.28038065e-01 -2.83209511e-01 2.76556267e-01 -7.13991781e-01 7.15477229e-01 2.91128817e-01 -7.69413149e-01 -5.83805180e-01 4.90996661e-01 -1.93186743e-01 -3.10598781e-02 7.74910156e-01 -7.25112901e-01 9.28745595e-01 -9.14740636e-01 -8.01119485e-01] [-5.53523138e-01 6.13170165e-01 -8.82543853e-02 3.06502285e-01 -6.51357725e-01 -6.66556134e-01 4.16966811e-02 4.80587271e-01 -7.50510696e-02 7.39228170e-01 3.20823869e-01 -5.01819823e-01 -5.30195558e-01 -2.51906566e-01 5.13232113e-01 8.75875329e-01 7.23811999e-01 -4.76778415e-01 -1.75442144e-01 2.23088199e-01 9.36689116e-02 -2.21450109e-01] [-7.97158446e-01 4.24489609e-01 -8.23729672e-01 7.32154859e-01 7.26896881e-01 
4.16901583e-01 -2.71508524e-02 3.98293131e-01 -2.10025880e-01 5.94991969e-01 5.72834165e-01 -6.88942832e-01 -3.64364899e-01 -3.12033646e-01 -2.71659403e-01 1.33291113e-01 -9.16625115e-01 3.19939438e-01 -5.96736482e-01 -1.67074888e-01 -9.82146870e-02 -3.02546461e-01] [-2.33439380e-01 -6.39387878e-01 -8.63739885e-01 1.02513298e-01 1.91733891e-01 -7.09000422e-01 -5.38116583e-01 -4.27245737e-01 6.61502734e-01 -6.93158118e-01 1.04277615e-01 1.36279435e-01 -3.13159814e-01 -2.98765621e-01 9.18224035e-01 1.06687899e-01 3.05462402e-01 -1.85197671e-02 2.93299963e-01 2.10501571e-01 -7.09653414e-01 -3.30638934e-01] [-7.91320131e-02 -7.44356677e-01 -2.73853353e-01 -5.43716731e-01 6.95005205e-01 -3.17051840e-01 -5.46421359e-01 -1.83637194e-01 3.66128095e-01 -8.68756002e-01 -6.38541573e-01 3.88613878e-01 5.04513210e-01 2.35508559e-01 7.58397121e-01 -6.62399620e-01 4.07095464e-01 8.34520318e-01 -6.98520682e-01 -5.12472045e-01 9.33691442e-02 -3.11761326e-01] [ 2.37004564e+03 -5.68653078e+03 1.68486533e+02 8.14438689e+03 -1.32982581e+03 -3.67432506e+02 -2.89946132e+02 -7.40290314e+02 -1.16234682e+03 -9.12939703e+02 -6.58980445e+03 -7.06523338e+02 -3.84279873e+03 -1.87814198e+03 4.01145765e+03 -6.56237647e+03 -1.01594833e+03 -1.52041541e+03 -1.05560693e+03 -1.03107297e+03 -4.08469169e+03 -2.69426800e+03] [ 2.06385229e+04 -2.35564873e+04 1.35914067e+04 2.33149266e+04 7.42531392e+03 2.94117408e+03 4.32249838e+03 -5.13240312e+03 1.06259962e+04 -1.22025303e+04 -3.24120654e+04 -1.15440512e+04 -1.30193280e+04 -4.35513238e+03 2.27729490e+04 -8.11891652e+03 -1.27070903e+04 -2.84632844e+03 -6.73645567e+03 -1.25518708e+03 -1.72887450e+04 -1.33888417e+04] [ 1.45163010e+05 1.31953745e+05 1.41123654e+05 2.73325121e+05 1.46352019e+05 8.99786667e+04 1.75899958e+05 1.48857958e+05 1.10965906e+05 1.81481014e+05 1.20188556e+05 1.75873668e+05 1.23388119e+05 1.57670468e+05 1.53977317e+05 -1.30640513e+05 1.83287675e+05 1.64486099e+05 1.74444597e+05 1.09185533e+05 1.40962832e+05 1.59529993e+05] [ 4.08294581e+05 4.48826096e+05 4.43850112e+05 6.46897747e+05 4.37431835e+05 2.97356359e+05 4.97161531e+05 4.42462240e+05 3.94547555e+05 4.75663818e+05 4.06311340e+05 4.75345354e+05 3.97939495e+05 4.59410286e+05 4.41211571e+05 -3.21723955e+05 4.79985212e+05 4.64379822e+05 5.17117357e+05 3.36495424e+05 4.50939973e+05 4.61158775e+05] [ 5.43538887e+05 9.28936211e+05 5.58727530e+05 9.11463351e+05 6.24753441e+05 4.04770026e+05 7.73687617e+05 6.29621120e+05 5.13422893e+05 9.56501704e+05 9.93589293e+05 9.38313643e+05 7.22938959e+05 7.98697159e+05 4.52974333e+05 -9.52643284e+05 8.97506145e+05 8.18481217e+05 7.78596080e+05 5.22854866e+05 9.00082136e+05 8.80181050e+05] [ 9.28260702e+05 1.75167328e+06 9.46900574e+05 1.58317569e+06 1.14198247e+06 7.61884745e+05 1.39417313e+06 1.12786927e+06 8.64696783e+05 1.85301623e+06 1.82048885e+06 1.80186642e+06 1.28703480e+06 1.41735012e+06 7.64675241e+05 -1.92350927e+06 1.76509972e+06 1.52081221e+06 1.43649840e+06 9.94816900e+05 1.64236034e+06 1.61450545e+06] [ 1.84292064e+06 3.32233349e+06 1.87909027e+06 2.77155665e+06 2.24385253e+06 1.58432582e+06 2.63750670e+06 2.07614551e+06 1.76129581e+06 3.41598685e+06 3.49208586e+06 3.33368970e+06 2.53890126e+06 2.69959400e+06 1.43720218e+06 -3.20957414e+06 3.26359454e+06 2.87943772e+06 2.54265915e+06 2.00256859e+06 3.09236807e+06 3.05591777e+06] [ 2.01522947e+06 4.55319426e+06 2.06880424e+06 3.54181961e+06 2.77937316e+06 1.75544928e+06 3.15187947e+06 2.62609294e+06 1.94762137e+06 4.72798345e+06 4.78308317e+06 4.58579940e+06 3.20973866e+06 3.46195655e+06 1.50486761e+06 
-5.18355532e+06 4.53155964e+06 3.65830383e+06 3.39796556e+06 2.37035243e+06 4.12669959e+06 4.09778595e+06] [ 2.10510598e+06 5.46587902e+06 2.28698054e+06 4.33428201e+06 3.40885763e+06 1.78260826e+06 3.49497131e+06 3.15244863e+06 2.19063080e+06 5.92376820e+06 5.57482853e+06 5.64711099e+06 3.51916183e+06 3.98688395e+06 1.63534810e+06 -6.73578613e+06 5.68435388e+06 4.18070554e+06 4.28141862e+06 2.44113536e+06 4.76882337e+06 4.77312365e+06] [ 1.24649635e+06 5.31943506e+06 1.47179725e+06 5.17905246e+06 2.99608066e+06 9.23203176e+05 2.76064567e+06 3.09397997e+06 1.32772752e+06 6.63361599e+06 5.31486928e+06 6.18131529e+06 2.68312476e+06 3.51410871e+06 1.13447397e+06 -8.74136048e+06 6.37866134e+06 3.70606290e+06 4.58910088e+06 1.55694449e+06 4.30683586e+06 4.50514034e+06] [ 1.04211960e+06 5.93071260e+06 1.29808191e+06 5.85470465e+06 2.80554541e+06 7.63335126e+05 2.84583348e+06 3.08013322e+06 1.05746016e+06 7.45994859e+06 6.20989291e+06 6.85172784e+06 3.27567088e+06 3.97741385e+06 7.18135710e+05 -1.07138267e+07 7.04516660e+06 4.08063174e+06 4.54323754e+06 1.61242200e+06 4.92574473e+06 5.18185643e+06] [ 1.17404137e+06 7.21585381e+06 1.36810607e+06 5.99713646e+06 2.99365301e+06 7.96924464e+05 3.30182568e+06 2.87033643e+06 1.17468373e+06 8.58348235e+06 8.33932939e+06 7.77598646e+06 4.55259215e+06 5.03999841e+06 -1.02076303e+05 -1.25175403e+07 7.84484099e+06 5.03880565e+06 4.14509763e+06 1.97919129e+06 6.22293286e+06 6.46973007e+06] [ 1.10109584e+06 8.51771051e+06 1.35307609e+06 5.28476029e+06 2.75315176e+06 7.33414369e+05 3.80605554e+06 2.05740399e+06 1.23761932e+06 9.25073345e+06 1.06533589e+07 8.27438962e+06 6.10803359e+06 6.21593181e+06 -1.42173146e+06 -1.33931457e+07 7.94273030e+06 5.86694683e+06 2.92603956e+06 2.20485650e+06 7.63486257e+06 7.67805588e+06] [ 9.26763433e+05 9.21219479e+06 1.41506782e+06 2.18433821e+06 2.70021076e+06 5.01588675e+05 3.80607276e+06 7.13031086e+05 1.65383804e+06 8.18983771e+06 1.20422691e+07 7.16247386e+06 7.17958088e+06 6.74999838e+06 -3.24832909e+06 -1.07375320e+07 6.54453079e+06 6.07287113e+06 9.58860413e+05 2.30736011e+06 8.44503032e+06 8.14158231e+06] [-3.22262635e+05 7.37964334e+06 4.30671597e+04 -2.16639885e+06 8.65002349e+05 -3.70479906e+05 2.20336042e+06 -1.67497347e+06 5.06968257e+05 4.88769256e+06 1.06357274e+07 4.03221057e+06 6.13461170e+06 5.04416818e+06 -5.30627765e+06 -6.85578025e+06 3.09784965e+06 4.21318874e+06 -2.24242331e+06 1.33797051e+06 6.84698246e+06 6.18128166e+06] [-1.55114836e+06 5.10761219e+06 -1.40597030e+06 -6.14424322e+06 -1.04791829e+06 -1.18187611e+06 5.36173599e+05 -3.75999724e+06 -6.44357268e+05 1.41186290e+06 8.73879097e+06 9.27792612e+05 4.79324480e+06 3.03563816e+06 -6.83158646e+06 -2.97052815e+06 -3.88718347e+05 2.07973761e+06 -5.05724654e+06 3.54671736e+05 4.88293809e+06 3.87744940e+06] [-1.66847207e+06 3.19418855e+06 -1.55541655e+06 -6.75550217e+06 -1.38587016e+06 -1.26255890e+06 -1.41164929e+05 -3.80477191e+06 -7.86057337e+05 -4.51179973e+05 6.26888887e+06 -6.61032041e+05 3.25081406e+06 1.67035569e+06 -5.97030890e+06 -1.41091375e+05 -1.81374491e+06 7.76801399e+05 -5.10980333e+06 -2.23653749e+05 3.07733661e+06 2.12728480e+06] [-1.09359524e+06 1.55015307e+06 -1.00903502e+06 -5.79440782e+06 -9.61048336e+05 -7.80438045e+05 -2.55810468e+05 -2.92400292e+06 -3.04405778e+05 -1.51484218e+06 3.52557796e+06 -1.49656801e+06 1.89114979e+06 7.19994285e+05 -3.96220275e+06 2.04915872e+06 -2.32241161e+06 6.14641965e+04 -3.96988636e+06 -2.29157419e+05 1.61334672e+06 8.12518762e+05] [-5.00619373e+05 1.02932590e+06 -3.58810283e+05 -3.54341979e+06 
-3.70230169e+05 -3.77352007e+05 3.71185540e+04 -1.74085014e+06 6.65964100e+04 -1.14253622e+06 2.04029543e+06 -1.08980920e+06 1.26645685e+06 5.48311825e+05 -2.13144623e+06 1.56108111e+06 -1.58472158e+06 1.31629560e+05 -2.34323674e+06 -8.74163392e+04 1.08043265e+06 5.60865158e+05] [-2.77802286e+05 2.65141889e+05 -1.25369113e+05 -2.23121846e+06 -1.94056446e+05 -2.82476478e+05 -1.54623148e+05 -1.00672524e+06 8.95610566e+04 -1.14030212e+06 5.85890370e+05 -1.09354096e+06 3.27966708e+05 1.81548128e+04 -1.03169600e+06 1.39276013e+06 -1.30816548e+06 -2.24099620e+05 -1.29449966e+06 -2.94268884e+05 2.12233687e+05 -7.44315476e+04] [ 1.48044809e+04 2.02658477e+04 1.39906320e+05 -7.43794604e+05 5.72882472e+04 -4.17078335e+04 -2.30258560e+04 -2.15495653e+05 2.00686344e+05 -6.10011283e+05 -6.47088923e+04 -5.58682694e+05 4.65313445e+04 2.32574389e+03 -1.22596963e+05 8.16807141e+05 -6.00453310e+05 -1.27356696e+05 -2.74335319e+05 -1.63349186e+05 2.65952807e+04 -9.07735204e+04] [ 5.25164661e+04 4.48984074e+04 9.10642316e+04 -1.32435768e+05 2.96031849e+04 2.58494539e+04 5.31569165e+04 -7.74502189e+03 8.78708092e+04 -1.55521090e+05 1.48340014e+03 -1.25429925e+05 6.10118157e+04 6.14020467e+04 4.34012630e+04 1.55384606e+05 -1.51655796e+05 -5.83620185e+02 -9.39694522e+03 -2.96609352e+04 6.42852824e+04 1.85917656e+04] [-9.36343475e+03 -1.01061937e+04 -5.29291628e+01 3.23242980e+04 -1.73992048e+04 -1.08108052e+04 -1.99086748e+03 1.21277187e+04 -9.27049470e+03 -2.18473102e+04 -3.59569484e+04 -1.45376892e+04 -9.20984509e+03 -4.82544506e+02 2.85002449e+04 -3.10891655e+04 -1.59960932e+04 -1.56728683e+04 2.74470767e+04 -2.84513419e+04 -2.83332273e+03 -5.28959093e+03] [-3.96585825e+02 7.61033647e+02 -5.82723299e+02 1.02423760e+03 -8.09628038e+02 1.22931955e+02 3.04218857e+02 1.38186655e+02 -8.57111981e+02 1.23376447e+03 1.13037735e+03 1.20092322e+03 7.59508934e+02 6.40384766e+02 -3.32018513e+02 -3.06329059e+03 8.96299237e+02 6.13267303e+02 1.49776995e+02 1.29178526e+02 7.74415308e+02 8.43118067e+02] [ 6.89522726e-01 -1.66761703e-01 1.96802479e-03 4.59246757e-01 -4.82556585e-01 8.43273837e-01 4.04138928e-01 2.37860846e-01 -4.53982325e-01 3.91696913e-01 -4.85570503e-01 6.32905341e-01 -9.77055090e-01 -6.66924713e-01 1.15657978e-01 4.24561999e-01 -1.50386922e-02 7.55619003e-01 9.51739875e-01 -7.14458649e-02 5.35973959e-01 -1.04557323e-02] [ 3.30197557e-01 -5.50250040e-01 -7.80916566e-01 9.58017875e-01 6.29989319e-01 -4.29232935e-01 1.71656769e-01 -2.39857589e-01 -6.47323918e-01 -1.62329714e-01 -4.05624412e-01 -9.66751787e-01 -2.25263386e-01 8.41826008e-01 6.56400517e-01 1.48149510e-01 -3.62706270e-01 -5.37471410e-01 9.12039167e-01 1.29220198e-01 -5.64789393e-01 -5.01035898e-01] [ 9.19192405e-01 -5.22371733e-01 -7.86571823e-01 -6.32076120e-01 5.32449614e-01 4.17528512e-01 -1.24116150e-01 5.87609014e-01 3.50253843e-02 -1.72803678e-01 -7.65116128e-01 7.67822132e-01 -4.48813571e-01 2.32521447e-01 9.69159558e-01 -6.21163956e-01 9.30375984e-01 3.05430737e-01 -7.42536791e-01 5.43854292e-01 9.03551557e-01 1.25561099e-01] [-4.66976883e+03 -9.26048951e+03 -4.96248071e+03 -1.17062967e+03 -6.76188128e+03 -5.32012295e+03 -5.76041483e+03 -4.77029555e+03 -5.84563379e+03 -5.76240954e+03 -9.06204253e+03 -6.00917069e+03 -7.32445426e+03 -6.77383991e+03 -2.13636561e+03 -5.99738374e+03 -5.84724156e+03 -6.88418423e+03 -5.33273192e+03 -6.19933758e+03 -8.16387654e+03 -7.82113875e+03] [-2.82752286e+04 -3.59312358e+04 -2.77523867e+04 -5.47418342e+04 -2.11411071e+04 -3.36818802e+04 -3.31570035e+04 -3.82214237e+04 -3.38194539e+04 -4.44748168e+04 
-5.94478520e+04 -5.51724541e+04 -5.64800925e+04 -5.03968116e+04 -3.07395498e+04 -3.26487855e+04 -3.86704458e+04 -3.70393716e+04 -2.81645912e+04 -3.48448158e+04 -5.09014204e+04 -5.29613256e+04] [ 6.78149714e+04 9.68118026e+03 4.70047842e+04 1.33648158e+05 6.93472651e+04 3.33515535e+04 6.92595364e+04 5.05630143e+04 1.47248765e+04 6.95934343e+04 -3.50563048e+04 4.32367028e+04 -1.54418399e+04 1.93964977e+04 7.68760271e+04 -2.47633974e+05 7.66430838e+04 6.06204725e+04 7.98630558e+04 3.82814298e+04 1.57480642e+03 1.93587101e+04] [ 4.95223757e+05 4.16013820e+05 4.77396991e+05 7.81680363e+05 5.04901968e+05 3.36748753e+05 5.65892480e+05 4.60030433e+05 3.89000289e+05 5.34558534e+05 3.14807393e+05 5.07343113e+05 3.53917342e+05 4.56371875e+05 5.13340370e+05 -6.06630331e+05 5.36423908e+05 5.26783270e+05 5.46162159e+05 3.87967590e+05 4.23904036e+05 4.65957897e+05] [ 7.61411118e+05 7.78611215e+05 7.10021347e+05 1.44221223e+06 7.79500025e+05 4.71804906e+05 9.16948652e+05 7.97343236e+05 5.58070942e+05 1.03514326e+06 6.41604794e+05 9.96410187e+05 5.72365039e+05 7.68983603e+05 8.22054751e+05 -1.35530107e+06 1.03724184e+06 8.74068340e+05 1.00550964e+06 5.80284012e+05 7.61307265e+05 8.34738281e+05] [ 1.19501618e+06 1.61616456e+06 1.10323699e+06 2.39163767e+06 1.33990847e+06 7.76740651e+05 1.55494836e+06 1.42703095e+06 8.71359075e+05 2.08135623e+06 1.45123027e+06 2.00159920e+06 9.96719074e+05 1.34668504e+06 1.25264344e+06 -2.77933436e+06 2.07426023e+06 1.58227519e+06 1.90699454e+06 9.94565985e+05 1.45555312e+06 1.55492209e+06] [ 1.96699540e+06 2.98086134e+06 1.85307777e+06 3.46038613e+06 2.21734149e+06 1.43297023e+06 2.65155551e+06 2.20260429e+06 1.50347730e+06 3.50735748e+06 2.94682463e+06 3.39918499e+06 2.07100576e+06 2.46615884e+06 1.79321202e+06 -4.51400655e+06 3.40049630e+06 2.78149040e+06 2.87081870e+06 1.80657872e+06 2.72448693e+06 2.83542611e+06] [ 2.84229608e+06 4.38339161e+06 2.60181468e+06 5.22894690e+06 3.24586055e+06 2.05607706e+06 3.73451053e+06 3.27423367e+06 2.11932936e+06 5.24846154e+06 4.29906850e+06 5.10411062e+06 2.98549523e+06 3.60479198e+06 2.66535396e+06 -6.79317724e+06 5.12761640e+06 4.03568384e+06 4.29410300e+06 2.60408755e+06 4.00267348e+06 4.21985455e+06] [ 2.87254409e+06 5.58135199e+06 2.57207340e+06 6.31766643e+06 3.67185771e+06 1.97590452e+06 4.15477050e+06 3.85957939e+06 2.04625617e+06 6.81247954e+06 5.31887580e+06 6.55674622e+06 3.37869382e+06 4.26926407e+06 2.75370761e+06 -9.49422913e+06 6.58562938e+06 4.70411712e+06 5.34935623e+06 2.66420154e+06 4.95874154e+06 5.19765931e+06] [ 1.25206414e+06 5.48468080e+06 9.56396170e+05 6.01631426e+06 2.58518799e+06 2.95894073e+05 2.78918818e+06 3.13140224e+06 4.00608322e+05 7.36133855e+06 5.04723441e+06 6.89468629e+06 2.12900997e+06 3.35384140e+06 1.25480105e+06 -1.23272170e+07 6.99098698e+06 3.69432666e+06 5.22281564e+06 9.38775078e+05 4.28913255e+06 4.59090793e+06] [-1.33176246e+06 4.03690782e+06 -2.18198181e+06 5.88172920e+06 -3.92216620e+05 -2.10271578e+06 2.82497981e+05 1.31076789e+06 -2.83170025e+06 7.59603454e+06 4.11563987e+06 6.92709049e+06 1.72292107e+05 1.40528378e+06 -1.26791312e+06 -1.61384649e+07 6.81223474e+06 1.74524240e+06 3.59260197e+06 -1.43822378e+06 2.63561577e+06 3.08390599e+06] [-3.92012861e+06 3.25707454e+06 -5.48717136e+06 6.84881908e+06 -3.75901689e+06 -4.24808105e+06 -1.77100495e+06 -2.03574370e+05 -6.44081516e+06 8.64193683e+06 4.52052924e+06 7.68262589e+06 -8.26154062e+05 2.29624981e+05 -3.85921813e+06 -2.19498911e+07 7.32132274e+06 4.70577286e+05 2.01342869e+06 -3.29605604e+06 1.89252914e+06 2.54853075e+06] 
[Large unannotated numeric matrix omitted: many rows of 22 floating-point values in scientific notation (apparently a printed NumPy 2-D array); no column labels, captions, or other descriptive information is recoverable from the dump.]
4.36372578e+05 1.56467786e+05 -6.40978627e+04 -3.22448365e+04 5.74045530e+05 -1.13133917e+06 -9.81721005e+05 -1.03795832e+06 -4.14564272e+05 -3.71183887e+05 3.60027333e+05 2.20550290e+06 -8.82974315e+05 -3.74101026e+05 -1.04011170e+05 -1.32566654e+04 -5.76576284e+05 -6.27083452e+05] [-1.59256161e+06 -3.13580398e+06 -1.52146116e+06 -4.34966146e+06 -1.52844845e+06 -1.62877379e+06 -2.52123417e+06 -2.27998054e+06 -1.13082510e+06 -4.19469037e+06 -3.51448371e+06 -4.02906267e+06 -2.84722252e+06 -2.94852625e+06 -1.58426303e+06 4.79256804e+06 -3.72567045e+06 -2.94328457e+06 -2.63079335e+06 -2.05651853e+06 -3.16917504e+06 -3.39323660e+06] [-3.87169114e+06 -7.06960413e+06 -3.40413609e+06 -1.23038262e+07 -3.59194455e+06 -3.99556821e+06 -5.79242727e+06 -6.33583779e+06 -2.41691917e+06 -1.09382292e+07 -8.27395399e+06 -1.05229098e+07 -6.07398718e+06 -6.70063163e+06 -4.56363854e+06 1.20675113e+07 -1.00720037e+07 -6.84869577e+06 -7.38968096e+06 -4.80934052e+06 -7.05243476e+06 -7.89812030e+06] [-5.60388480e+06 -9.83782935e+06 -4.45440938e+06 -1.99424360e+07 -4.66675895e+06 -5.96680471e+06 -8.05209084e+06 -9.79621633e+06 -2.78581838e+06 -1.73156717e+07 -1.19218828e+07 -1.66269688e+07 -8.27764430e+06 -9.28792827e+06 -7.10399544e+06 1.92491410e+07 -1.60516628e+07 -9.83076444e+06 -1.15325547e+07 -7.04446503e+06 -9.69548298e+06 -1.11724502e+07] [-7.12216150e+06 -1.05657984e+07 -5.39071344e+06 -2.50973484e+07 -5.86680017e+06 -7.39624439e+06 -8.98509747e+06 -1.23936683e+07 -3.33419494e+06 -2.10410087e+07 -1.31374191e+07 -2.02075498e+07 -8.49907466e+06 -1.00146433e+07 -9.34141416e+06 2.18173045e+07 -1.99484414e+07 -1.12723732e+07 -1.46567663e+07 -8.39893910e+06 -1.01018151e+07 -1.22995600e+07] [-6.84659572e+06 -7.63137461e+06 -5.05834016e+06 -2.59311089e+07 -5.97464958e+06 -7.83826807e+06 -7.48175626e+06 -1.33164396e+07 -3.05646802e+06 -2.01862582e+07 -9.83168209e+06 -1.94757215e+07 -5.48773875e+06 -7.56392319e+06 -1.08537099e+07 1.87562664e+07 -2.01183951e+07 -9.71198761e+06 -1.58791759e+07 -8.16517296e+06 -6.99887531e+06 -9.67346237e+06] [-5.87726919e+06 -3.57285951e+06 -4.50633761e+06 -2.42474113e+07 -5.90548576e+06 -7.72426033e+06 -5.31417097e+06 -1.34321778e+07 -2.84292801e+06 -1.66958762e+07 -4.44632847e+06 -1.62824175e+07 -1.59612649e+06 -4.06155850e+06 -1.20829300e+07 1.34712486e+07 -1.81703704e+07 -7.17045013e+06 -1.63081535e+07 -7.42441480e+06 -2.83761516e+06 -5.77596459e+06] [-4.03751986e+06 1.02940378e+05 -3.55534482e+06 -2.00278303e+07 -5.14340416e+06 -6.10204075e+06 -2.78199819e+06 -1.16825392e+07 -2.09563447e+06 -1.16287508e+07 3.15149628e+05 -1.14194830e+07 2.15161780e+06 -4.93102775e+05 -1.12797562e+07 8.46803694e+06 -1.43943243e+07 -3.94037267e+06 -1.46619440e+07 -5.44716315e+06 1.13431994e+06 -1.65800639e+06] [-4.03787862e+06 -1.02389521e+06 -5.20765169e+06 -1.20015697e+07 -6.90213360e+06 -4.85158752e+06 -2.80054216e+06 -9.08116075e+06 -4.55243966e+06 -6.57954729e+06 1.26583444e+05 -6.52640851e+06 1.44930954e+06 -6.25848907e+05 -9.02891685e+06 5.08511912e+05 -9.68422386e+06 -3.28296643e+06 -1.17928168e+07 -4.36816902e+06 5.34006143e+05 -1.15119111e+06] [-8.22408558e+06 -8.53991616e+06 -1.17154279e+07 1.64453129e+06 -1.37704758e+07 -6.26223205e+06 -6.99334833e+06 -6.87722884e+06 -1.31388641e+07 -1.69985770e+06 -6.81759862e+06 -2.07294774e+06 -5.30323655e+06 -5.80933759e+06 -6.81029862e+06 -1.62607866e+07 -4.33678588e+06 -6.56283749e+06 -8.59448578e+06 -5.85050946e+06 -5.89959621e+06 -5.36404941e+06] [-8.92510222e+06 -1.10382537e+07 -1.26196695e+07 7.90884015e+06 -1.45261391e+07 -5.66569887e+06 
-7.46890012e+06 -4.26409528e+06 -1.49755459e+07 1.20769348e+05 -9.94315220e+06 -3.12208386e+05 -7.67478612e+06 -7.36162436e+06 -3.83095005e+06 -2.21705894e+07 -1.46786885e+06 -6.99796108e+06 -5.19601840e+06 -5.02753194e+06 -8.19668587e+06 -6.59302892e+06] [-1.91940407e+06 -6.05304871e+06 -3.06657061e+06 4.61923785e+06 -3.85688344e+06 9.43118155e+04 -1.21168606e+06 7.85478823e+05 -4.24479265e+06 -1.30490565e+06 -7.77599386e+06 -1.22547045e+06 -3.27689731e+06 -2.63408386e+06 2.48342968e+06 -6.90357042e+06 -1.24538617e+06 -2.12313020e+06 7.62212805e+04 3.95016158e+05 -4.21774484e+06 -3.08048057e+06] [ 9.65325471e+06 3.59579670e+06 1.26083893e+07 -2.93652059e+06 1.39092139e+07 8.74960876e+06 8.89811880e+06 7.50091154e+06 1.38194198e+07 -3.43597749e+06 -2.09556413e+06 -2.62708831e+06 4.86579044e+06 5.62437072e+06 1.09616946e+07 1.79876422e+07 -1.20848553e+06 6.12149956e+06 7.33730575e+06 8.33838657e+06 3.46179918e+06 3.33079557e+06] [ 1.48959704e+07 9.34099639e+06 1.99233580e+07 -6.46556697e+06 2.26389033e+07 1.21520802e+07 1.34148796e+07 1.03564069e+07 2.24380696e+07 -3.20410511e+06 2.58759855e+06 -2.25075223e+06 8.87953765e+06 9.68857925e+06 1.37939259e+07 2.86849291e+07 1.18539242e+03 1.02696618e+07 1.08092177e+07 1.16244922e+07 7.75525410e+06 6.98769472e+06] [ 1.04792712e+07 7.92643148e+06 1.46071569e+07 -6.06426913e+06 1.73374444e+07 7.87055029e+06 9.42644726e+06 6.84475387e+06 1.68235485e+07 -2.21616274e+06 3.34518213e+06 -1.71542921e+06 6.57032031e+06 7.15870243e+06 8.68280497e+06 2.17781440e+07 2.56466841e+05 7.56271316e+06 7.39210329e+06 7.67286555e+06 6.13907928e+06 5.46305556e+06] [ 3.58600999e+06 4.66959330e+06 5.54436640e+06 -6.03112771e+06 7.48843127e+06 1.61030020e+06 3.04189863e+06 8.86777442e+05 7.34284359e+06 -1.34982869e+06 4.01350722e+06 -1.24945982e+06 3.01208007e+06 2.90653804e+06 6.58386045e+05 1.09489056e+07 -5.38382734e+05 2.83541947e+06 9.62886629e+05 1.90014999e+06 3.11764519e+06 2.54764816e+06] [-4.43645295e+06 -1.52805743e+06 -4.17955726e+06 -1.09745710e+07 -3.27553338e+06 -5.41052954e+06 -4.96907501e+06 -6.87540840e+06 -2.83569669e+06 -5.48665664e+06 -5.62921886e+04 -5.64625421e+06 -2.65454547e+06 -3.58169708e+06 -8.02233922e+06 5.34352831e+06 -5.74047998e+06 -4.21505256e+06 -7.67947084e+06 -4.98398176e+06 -2.54454627e+06 -3.36006970e+06] [-9.53472103e+06 -7.36800934e+06 -9.90078614e+06 -1.60627640e+07 -9.58825380e+06 -9.60417730e+06 -1.06473018e+07 -1.19434315e+07 -8.56014170e+06 -1.06572337e+07 -5.65863005e+06 -1.08103041e+07 -7.61941721e+06 -8.88885631e+06 -1.28073444e+07 6.29262666e+06 -1.11791729e+07 -9.92668208e+06 -1.33973099e+07 -9.61394789e+06 -7.87034372e+06 -8.93317362e+06] [-9.76148670e+06 -8.11289078e+06 -1.01277986e+07 -1.53819709e+07 -9.98927278e+06 -9.47438901e+06 -1.08381579e+07 -1.16337932e+07 -9.05261651e+06 -1.07318108e+07 -6.93281665e+06 -1.08769818e+07 -8.35675996e+06 -9.43878937e+06 -1.21856416e+07 5.85152180e+06 -1.11397773e+07 -1.03724967e+07 -1.28391444e+07 -9.78668679e+06 -8.60880667e+06 -9.63199327e+06] [-6.46220304e+06 -4.94405984e+06 -6.81449701e+06 -9.97284861e+06 -6.76036221e+06 -6.08393023e+06 -6.93659905e+06 -7.68837083e+06 -6.15381669e+06 -6.48091784e+06 -3.85714335e+06 -6.59715133e+06 -5.04420921e+06 -5.89708834e+06 -8.20249883e+06 2.81587188e+06 -6.91082468e+06 -6.50398866e+06 -8.49550298e+06 -6.19632798e+06 -5.22731124e+06 -5.96799073e+06] [-1.99425180e+06 -1.09121082e+06 -2.02997492e+06 -3.84643341e+06 -1.95086947e+06 -1.92299766e+06 -2.00823507e+06 -2.67377815e+06 -1.74879021e+06 -2.02108829e+06 -5.19677377e+05 
-2.08192705e+06 -1.19555782e+06 -1.59087137e+06 -2.98294989e+06 1.13334901e+06 -2.25139872e+06 -1.79644595e+06 -2.98197029e+06 -1.85972713e+06 -1.27148143e+06 -1.59539986e+06] [-4.58786720e+03 2.98175554e+05 6.24457218e+04 -7.12848715e+03 1.03817836e+05 -4.39541741e+03 1.28668463e+05 2.26532978e+04 4.71869924e+04 2.47097505e+05 3.22343931e+05 2.12334894e+05 1.89332154e+05 1.99450505e+05 -1.02289899e+05 -2.29555505e+05 1.89710466e+05 1.78745712e+05 6.51389761e+04 2.65849294e+04 2.31575825e+05 2.19448419e+05] [-7.53531518e+04 -2.38403566e+03 -6.86765135e+04 -3.82912078e+04 -3.44852924e+04 -6.28564436e+04 -4.87903203e+04 -3.98662577e+04 -6.71104271e+04 4.44682562e+04 2.27060887e+04 3.54413833e+04 -5.40941021e+04 -4.00184659e+04 -8.72441681e+04 -7.45699149e+04 4.06134373e+04 -2.16576487e+04 -1.31213361e+04 -5.68948184e+04 -2.99594211e+04 -2.38764602e+04] [-9.76505962e-01 9.03158843e-02 -2.28849999e-01 -6.97118111e-02 -8.94771636e-01 9.61098265e-01 7.50744808e-01 -3.74766504e-01 -8.36461357e-01 -7.86798331e-01 -7.77443470e-01 -3.71672704e-01 4.93441706e-01 7.35645218e-01 -9.92390730e-02 4.70801456e-01 -6.09176627e-01 -6.12190119e-01 -2.65113268e-01 -3.01517304e-01 -3.95841903e-01 9.95208948e-02] [-9.74609577e-01 -2.81538278e-01 3.69985040e-01 -5.84916852e-01 -6.38324587e-01 -4.11768794e-01 -9.88782993e-01 -3.10430835e-01 6.56809020e-01 -9.55421487e-01 -9.39361712e-01 -2.51288359e-01 -8.69279455e-01 7.06094629e-01 -2.72896993e-01 4.17374258e-01 -3.14984292e-01 -4.29172675e-01 -9.89017215e-01 -4.59932523e-01 1.16414084e-01 -2.09577907e-01] [ 4.47147500e+03 4.54707544e+03 5.07664859e+03 2.26625740e+03 5.23024117e+03 2.68931443e+03 4.90389671e+03 2.71788091e+03 5.07500867e+03 2.54764573e+03 4.42045014e+03 2.46192743e+03 4.35775334e+03 4.76171294e+03 2.89038784e+03 -4.76351465e+01 2.64981997e+03 4.71490602e+03 2.77129353e+03 3.78002712e+03 4.71802893e+03 4.57761834e+03] [ 1.29692250e+04 1.24276088e+04 1.43019526e+04 6.95346004e+03 1.50163768e+04 8.70731253e+03 1.33450297e+04 8.57618667e+03 1.47100477e+04 7.41738642e+03 1.20426822e+04 7.23396397e+03 1.22100159e+04 1.30520429e+04 9.08930384e+03 3.64953837e+03 7.97979018e+03 1.29386708e+04 8.63420641e+03 1.10081123e+04 1.28030240e+04 1.25503659e+04] [ 1.24206760e+05 -6.80988782e+03 1.81907378e+05 -1.27150151e+05 1.75095685e+05 1.03691763e+05 8.80818778e+04 6.33702559e+04 1.98098905e+05 -1.58360416e+05 -8.87200762e+04 -1.38888387e+05 2.78369044e+04 3.64334738e+04 1.31148192e+05 4.16815996e+05 -1.11106532e+05 3.36373525e+04 4.85790108e+04 8.42778136e+04 2.98444397e+02 -8.32826237e+03] [ 9.99987410e+04 -6.48214921e+05 2.96938047e+05 -1.06902085e+06 2.46044386e+05 3.31007313e+04 -1.57868351e+05 -2.05149019e+05 4.02638572e+05 -1.26559042e+06 -1.05580803e+06 -1.16838536e+06 -4.62490379e+05 -4.46014016e+05 1.91779827e+05 2.03599172e+06 -1.04842570e+06 -4.68807051e+05 -2.93181565e+05 -1.19637629e+05 -6.20185602e+05 -7.03754218e+05] [-2.38640682e+06 -4.13951761e+06 -2.34909145e+06 -5.63448690e+06 -2.47263452e+06 -2.46053485e+06 -3.47513304e+06 -3.34222257e+06 -1.94708241e+06 -5.43279721e+06 -4.57596318e+06 -5.25578923e+06 -3.71214986e+06 -3.92166048e+06 -2.51534125e+06 5.25616699e+06 -4.98319733e+06 -3.97716262e+06 -3.83074968e+06 -2.96667433e+06 -4.10016976e+06 -4.44063784e+06] [-3.79557382e+06 -6.77220026e+06 -3.17522678e+06 -1.29736531e+07 -3.28624690e+06 -4.19033919e+06 -5.65974982e+06 -6.57163642e+06 -2.15350570e+06 -1.13763673e+07 -8.06768479e+06 -1.09874905e+07 -5.95603529e+06 -6.57505958e+06 -4.86738573e+06 1.25883220e+07 -1.04384385e+07 -6.78182383e+06 
-7.63952722e+06 -4.88116044e+06 -6.81016795e+06 -7.79035576e+06] [-4.88309017e+06 -8.60897052e+06 -3.39188382e+06 -2.02048638e+07 -3.45761141e+06 -5.65546901e+06 -7.20661034e+06 -9.44493989e+06 -1.65655811e+06 -1.72140942e+07 -1.06756673e+07 -1.65867719e+07 -7.21395506e+06 -8.28263208e+06 -6.93880163e+06 1.98244325e+07 -1.58857928e+07 -8.92200968e+06 -1.11923022e+07 -6.50728123e+06 -8.48475165e+06 -1.01217920e+07] [-3.72127213e+06 -6.52565382e+06 -1.28420843e+06 -2.35332766e+07 -1.35348375e+06 -4.90421239e+06 -5.19552422e+06 -9.92686892e+06 1.10741372e+06 -1.89543055e+07 -9.09810619e+06 -1.81746558e+07 -4.65149006e+06 -6.14929388e+06 -7.09137467e+06 2.28217018e+07 -1.77894434e+07 -7.41852370e+06 -1.21832092e+07 -5.32236892e+06 -6.07474450e+06 -8.41684800e+06] [-1.01129764e+06 -3.88775205e+05 1.74480233e+06 -2.21617470e+07 1.56638130e+06 -2.91060832e+06 -7.10587921e+05 -8.56456569e+06 4.21106049e+06 -1.51219928e+07 -2.40185401e+06 -1.44941913e+07 1.33368153e+06 -7.02261589e+05 -6.57030522e+06 1.91090065e+07 -1.50381157e+07 -2.68233759e+06 -1.09974457e+07 -2.33485615e+06 1.14876899e+05 -2.73555603e+06] [ 7.53847117e+05 3.91250612e+06 3.42867205e+06 -1.87182367e+07 2.97504808e+06 -1.47790454e+06 2.52292916e+06 -6.63816454e+06 5.52019818e+06 -1.06544482e+07 2.37052952e+06 -1.02028721e+07 5.40934459e+06 3.26235947e+06 -5.63057092e+06 1.48899307e+07 -1.16607722e+07 6.44552111e+05 -8.88672896e+06 -4.03320211e+05 4.50921974e+06 1.54479772e+06] [ 2.52691642e+06 6.82175078e+06 4.46364084e+06 -1.39067733e+07 4.16020975e+06 5.88741929e+05 4.84692432e+06 -3.76044627e+06 6.21469168e+06 -5.86356348e+06 5.43499708e+06 -5.56703936e+06 8.02990672e+06 6.03653126e+06 -3.50398322e+06 1.06655507e+07 -7.55797667e+06 3.41358181e+06 -5.70687475e+06 1.74181179e+06 7.45457706e+06 4.82098287e+06] [ 2.16411368e+06 4.79061166e+06 2.43683758e+06 -4.55512700e+06 1.96134304e+06 1.60309885e+06 4.47540203e+06 -5.77220755e+05 2.97578147e+06 -9.92686763e+05 3.79223583e+06 -9.37973263e+05 6.27162217e+06 5.16139959e+06 -5.72245505e+05 1.26389838e+06 -2.73661416e+06 3.55932322e+06 -2.04786335e+06 2.43841695e+06 5.93206303e+06 4.58605518e+06] [-2.67003991e+06 -2.20843401e+06 -4.55294378e+06 6.89862472e+06 -5.83212108e+06 -6.09823168e+05 -1.29032614e+05 7.67258947e+05 -6.12565310e+06 2.62929899e+06 -2.36774617e+06 2.38124678e+06 -2.67433289e+04 -7.17394226e+04 4.84464029e+05 -1.47976227e+07 1.28920975e+06 -1.57326386e+05 4.13594806e+04 2.75863177e+05 -1.30978711e+05 5.03543628e+05] [-1.37269480e+06 -2.39052854e+06 -3.19878415e+06 1.09559860e+07 -4.52140729e+06 1.08626399e+06 1.08601321e+06 3.34320613e+06 -5.09377855e+06 4.35062205e+06 -2.55434076e+06 4.11828494e+06 8.54740915e+04 3.74893742e+05 3.33299585e+06 -1.57254140e+07 3.62434077e+06 9.56417789e+05 2.83170216e+06 2.15048368e+06 -2.46012526e+05 9.93801995e+05] [ 5.91009309e+06 2.44503672e+06 6.53524106e+06 7.29344926e+06 6.21218567e+06 6.64799849e+06 7.14432689e+06 7.63384869e+06 5.81490015e+06 2.78387707e+06 -2.77013189e+05 2.99792492e+06 4.43145796e+06 5.11022466e+06 9.02855663e+06 1.36002872e+05 3.45509697e+06 5.74743286e+06 7.26633138e+06 7.09245213e+06 3.56649194e+06 4.37636117e+06] [ 1.62324122e+07 9.85320874e+06 2.07104294e+07 -2.44666616e+06 2.21978845e+07 1.38295662e+07 1.54287990e+07 1.16944601e+07 2.25140075e+07 -1.57150171e+06 3.47200593e+06 -5.81363445e+05 1.09188302e+07 1.16392514e+07 1.55581371e+07 2.54803365e+07 1.03575610e+06 1.20177388e+07 1.14911077e+07 1.34732495e+07 9.35996567e+06 8.76972102e+06] [ 1.80149785e+07 1.13624958e+07 2.36981878e+07 
-6.85466188e+06 2.58825614e+07 1.41786219e+07 1.63541167e+07 1.10707372e+07 2.63942847e+07 -3.76347439e+06 4.22599761e+06 -2.60820366e+06 1.16446711e+07 1.22774709e+07 1.55037255e+07 3.22191224e+07 -4.92230333e+05 1.25722788e+07 1.11463587e+07 1.38114225e+07 1.02194487e+07 8.97368875e+06] [ 1.20170694e+07 8.27022624e+06 1.60372401e+07 -6.08588943e+06 1.77056826e+07 8.42173534e+06 1.08249064e+07 6.06401188e+06 1.81601670e+07 -2.90733498e+06 4.15557107e+06 -2.21464077e+06 8.10429657e+06 8.37919814e+06 8.82799214e+06 2.19434323e+07 -8.82797756e+05 8.43812028e+06 6.03929348e+06 8.50768335e+06 7.27731628e+06 6.25038780e+06] [ 3.85774534e+06 4.48410871e+06 5.52654557e+06 -6.92044184e+06 6.57920448e+06 1.03578967e+06 3.21395957e+06 -8.91524803e+05 7.07645787e+06 -2.53358636e+06 4.22521123e+06 -2.42440405e+06 3.77509531e+06 3.35604486e+06 -5.22598852e+05 9.80554266e+06 -2.28993565e+06 2.89448623e+06 -1.37024548e+06 1.66223567e+06 3.67535626e+06 2.79276370e+06] [-3.80756000e+06 -1.38323991e+06 -3.52076157e+06 -1.22670901e+07 -3.17745183e+06 -5.55834111e+06 -4.48991985e+06 -8.13432830e+06 -2.16160104e+06 -6.99802385e+06 1.89689022e+05 -7.10661985e+06 -1.63923279e+06 -2.79366367e+06 -8.65462126e+06 6.29012157e+06 -7.59303646e+06 -3.93696513e+06 -9.54295992e+06 -4.82347044e+06 -1.74343657e+06 -2.84677413e+06] [-8.29330067e+06 -6.11060792e+06 -8.59900498e+06 -1.53986026e+07 -8.60877944e+06 -8.98452131e+06 -9.25464188e+06 -1.16639381e+07 -7.40257807e+06 -1.02699759e+07 -4.24876378e+06 -1.04297846e+07 -6.01607136e+06 -7.36783838e+06 -1.22716257e+07 6.09795081e+06 -1.09963725e+07 -8.61115641e+06 -1.33366976e+07 -8.72283843e+06 -6.28920250e+06 -7.44378284e+06] [-8.23641457e+06 -6.44437614e+06 -8.38234093e+06 -1.49946626e+07 -8.25822391e+06 -8.31042271e+06 -9.15544391e+06 -1.08477154e+07 -7.21196889e+06 -1.00463181e+07 -5.19211044e+06 -1.01822885e+07 -6.42460838e+06 -7.63234284e+06 -1.13699400e+07 6.66018365e+06 -1.05505793e+07 -8.72895003e+06 -1.22371668e+07 -8.35160149e+06 -6.73036147e+06 -7.90048184e+06] [-5.47721716e+06 -3.74896242e+06 -5.52312090e+06 -9.79972489e+06 -5.32422278e+06 -5.27842721e+06 -5.84311526e+06 -6.97394887e+06 -4.74853451e+06 -6.08257292e+06 -2.80317293e+06 -6.20733042e+06 -3.90321564e+06 -4.73576072e+06 -7.54816946e+06 3.67158034e+06 -6.45613257e+06 -5.42733787e+06 -7.77347875e+06 -5.29529723e+06 -4.05743246e+06 -4.85788045e+06] [-1.92111864e+06 -8.81696182e+05 -1.76681291e+06 -3.68209471e+06 -1.56551104e+06 -1.81103759e+06 -1.92924347e+06 -2.35534311e+06 -1.46745273e+06 -1.99565326e+06 -5.87511439e+05 -2.07887088e+06 -1.14295197e+06 -1.42815512e+06 -2.71399382e+06 1.29048500e+06 -2.12447167e+06 -1.70135917e+06 -2.54022149e+06 -1.85466352e+06 -1.13487467e+06 -1.42381930e+06] [ 4.76638663e+04 2.56051179e+05 1.24985890e+05 -4.66140585e+04 1.82477079e+05 5.75420283e+04 1.02458846e+05 5.72698462e+04 1.27735064e+05 1.22937227e+05 2.11103440e+05 9.36467265e+04 1.62708583e+05 1.84187818e+05 -2.41896973e+04 -5.22332471e+04 9.90097080e+04 1.47515272e+05 9.02644887e+04 4.08208817e+04 2.05168130e+05 1.96984889e+05] [-9.91705485e+03 3.05141795e+04 3.56345877e+03 -7.30766794e+04 1.70984060e+04 9.83108350e+02 1.86333491e+03 -2.52675201e+04 2.28105802e+04 -1.70051805e+03 4.35073161e+04 -4.80320398e+03 2.50524100e+04 1.51819804e+04 -4.34290180e+04 4.72515844e+04 -1.20604676e+04 9.53001899e+03 -2.87343177e+04 4.26236067e+02 2.50059888e+04 1.21906850e+04] [ 7.83972901e-01 6.42769702e-01 7.44309053e-01 -2.00806853e-01 1.86757059e-02 -4.85587731e-01 2.13975959e-01 -1.07377556e-01 -7.18718559e-03 
8.19261247e-02 1.60924372e-01 -7.06778408e-02 5.96974346e-01 -1.81532095e-01 -9.12735842e-01 6.66823693e-01 -3.04177431e-02 -4.30522881e-02 -6.46331698e-01 -1.13993224e-01 -7.13056136e-01 -9.17524407e-01] [ 9.26325540e-01 -4.76097377e-01 -7.27802483e-01 3.76112815e-01 -2.52137753e-01 2.46159879e-01 4.12892032e-01 3.24218793e-01 -5.46172933e-01 5.66746048e-01 -8.58060333e-01 5.76285430e-01 -6.75309174e-01 -1.89980691e-01 -9.87604330e-01 -6.74295515e-01 3.55717773e-01 5.00288279e-01 8.20383787e-01 -1.59548677e-01 2.20656800e-01 -1.59144068e-02] [ 2.20742620e+02 2.25395805e+02 2.52071869e+02 1.12765357e+02 2.58939268e+02 1.34138941e+02 2.43749340e+02 1.35520156e+02 2.52567498e+02 1.25805159e+02 2.19631046e+02 1.21187336e+02 2.15673332e+02 2.36131548e+02 1.43072741e+02 -2.99912195e+00 1.31788694e+02 2.34691712e+02 1.36618222e+02 1.86920041e+02 2.33554632e+02 2.27307505e+02] [ 1.00303885e+04 -6.03312127e+03 7.94175639e+03 2.03846499e+03 6.06072724e+03 2.55983666e+03 4.16938965e+03 -2.02455954e+03 6.38823624e+03 -7.87043261e+03 -1.23040545e+04 -7.27372316e+03 -3.12240227e+03 2.60750320e+01 7.67807158e+03 5.02287134e+03 -6.93409837e+03 1.89751073e+03 -4.11868643e+03 4.66701003e+03 -3.77716762e+03 -3.20636555e+03] [ 1.51904152e+05 -9.78079885e+02 1.94082254e+05 -9.34378251e+04 1.82971996e+05 1.07402849e+05 9.66819216e+04 6.20721877e+04 1.99517288e+05 -1.39525677e+05 -9.51658596e+04 -1.20860281e+05 2.98606465e+04 4.23761161e+04 1.45158330e+05 3.56180854e+05 -9.72926909e+04 5.18543761e+04 5.73778295e+04 9.61312525e+04 6.79415283e+03 -1.44837129e+03] [-2.58628674e+05 -6.99113512e+05 -2.01693247e+05 -6.94624232e+05 -2.61509775e+05 -3.43973130e+05 -4.30549288e+05 -3.89292151e+05 -1.91584804e+05 -9.42312944e+05 -9.76119594e+05 -8.92159060e+05 -6.60559892e+05 -6.05285247e+05 -1.44320267e+05 9.66147566e+05 -7.99352559e+05 -6.05681841e+05 -3.92316575e+05 -4.32530873e+05 -6.91610326e+05 -7.37742097e+05] [-3.43589490e+06 -4.28227338e+06 -3.53381300e+06 -5.50334780e+06 -3.55308713e+06 -3.47936448e+06 -4.22518010e+06 -3.88711297e+06 -3.24060558e+06 -5.21016604e+06 -4.64873625e+06 -5.11815323e+06 -4.26497693e+06 -4.39730192e+06 -3.47040739e+06 3.73130687e+06 -4.84928202e+06 -4.54290541e+06 -4.14039021e+06 -3.88605203e+06 -4.33424707e+06 -4.68705482e+06] [-4.25255854e+06 -5.71816105e+06 -3.73495877e+06 -1.18964523e+07 -3.55967446e+06 -4.81917841e+06 -5.70216103e+06 -6.49671097e+06 -2.77355235e+06 -9.95737821e+06 -6.54601356e+06 -9.72276768e+06 -5.56686579e+06 -6.12047131e+06 -5.55090243e+06 1.04004327e+07 -9.17345334e+06 -6.43045342e+06 -7.26214301e+06 -5.26397520e+06 -5.96058642e+06 -6.92127804e+06] [-3.49407455e+06 -4.87588480e+06 -2.18799792e+06 -1.70248642e+07 -1.61697087e+06 -4.79981447e+06 -5.18031232e+06 -7.84891479e+06 -3.73253731e+05 -1.30710253e+07 -5.77913690e+06 -1.27063748e+07 -4.45616104e+06 -5.50242274e+06 -6.59622063e+06 1.62314973e+07 -1.20799664e+07 -6.06248661e+06 -9.24926917e+06 -4.92503848e+06 -5.06309359e+06 -6.64728087e+06] [-4.01253930e+04 6.76471790e+04 2.47503480e+06 -1.89599154e+07 3.40533355e+06 -2.17438681e+06 -8.94621345e+05 -6.31177213e+06 5.11011564e+06 -1.25840291e+07 -1.25429046e+06 -1.20988608e+07 5.97433149e+05 -9.31554411e+05 -5.12371246e+06 1.96402568e+07 -1.16624619e+07 -1.93523328e+06 -8.05127729e+06 -1.48686987e+06 -1.33287983e+05 -2.31488681e+06] [ 4.59660209e+06 7.42102017e+06 7.93158531e+06 -1.61698928e+07 9.12288259e+06 1.92293883e+06 5.36676930e+06 -2.53844185e+06 1.08373877e+07 -7.43931669e+06 6.16526352e+06 -7.07612260e+06 7.58187148e+06 5.84223498e+06 
-2.19508486e+06 1.83187979e+07 -7.21863999e+06 4.40975087e+06 -4.24714067e+06 3.46686611e+06 7.08999249e+06 4.53480493e+06] [ 8.01847214e+06 1.29578527e+07 1.15975135e+07 -1.13740948e+07 1.28040069e+07 5.24045970e+06 1.03751506e+07 1.49632131e+06 1.42368095e+07 -1.98353763e+06 1.12483263e+07 -1.73823370e+06 1.28696262e+07 1.11808995e+07 1.06935515e+06 1.59924236e+07 -2.49764089e+06 9.29900323e+06 1.18348773e+05 7.29097596e+06 1.26775573e+07 1.00560253e+07] [ 1.08819795e+07 1.63775398e+07 1.40837270e+07 -4.74975980e+06 1.54386716e+07 8.52301344e+06 1.40635014e+07 6.08450837e+06 1.61251166e+07 3.63895300e+06 1.44647047e+07 3.72506820e+06 1.59886530e+07 1.46775076e+07 5.15936279e+06 1.22293345e+07 2.80066234e+06 1.31260550e+07 5.24765495e+06 1.05440803e+07 1.60634100e+07 1.39025401e+07] [ 8.89137585e+06 1.36099908e+07 1.05251198e+07 4.89551000e+06 1.15355301e+07 8.11667604e+06 1.24710368e+07 8.69807454e+06 1.09955537e+07 8.34636273e+06 1.20483142e+07 8.04183676e+06 1.31079393e+07 1.26643176e+07 7.07452869e+06 3.81593374e+05 7.51585414e+06 1.22082015e+07 8.54677067e+06 9.93600600e+06 1.36076955e+07 1.28606243e+07] [ 4.72607537e+06 7.89355254e+06 4.08480429e+06 1.58960737e+07 3.87527578e+06 6.03206907e+06 8.54642931e+06 9.85691359e+06 2.43935243e+06 1.21883346e+07 7.69509666e+06 1.16235402e+07 8.22186094e+06 8.54889774e+06 7.66979138e+06 -1.54138302e+07 1.14063760e+07 9.17676808e+06 1.01757737e+07 8.01109265e+06 8.95119463e+06 9.95720341e+06] [ 5.91878634e+06 6.73456373e+06 5.38463484e+06 1.86773502e+07 4.73804565e+06 7.36012911e+06 9.08970681e+06 1.14926007e+07 3.35222943e+06 1.24195757e+07 6.18967595e+06 1.19758590e+07 7.61214875e+06 8.30379171e+06 1.01074567e+07 -1.42934827e+07 1.21809460e+07 9.35445969e+06 1.19382355e+07 8.93126623e+06 7.95641369e+06 9.41973879e+06] [ 1.18234639e+07 8.15385616e+06 1.39354879e+07 1.09608737e+07 1.42998060e+07 1.14981771e+07 1.26360766e+07 1.32285458e+07 1.35043984e+07 6.99149520e+06 4.47096305e+06 7.22650427e+06 8.86828397e+06 1.00582720e+07 1.44367090e+07 5.90225691e+06 8.52044104e+06 1.11922505e+07 1.37480362e+07 1.17565470e+07 8.39368622e+06 9.15485180e+06] [ 1.83255476e+07 1.15213946e+07 2.37091244e+07 -2.11371748e+06 2.54120620e+07 1.53262728e+07 1.71323605e+07 1.33826827e+07 2.57531180e+07 -8.73301069e+05 4.43341569e+06 1.71712486e+05 1.18989021e+07 1.28849098e+07 1.74300596e+07 2.93977913e+07 2.23816343e+06 1.34246931e+07 1.37556105e+07 1.47227056e+07 1.05483124e+07 9.69459475e+06] [ 1.73089487e+07 1.04025092e+07 2.25546996e+07 -6.44210008e+06 2.43814450e+07 1.34510961e+07 1.53818592e+07 1.02557089e+07 2.51757411e+07 -3.70145583e+06 3.59309374e+06 -2.62484288e+06 1.07656338e+07 1.13717243e+07 1.47602552e+07 3.07979988e+07 -6.88696068e+05 1.17413817e+07 1.04406419e+07 1.29496938e+07 9.41915484e+06 8.07735545e+06] [ 9.86090299e+06 6.79111995e+06 1.27342169e+07 -6.11812834e+06 1.37386740e+07 6.02902928e+06 8.66564164e+06 3.13281208e+06 1.44738425e+07 -2.98574123e+06 3.78559361e+06 -2.51932821e+06 6.95259623e+06 6.96278119e+06 5.78734654e+06 1.65709502e+07 -1.91131755e+06 6.81243672e+06 2.88930561e+06 6.30727457e+06 6.23979191e+06 5.14646415e+06] [ 1.08056307e+06 2.59700647e+06 1.86301024e+06 -9.10828662e+06 2.06357313e+06 -1.89705792e+06 6.15286012e+05 -4.73511523e+06 3.02430676e+06 -4.16468785e+06 3.26934374e+06 -4.17941609e+06 2.51451942e+06 1.55303998e+06 -4.48653408e+06 5.82210209e+06 -4.85505895e+06 5.67797339e+05 -5.74299012e+06 -9.85028393e+05 2.33997007e+06 1.18560168e+06] [-4.84865426e+06 -1.50081457e+06 -4.82814208e+06 -1.30549032e+07 
-4.80524790e+06 -6.72676695e+06 -5.16487110e+06 -9.74757444e+06 -3.61382059e+06 -7.26094895e+06 5.14108859e+05 -7.46319015e+06 -1.62957789e+06 -3.00834307e+06 -1.04353044e+07 3.80803263e+06 -8.42205384e+06 -4.46669496e+06 -1.13307295e+07 -5.82534463e+06 -1.69424732e+06 -2.96972369e+06] [-7.14102564e+06 -4.35581035e+06 -7.42239106e+06 -1.52902431e+07 -7.46723561e+06 -8.18874634e+06 -7.88959140e+06 -1.13624021e+07 -6.18469505e+06 -9.44804411e+06 -2.29936403e+06 -9.60287280e+06 -4.22448985e+06 -5.72495358e+06 -1.20430875e+07 5.35342482e+06 -1.04023003e+07 -7.16512253e+06 -1.31318434e+07 -7.64654716e+06 -4.50281710e+06 -5.79921824e+06] [-6.19907314e+06 -3.85457673e+06 -6.18122468e+06 -1.33132189e+07 -5.91607873e+06 -6.52234967e+06 -6.81416859e+06 -9.15283360e+06 -4.97524106e+06 -7.99177375e+06 -2.49318699e+06 -8.13216977e+06 -3.90582683e+06 -5.13706099e+06 -9.86373716e+06 5.85366336e+06 -8.62900818e+06 -6.25443095e+06 -1.04428047e+07 -6.31775320e+06 -4.15827025e+06 -5.33052759e+06] [-4.07320846e+06 -2.20523375e+06 -3.96857940e+06 -7.68659885e+06 -3.65647307e+06 -4.02361224e+06 -4.22028474e+06 -5.28062301e+06 -3.32081954e+06 -4.43231809e+06 -1.46839117e+06 -4.56008870e+06 -2.57542471e+06 -3.21402669e+06 -5.91152134e+06 2.65313826e+06 -4.72508724e+06 -3.77605737e+06 -5.80892063e+06 -3.98228885e+06 -2.56009969e+06 -3.21751351e+06] [-1.40766585e+06 -1.77289104e+05 -1.24644353e+06 -2.68037878e+06 -1.00041581e+06 -1.33188067e+06 -1.26320379e+06 -1.66052885e+06 -1.01181603e+06 -1.16341363e+06 1.40610982e+05 -1.25148421e+06 -5.37130868e+05 -7.60876779e+05 -2.14397651e+06 5.83649230e+05 -1.29769809e+06 -9.88030152e+05 -1.74638862e+06 -1.30436183e+06 -4.33686578e+05 -6.59542041e+05] [ 6.74058568e+04 5.08645738e+05 1.58602787e+05 -2.04938558e+04 3.24596958e+05 5.01709912e+04 1.63360374e+05 1.49074020e+05 1.92317971e+05 3.00856404e+05 4.98724136e+05 2.53410705e+05 2.79004564e+05 3.12541450e+05 -8.03895311e+04 -1.85627453e+05 2.92296883e+05 2.68171932e+05 2.37840584e+05 8.37280317e+04 3.90373283e+05 3.89105448e+05] [ 4.32582959e+04 1.20790855e+05 6.93089781e+04 7.32997301e+02 7.50604147e+04 4.47204991e+04 7.98536142e+04 4.73958800e+04 7.88343188e+04 7.18392610e+04 1.30897311e+05 6.88825314e+04 1.06165347e+05 9.54831131e+04 1.32930067e+04 -1.50468292e+04 6.21623516e+04 8.22646126e+04 6.11064130e+04 5.36673314e+04 1.11919296e+05 9.65504737e+04] [ 1.84940875e+02 1.70627410e+02 2.05128239e+02 9.29917780e+01 2.15910640e+02 1.22840925e+02 1.93246018e+02 1.19796867e+02 2.08290558e+02 1.10594345e+02 1.61282041e+02 1.02251193e+02 1.59869835e+02 1.87944502e+02 1.33045752e+02 8.09078823e+01 1.17841556e+02 1.82868147e+02 1.24370768e+02 1.54768899e+02 1.79168410e+02 1.74465156e+02] [ 1.64722713e-01 -3.85582105e-01 4.90280846e-01 9.55393664e-01 8.09247388e-01 7.43206350e-01 -9.99415503e-01 1.65232511e-01 -4.74560016e-01 6.26011580e-01 4.69572632e-02 7.87800550e-01 4.74899065e-02 7.82270288e-01 -1.00557154e-01 1.22999688e-01 6.24969089e-01 -1.93656483e-01 1.37336397e-01 7.96922556e-01 -3.07547323e-02 1.81178271e-01] [ 4.99225152e+03 -4.17915342e+03 2.61209057e+03 1.34462692e+03 3.26301182e+03 3.32306846e+03 8.94244282e+02 -6.91648255e+02 3.26151474e+03 -2.24269988e+03 -3.02233655e+03 -2.08043676e+03 -1.33161038e+03 -9.89034516e+02 3.03385698e+03 3.25881537e+03 -2.14347688e+03 8.31711616e+02 -2.51506248e+03 3.38650688e+03 -2.60798509e+03 -1.29312314e+03] [ 1.53919031e+04 -1.66988892e+04 6.19409449e+03 7.43015329e+03 -7.47510556e+00 3.95322027e+03 3.64056616e+03 -1.02013376e+04 1.14886435e+03 -1.40895800e+04 
-2.42434702e+04 -1.22749684e+04 -6.84813973e+03 -4.08357480e+03 1.04588665e+04 -3.99206639e+04 -1.51945669e+04 3.13711624e+02 -1.48425942e+04 7.15585662e+03 -1.07971717e+04 -9.68222603e+03] [ 2.13942986e+05 1.18213591e+05 2.25249985e+05 1.60974901e+05 1.92823546e+05 1.67747761e+05 2.01141464e+05 1.55788234e+05 1.90994824e+05 6.24259782e+04 4.36007716e+04 7.49720514e+04 1.39913395e+05 1.58707352e+05 2.26143832e+05 -1.46990619e+04 8.09269424e+04 1.65609945e+05 1.77494315e+05 1.72103613e+05 1.32223937e+05 1.28179323e+05] [-5.35726431e+05 -3.47500571e+05 -6.13216795e+05 1.91714075e+05 -5.77276419e+05 -6.49710376e+05 -5.03998545e+05 -2.70570717e+05 -7.17767328e+05 -1.39596789e+05 -5.13982510e+05 -1.63432054e+05 -6.31390416e+05 -4.73587094e+05 -3.19235679e+05 -6.82144026e+05 -5.89371054e+04 -4.77982798e+05 -7.24359945e+04 -6.37023518e+05 -4.16760928e+05 -4.04505467e+05] [-3.91164228e+06 -3.60399280e+06 -4.12862389e+06 -4.17305501e+06 -3.82553081e+06 -3.99837691e+06 -4.30163981e+06 -3.62667125e+06 -3.93106696e+06 -3.83623113e+06 -3.75242737e+06 -3.84989635e+06 -4.29287792e+06 -4.21098747e+06 -3.79390053e+06 1.25116582e+06 -3.52220801e+06 -4.31903056e+06 -3.46138396e+06 -4.25362910e+06 -3.90265879e+06 -4.16566933e+06] [-4.56128361e+06 -3.69747818e+06 -4.19965027e+06 -9.54496733e+06 -3.54612438e+06 -5.06952063e+06 -5.18261678e+06 -5.75165845e+06 -3.38133388e+06 -6.94429216e+06 -3.70689548e+06 -6.92434381e+06 -4.56573946e+06 -4.98932826e+06 -5.94193782e+06 5.97039036e+06 -6.44047741e+06 -5.28428420e+06 -5.98677220e+06 -5.26831887e+06 -4.26812633e+06 -5.18773785e+06] [-3.33634465e+06 -1.14349875e+06 -2.29661568e+06 -1.28618384e+07 -1.31341820e+06 -4.61688558e+06 -3.76536758e+06 -6.35599735e+06 -8.42456678e+05 -7.80034631e+06 -7.67992888e+05 -7.75643016e+06 -2.08612734e+06 -3.05437585e+06 -6.89543594e+06 9.06907988e+06 -7.42176252e+06 -3.69802548e+06 -7.08804320e+06 -4.21031748e+06 -1.84762388e+06 -3.27677114e+06] [ 1.50183260e+06 5.72103567e+06 3.87109373e+06 -1.39169868e+07 5.39408893e+06 -6.64122683e+05 2.08484605e+06 -3.57967480e+06 6.27918196e+06 -5.46774937e+06 5.96428366e+06 -5.38141518e+06 4.73470173e+06 3.16205382e+06 -4.56570152e+06 1.30110357e+07 -5.24972410e+06 2.20543957e+06 -4.69300054e+06 7.56384205e+05 4.79370446e+06 2.77716738e+06] [ 7.06141009e+06 1.35503031e+07 1.04058316e+07 -1.05405813e+07 1.23840743e+07 4.38034465e+06 9.03608947e+06 1.42377964e+06 1.31674890e+07 5.23096369e+04 1.35730386e+07 -8.93187714e+03 1.22362932e+07 1.05580389e+07 -5.38219124e+05 1.37404613e+07 -2.24257486e+05 9.22068332e+06 2.76846476e+05 6.65829173e+06 1.24918744e+07 1.02827774e+07] [ 1.14358545e+07 1.92966099e+07 1.48632279e+07 -5.13578955e+06 1.70963476e+07 8.81576383e+06 1.44801014e+07 6.52455125e+06 1.74679354e+07 6.00691658e+06 1.90673040e+07 5.79775158e+06 1.76922525e+07 1.61085744e+07 3.83105652e+06 1.26452924e+07 5.31620860e+06 1.47191521e+07 5.58160115e+06 1.14861497e+07 1.81417548e+07 1.60633931e+07] [ 1.29090144e+07 2.06251185e+07 1.57367570e+07 2.52735591e+06 1.78847082e+07 1.10636149e+07 1.65398739e+07 1.07493137e+07 1.74617334e+07 1.11318182e+07 2.00544332e+07 1.07467561e+07 1.87631713e+07 1.77490506e+07 7.69018474e+06 7.15411749e+06 1.03650339e+07 1.69789166e+07 1.05096652e+07 1.35757740e+07 1.95423427e+07 1.82014447e+07] [ 1.02929281e+07 1.66614980e+07 1.14626811e+07 1.28397075e+07 1.26052505e+07 1.00228429e+07 1.43689639e+07 1.29228468e+07 1.11971160e+07 1.53654336e+07 1.67218430e+07 1.47339717e+07 1.50612740e+07 1.48420896e+07 9.45418829e+06 -5.85340430e+06 1.46539187e+07 1.52063039e+07 
1.35047674e+07 1.24007139e+07 1.60951323e+07 1.61710388e+07] [ 7.53905059e+06 1.20948753e+07 7.08787329e+06 2.24571562e+07 7.05684985e+06 8.79634686e+06 1.16547520e+07 1.41951945e+07 5.03578516e+06 1.85442593e+07 1.30020551e+07 1.77488218e+07 1.12877076e+07 1.17997674e+07 1.05771167e+07 -1.74817641e+07 1.79592223e+07 1.30403144e+07 1.53192309e+07 1.10266464e+07 1.23704135e+07 1.38908245e+07] [ 9.52522594e+06 1.02080781e+07 9.96659640e+06 2.26065371e+07 9.66376933e+06 1.07734152e+07 1.22984228e+07 1.56377390e+07 7.80514105e+06 1.62864875e+07 9.43619002e+06 1.58304056e+07 1.00595066e+07 1.11493085e+07 1.38806870e+07 -1.16023637e+07 1.67416977e+07 1.27973230e+07 1.70069754e+07 1.19027855e+07 1.05478830e+07 1.22595232e+07] [ 1.44857426e+07 1.00785147e+07 1.78301342e+07 1.14551024e+07 1.87045702e+07 1.40117158e+07 1.45861204e+07 1.59621420e+07 1.76992822e+07 7.77208159e+06 5.26124688e+06 8.18564071e+06 9.91919338e+06 1.13838879e+07 1.75350539e+07 1.13938848e+07 1.02511078e+07 1.27976967e+07 1.73754374e+07 1.34966144e+07 9.46527551e+06 1.00100215e+07] [ 1.71916764e+07 1.09426982e+07 2.24061053e+07 -2.88694533e+05 2.42596143e+07 1.47278712e+07 1.59126095e+07 1.41485000e+07 2.41558357e+07 3.26977064e+05 3.72099381e+06 1.35564280e+06 1.03801710e+07 1.15943447e+07 1.77674348e+07 2.73629978e+07 3.81807578e+06 1.24593093e+07 1.53918308e+07 1.36827763e+07 9.54498134e+06 8.70622467e+06] [ 1.21316695e+07 7.64778556e+06 1.58973985e+07 -5.50865821e+06 1.74495598e+07 9.01706026e+06 1.07059230e+07 7.10405943e+06 1.78628824e+07 -2.75093407e+06 2.70477095e+06 -1.97672957e+06 7.19671501e+06 7.72269108e+06 1.02516978e+07 2.22014142e+07 -4.79753103e+05 8.11006081e+06 7.62764610e+06 8.56500725e+06 6.56509037e+06 5.44059175e+06] [ 4.78087867e+06 4.64324679e+06 6.22213647e+06 -8.24868343e+06 6.92183724e+06 1.50557632e+06 4.25801553e+06 -1.40329422e+06 7.73604241e+06 -3.49302813e+06 3.71718278e+06 -3.27798038e+06 4.53932246e+06 4.01528120e+06 -1.91146108e+05 9.77047975e+06 -3.44806839e+06 3.39553454e+06 -2.03885393e+06 2.27822185e+06 4.26562377e+06 3.02956881e+06] [-2.51833143e+06 1.41649292e+06 -2.67538227e+06 -1.17379391e+07 -2.61941549e+06 -5.21774166e+06 -2.43273086e+06 -8.55030785e+06 -1.45474741e+06 -4.90251543e+06 3.67225962e+06 -5.12882684e+06 1.11244688e+06 -2.91367346e+05 -9.40892793e+06 1.35355963e+06 -6.50518851e+06 -1.86797698e+06 -1.00217442e+07 -3.83994196e+06 1.22010681e+06 -2.11966916e+05] [-5.58840179e+06 -4.78526432e+05 -5.99447533e+06 -1.25743556e+07 -5.98082137e+06 -7.53757473e+06 -5.23924003e+06 -1.03580269e+07 -4.95919905e+06 -5.85613710e+06 2.40137569e+06 -6.18992878e+06 -1.15046833e+06 -2.57016653e+06 -1.18930789e+07 -1.68459511e+05 -7.51076819e+06 -4.28656409e+06 -1.18487072e+07 -6.28715447e+06 -8.37910831e+05 -2.14102176e+06] [-6.58361295e+06 -2.32224754e+06 -7.00698695e+06 -1.24404038e+07 -6.71747362e+06 -7.60203051e+06 -6.58594317e+06 -9.88385776e+06 -5.98549703e+06 -6.36497335e+06 1.67016489e+05 -6.63163193e+06 -3.11648889e+06 -4.27349807e+06 -1.13922659e+07 1.19703008e+06 -7.51219969e+06 -5.60628167e+06 -1.10966300e+07 -6.84186833e+06 -2.81172374e+06 -3.89777695e+06] [-4.91521139e+06 -1.07228244e+06 -4.84341151e+06 -8.96733497e+06 -4.10311774e+06 -5.26290867e+06 -4.68900140e+06 -6.48894433e+06 -3.91903828e+06 -3.83883529e+06 5.33198447e+05 -4.14198825e+06 -2.20379293e+06 -2.90411023e+06 -8.16375473e+06 1.54347405e+06 -4.53098741e+06 -3.78255862e+06 -7.01893001e+06 -4.80727785e+06 -1.76546137e+06 -2.55449546e+06] [-3.04083773e+06 -1.71731247e+05 -2.94709143e+06 -4.15371150e+06 
-2.26502527e+06 -3.01245985e+06 -2.65197993e+06 -3.07368131e+06 -2.52280818e+06 -1.21137474e+06 6.73164492e+05 -1.46123439e+06 -1.35648794e+06 -1.57523177e+06 -4.41554408e+06 -4.33261155e+05 -1.50511803e+06 -1.94069797e+06 -3.00499981e+06 -2.77269323e+06 -8.07314794e+05 -1.12544034e+06] [-5.73441856e+05 1.11485379e+06 -4.68236549e+05 -6.92891166e+05 -1.13465645e+05 -5.74860237e+05 -1.46994904e+05 -3.62502909e+05 -3.89192644e+05 6.13229068e+05 1.53851543e+06 4.64542775e+05 4.58149362e+05 3.94855557e+05 -1.19130647e+06 -1.11860556e+06 4.60588728e+05 2.30553184e+05 -2.38069137e+05 -3.78586562e+05 7.97110344e+05 7.14702496e+05] [ 1.58011761e+05 7.74905743e+05 2.51899601e+05 3.53142947e+05 4.19723016e+05 1.59588659e+05 3.70985092e+05 3.80573759e+05 2.56324073e+05 6.26083323e+05 8.53098869e+05 5.76998563e+05 5.12837413e+05 5.25581366e+05 5.39360762e+04 -5.92438590e+05 6.21388374e+05 4.78142565e+05 5.05645297e+05 2.49958546e+05 6.57623841e+05 6.57118178e+05] [ 2.71155858e+04 1.03864518e+05 3.24044587e+04 1.64987688e+05 3.63416190e+04 1.64692194e+04 6.97993356e+04 9.84829573e+04 1.97013241e+04 1.35027953e+05 1.32601271e+05 1.32931096e+05 7.35536966e+04 8.11052191e+04 4.57228537e+04 -1.58733715e+05 1.38926971e+05 7.30236917e+04 1.24628704e+05 3.63337733e+04 9.79205687e+04 1.05815163e+05] [ 3.40459798e+02 3.13465318e+02 3.77913168e+02 1.71718085e+02 3.94926351e+02 2.25323034e+02 3.56157362e+02 2.20859588e+02 3.80824132e+02 2.02024170e+02 2.96943869e+02 1.89189091e+02 2.93008799e+02 3.44312476e+02 2.42811621e+02 1.49414497e+02 2.16124959e+02 3.35446541e+02 2.26238356e+02 2.85559024e+02 3.28177540e+02 3.18219519e+02] [ 7.13008239e-01 -9.31493543e-01 4.55051555e-01 -2.71094293e-01 -3.70473412e-01 7.05450901e-01 -4.51073937e-01 8.45017695e-01 -3.20933633e-01 -4.88822732e-01 3.80058208e-01 7.39630493e-01 3.15505139e-01 -9.44886529e-01 -5.81461542e-01 -4.25851036e-01 -4.51797997e-01 8.44669600e-01 6.70332920e-01 3.82777607e-01 -2.63493809e-01 -8.58836995e-01] [ 6.64137940e+03 -1.18662365e+04 9.12366324e+01 4.28025441e+04 -4.74922280e+03 7.41955159e+03 1.03277702e+03 1.05348323e+04 -3.77100001e+03 1.29590890e+04 -5.76193607e+03 1.19002522e+04 -3.39838298e+03 1.18965657e+02 1.63968646e+04 -2.44704484e+04 1.18344170e+04 2.66690744e+03 8.94857621e+03 6.15227936e+03 -4.47547068e+03 1.44946881e+03] [ 3.62575030e+03 -2.69379337e+04 -1.40094234e+04 2.60543070e+04 -2.54871591e+04 -6.87983853e+03 -6.35892008e+03 -2.50053396e+04 -2.42296287e+04 -5.69918230e+03 -2.19847313e+04 -4.94975736e+03 -1.01136066e+04 -1.11522618e+04 -3.18176207e+03 -1.30466918e+05 -1.36371657e+04 -6.01249041e+03 -2.96679088e+04 -1.57333921e+03 -1.67622701e+04 -1.41584128e+04] [ 2.60236129e+05 3.38798085e+05 2.44585300e+05 5.83459700e+05 2.06744922e+05 2.09567318e+05 3.55338344e+05 3.11806220e+05 1.57557151e+05 4.23786413e+05 3.00637652e+05 4.23126244e+05 3.01842268e+05 3.37503088e+05 3.18846850e+05 -6.51608506e+05 4.06012180e+05 3.42378543e+05 3.88948626e+05 2.48355182e+05 3.44796120e+05 3.52367565e+05] [-1.04654302e+06 -1.70063291e+05 -1.24818395e+06 1.04286791e+06 -1.06502099e+06 -1.11029690e+06 -7.57926995e+05 -2.51304249e+05 -1.45413256e+06 6.76031723e+05 -2.02883432e+05 5.73613933e+05 -8.61964186e+05 -5.66062455e+05 -6.25117852e+05 -2.65336990e+06 6.98669751e+05 -5.51421583e+05 2.02011569e+05 -1.04166449e+06 -3.99477468e+05 -2.96844016e+05] [-4.58007807e+06 -2.68439330e+06 -4.94030243e+06 -2.66090494e+06 -4.34869173e+06 -4.52557962e+06 -4.33503786e+06 -3.30635409e+06 -4.92309488e+06 -2.06104275e+06 -2.49408929e+06 -2.19114131e+06 
-4.18324304e+06 -3.89436345e+06 -4.22706102e+06 -2.37204223e+06 -1.86679295e+06 -4.00313705e+06 -2.64749514e+06 -4.67084903e+06 -3.25645080e+06 -3.41026469e+06] [-6.12291775e+06 -2.62143395e+06 -6.19762410e+06 -7.07972612e+06 -5.34634425e+06 -6.06150334e+06 -5.70916576e+06 -5.59769833e+06 -5.72952017e+06 -3.96146928e+06 -1.62932209e+06 -4.09331117e+06 -4.53290477e+06 -4.82420587e+06 -7.09842687e+06 -6.29166020e+05 -3.88998719e+06 -5.23732927e+06 -5.19666946e+06 -6.15310182e+06 -3.50709187e+06 -4.29561817e+06] [-5.45257917e+06 7.66958363e+05 -4.92205764e+06 -1.04148232e+07 -3.64761421e+06 -5.84440764e+06 -4.33581217e+06 -6.47382368e+06 -3.80628377e+06 -3.61064321e+06 2.85712195e+06 -3.84611229e+06 -1.45306643e+06 -2.47435326e+06 -9.01619845e+06 8.23711457e+05 -4.02247289e+06 -3.35246733e+06 -6.60998103e+06 -5.19120303e+06 -4.18927596e+05 -1.76248280e+06] [-1.76384253e+06 8.00480193e+06 -3.46992718e+05 -1.07302599e+07 1.37661158e+06 -2.72904166e+06 7.34417965e+05 -4.33985178e+06 1.42512514e+06 9.66482616e+04 1.08583523e+07 -3.06135168e+05 5.52400656e+06 3.73654553e+06 -8.17298736e+06 1.74475785e+06 -1.01924533e+06 2.28053638e+06 -4.98553488e+06 -9.03261436e+05 6.55890287e+06 4.70614635e+06] [ 4.34183772e+06 1.64462773e+07 6.44629327e+06 -7.19808699e+06 8.60508575e+06 2.59476488e+06 7.81900114e+06 1.05676269e+06 8.61377853e+06 6.02210123e+06 1.92887141e+07 5.36029140e+06 1.37096991e+07 1.15873549e+07 -4.01264411e+06 3.27331682e+06 4.33816726e+06 9.74547351e+06 1.61526821e+05 5.43311575e+06 1.47569130e+07 1.28870717e+07] [ 8.26729211e+06 2.12792832e+07 1.03913908e+07 -1.96569042e+06 1.28461924e+07 6.66383472e+06 1.26639315e+07 5.99562148e+06 1.23280242e+07 1.17569006e+07 2.42089664e+07 1.08352295e+07 1.81320181e+07 1.61060912e+07 1.53270022e+04 1.99362525e+06 9.86223124e+06 1.47070008e+07 5.31759347e+06 9.90870197e+06 1.93343128e+07 1.77358921e+07] [ 8.46993554e+06 2.01474324e+07 1.00575592e+07 5.88055423e+06 1.23079671e+07 7.76555861e+06 1.31391685e+07 9.46482744e+06 1.09287443e+07 1.57085190e+07 2.24761191e+07 1.45752919e+07 1.68996418e+07 1.56868687e+07 3.38566990e+06 -4.09649599e+06 1.40655926e+07 1.52404313e+07 9.71293944e+06 1.07042295e+07 1.83602653e+07 1.76982704e+07] [ 7.04508065e+06 1.56736777e+07 7.54937656e+06 1.56515676e+07 8.70257287e+06 7.61831726e+06 1.15569216e+07 1.20168796e+07 6.61274716e+06 1.83827561e+07 1.76303277e+07 1.72649361e+07 1.30330213e+07 1.28573968e+07 6.60500288e+06 -1.33604387e+07 1.72645586e+07 1.35439292e+07 1.31559698e+07 1.00505628e+07 1.45339887e+07 1.52064473e+07] [ 6.32697625e+06 1.06048513e+07 6.26998137e+06 2.23311206e+07 6.31563240e+06 8.15670539e+06 9.98267500e+06 1.37242115e+07 3.99892101e+06 1.83769984e+07 1.19596040e+07 1.75435361e+07 9.26960393e+06 9.95662459e+06 9.87442697e+06 -1.71096908e+07 1.82121285e+07 1.15218030e+07 1.52915604e+07 9.78362883e+06 1.03197626e+07 1.20812424e+07] [ 9.07070401e+06 7.69790269e+06 1.02467111e+07 1.96908732e+07 1.00708982e+07 1.06671369e+07 1.05981158e+07 1.50254227e+07 8.38280266e+06 1.33084333e+07 6.24906130e+06 1.32310085e+07 7.46213070e+06 8.67928144e+06 1.42516791e+07 -5.83368690e+06 1.47544283e+07 1.05639272e+07 1.67428980e+07 1.08344858e+07 7.54627800e+06 9.14406095e+06] [ 1.29698467e+07 7.88511598e+06 1.64468454e+07 9.00006924e+06 1.74944876e+07 1.29853888e+07 1.23972294e+07 1.47962215e+07 1.65479428e+07 5.51466675e+06 2.75214934e+06 6.22291693e+06 7.47936136e+06 8.91753219e+06 1.68902121e+07 1.37300469e+07 8.54605579e+06 1.03832725e+07 1.65513370e+07 1.17960704e+07 6.92040980e+06 7.28697411e+06] [ 
1.21444276e+07 8.20351601e+06 1.61123209e+07 -1.02740580e+06 1.80030494e+07 1.05763568e+07 1.11435220e+07 1.05940798e+07 1.76390748e+07 3.01160243e+05 2.81823077e+06 1.12434961e+06 7.03083433e+06 7.98013959e+06 1.30400684e+07 2.06703822e+07 3.14671637e+06 8.73503781e+06 1.19972718e+07 9.46009996e+06 6.72924566e+06 5.96477641e+06] [ 6.34550517e+06 6.12571630e+06 8.53346419e+06 -6.12497681e+06 9.95891169e+06 3.92121522e+06 6.01237469e+06 2.77336726e+06 1.02346769e+07 -1.84136150e+06 3.90473289e+06 -1.41106783e+06 5.03529539e+06 4.95731782e+06 3.74856673e+06 1.25471819e+07 -7.90795794e+05 4.84101791e+06 3.13368886e+06 4.15142116e+06 5.14952373e+06 3.99299842e+06] [-1.44921219e+06 2.80622154e+06 -1.37739874e+06 -1.10201266e+07 -4.27940033e+05 -4.05088316e+06 -1.16814419e+06 -6.43031191e+06 1.65361511e+05 -3.57207059e+06 4.35606369e+06 -3.77496639e+06 1.59869619e+06 5.63901214e+05 -7.66650023e+06 2.46415128e+06 -4.69604808e+06 -6.15354643e+05 -7.28483322e+06 -2.61137776e+06 2.16630604e+06 7.92136129e+05] [-6.03297775e+06 5.98528448e+05 -6.96269834e+06 -1.30404668e+07 -6.44612851e+06 -8.35647211e+06 -5.28252565e+06 -1.10688175e+07 -5.66230743e+06 -4.40494250e+06 4.13133824e+06 -4.91218687e+06 -7.67155462e+05 -2.21071355e+06 -1.34875417e+07 -3.12818157e+06 -6.63837946e+06 -3.99485656e+06 -1.24123403e+07 -6.48510617e+06 4.04089150e+04 -1.43347393e+06] [-6.44406646e+06 6.88157624e+05 -7.17575610e+06 -1.09247583e+07 -6.62514208e+06 -8.17863475e+06 -5.30275685e+06 -9.97696018e+06 -6.26263930e+06 -3.24576023e+06 4.18320299e+06 -3.82272676e+06 -1.08858394e+06 -2.25609382e+06 -1.27243792e+07 -4.56050632e+06 -5.23384984e+06 -3.90879775e+06 -1.08807533e+07 -6.66782461e+06 -1.03011960e+05 -1.30303987e+06] [-5.54981129e+06 2.37917927e+05 -6.07520615e+06 -7.78571785e+06 -5.35997978e+06 -6.56197987e+06 -4.62466267e+06 -7.31155237e+06 -5.39044581e+06 -2.02335187e+06 2.96282620e+06 -2.50574229e+06 -1.58337678e+06 -2.24543669e+06 -9.81189882e+06 -3.73676571e+06 -3.39234283e+06 -3.36077206e+06 -7.73495814e+06 -5.58809035e+06 -5.67708343e+05 -1.33204600e+06] [-3.50439223e+06 9.84108696e+05 -3.43146043e+06 -4.57264939e+06 -2.67743167e+06 -3.84251133e+06 -2.59904316e+06 -3.84250070e+06 -2.88802828e+06 -4.52382404e+05 2.60850551e+06 -8.04972687e+05 -5.73169846e+05 -9.35903716e+05 -5.91193106e+06 -2.00732232e+06 -1.17825970e+06 -1.67749818e+06 -3.85210511e+06 -3.24238585e+06 2.54718748e+05 -2.53616093e+05] [-1.34632239e+06 1.80271638e+06 -1.37220698e+06 -5.64068959e+05 -5.69344380e+05 -1.40638380e+06 -6.78434057e+05 -7.72958762e+05 -1.07374121e+06 1.61109247e+06 2.81512138e+06 1.35702948e+06 4.68371834e+05 4.40996363e+05 -2.37701259e+06 -2.51701268e+06 1.30343405e+06 1.54399795e+05 -4.55398363e+05 -9.82788991e+05 1.21832470e+06 1.14517558e+06] [ 9.07928051e+04 2.00482810e+06 7.49058265e+04 1.03062454e+06 6.01187596e+05 1.03550794e+05 6.12460830e+05 7.06408402e+05 1.57317624e+05 2.08769480e+06 2.63038017e+06 1.94496911e+06 1.14274519e+06 1.19907768e+06 -3.40690842e+05 -2.11358932e+06 1.95917907e+06 1.10062777e+06 1.00512725e+06 4.01694048e+05 1.65437868e+06 1.67628995e+06] [ 4.54649604e+05 1.12428750e+06 5.19178329e+05 9.76690206e+05 6.79771846e+05 4.26018445e+05 7.39170135e+05 7.39205926e+05 5.02636141e+05 1.13488291e+06 1.29155930e+06 1.09769433e+06 8.48443141e+05 8.72383247e+05 3.96758781e+05 -8.11692323e+05 1.11513056e+06 8.46735188e+05 8.88689378e+05 5.73978398e+05 1.02374875e+06 1.03463310e+06] [ 1.20259535e+05 1.45497391e+05 1.18209960e+05 3.09969172e+05 1.36063986e+05 8.82829741e+04 1.44187538e+05 
2.00446310e+05 1.09729115e+05 1.95737287e+05 1.69843660e+05 2.00122470e+05 1.20937922e+05 1.43247515e+05 1.64978569e+05 -1.08786502e+05 2.19085547e+05 1.46428445e+05 2.25142607e+05 1.10541996e+05 1.47434139e+05 1.71882056e+05] [ 1.70210976e+04 1.75609435e+04 1.81223950e+04 1.43104068e+04 1.83700105e+04 1.42329176e+04 1.80659184e+04 1.43915564e+04 1.80743044e+04 1.42440404e+04 1.76619782e+04 1.41955458e+04 1.79438120e+04 1.76336408e+04 1.43116825e+04 2.01109552e+03 1.44374939e+04 1.80032069e+04 1.45148372e+04 1.59213356e+04 1.79204633e+04 1.78577050e+04] [ 3.69463282e+03 9.15806467e+03 6.15048360e+03 -8.62600115e+03 7.88961435e+03 4.30647049e+03 7.60500641e+03 4.82494019e+02 8.22884803e+03 -2.52126882e+03 4.91831324e+03 -2.16784499e+03 8.44579695e+03 7.07842183e+03 2.02554945e+02 1.37540197e+04 -2.31037097e+03 5.33955837e+03 -5.28499833e+02 7.03739645e+03 8.82758738e+03 5.95827928e+03] [ 9.78927982e+02 -1.69182350e+03 -5.70586847e+01 5.32113555e+03 -5.75279717e+02 9.47119226e+02 2.09080399e+02 1.15089625e+03 -5.17113903e+02 1.56679781e+03 -8.03100422e+02 1.45520092e+03 -5.82570834e+02 -9.22031304e+01 2.04686109e+03 -3.55452143e+03 1.41458124e+03 4.00136481e+02 9.16249770e+02 8.50276095e+02 -6.81609726e+02 1.40925313e+02] [-2.56597422e+04 -4.60264051e+04 -5.28686278e+04 2.58544225e+04 -6.87582762e+04 -3.46858084e+04 -2.98611849e+04 -5.14559146e+04 -6.79415354e+04 -4.69612190e+03 -3.09116187e+04 -4.65503758e+03 -2.79156571e+04 -3.22994173e+04 -3.23802552e+04 -1.91801479e+05 -1.94575462e+04 -2.81828851e+04 -5.27600767e+04 -2.74806738e+04 -3.47847171e+04 -3.22226032e+04] [ 2.27985477e+05 4.30426754e+05 1.55017480e+05 9.89085730e+05 1.07922007e+05 1.94485035e+05 4.14252254e+05 4.35476356e+05 1.28544914e+04 7.46361450e+05 4.17899077e+05 7.38257687e+05 3.28999797e+05 4.08790504e+05 3.85275465e+05 -1.42276351e+06 6.90441417e+05 4.21463556e+05 5.80143401e+05 2.56650181e+05 4.38807495e+05 4.73666042e+05] [-1.10022502e+06 2.63493788e+05 -1.30596224e+06 2.16308495e+06 -1.04138812e+06 -1.06541434e+06 -4.68604537e+05 2.26941027e+05 -1.63388662e+06 1.66836530e+06 2.74903815e+05 1.53489053e+06 -6.97614811e+05 -2.61718797e+05 -3.69770159e+05 -4.63309983e+06 1.64895092e+06 -2.04932358e+05 9.32426701e+05 -1.00658919e+06 -3.46795114e+04 1.43993900e+05] [-4.96766030e+06 -1.96580036e+06 -5.44805483e+06 -4.79211382e+05 -4.76536353e+06 -4.73346433e+06 -4.11899711e+06 -2.65018887e+06 -5.68273395e+06 -1.77578344e+05 -1.68268328e+06 -3.69408892e+05 -4.05561776e+06 -3.52671898e+06 -4.00528979e+06 -6.40971152e+06 -1.06728186e+05 -3.61797345e+06 -1.45721655e+06 -4.86865000e+06 -2.71508069e+06 -2.72181867e+06] [-8.17716731e+06 -2.54902152e+06 -8.67392884e+06 -5.29674551e+06 -7.79506884e+06 -7.60483999e+06 -6.85538983e+06 -6.11090799e+06 -8.61690030e+06 -2.12975933e+06 -1.21335174e+06 -2.36586158e+06 -5.35223117e+06 -5.42192464e+06 -8.39804024e+06 -7.04311528e+06 -2.46237842e+06 -6.02957988e+06 -5.07906759e+06 -7.72668568e+06 -3.67804918e+06 -4.32739470e+06] [-9.53710627e+06 -2.44833705e+05 -9.86530803e+06 -9.92927483e+06 -8.64642440e+06 -8.97643801e+06 -7.32850501e+06 -8.49151922e+06 -9.28269057e+06 -2.31225590e+06 3.06507844e+06 -2.71983147e+06 -3.43680205e+06 -4.44540273e+06 -1.24827432e+07 -7.05739074e+06 -3.43653444e+06 -5.72680810e+06 -8.13500735e+06 -8.40291175e+06 -1.66801668e+06 -2.90298289e+06] [-6.42034202e+06 7.39679574e+06 -6.17144348e+06 -1.02593226e+07 -4.49031348e+06 -6.37861152e+06 -2.83999916e+06 -6.74645721e+06 -5.06727381e+06 2.19060906e+06 1.21915886e+07 1.38393450e+06 3.68003259e+06 1.72388864e+06 
-1.27560359e+07 -7.44748615e+06 1.82452611e+04 -1.50173998e+05 -7.05251157e+06 -4.49925194e+06 5.50887132e+06 3.88259780e+06] [-2.76618813e+05 1.66301832e+07 4.47857232e+05 -6.24416420e+06 2.44297929e+06 -9.20399293e+05 4.56567727e+06 -1.63408494e+06 1.92065211e+06 9.46287639e+06 2.23688918e+07 8.24863549e+06 1.27599992e+07 1.01072436e+07 -9.30999860e+06 -7.69882176e+06 6.31801969e+06 8.02069166e+06 -2.39863009e+06 2.21856836e+06 1.44413245e+07 1.27187882e+07] [ 2.48455088e+06 2.06448484e+07 3.19454499e+06 -1.18497875e+06 5.58901733e+06 2.19152491e+06 8.27899727e+06 2.43742541e+06 4.46275681e+06 1.49939038e+07 2.66365957e+07 1.33860996e+07 1.62540539e+07 1.37215655e+07 -6.38292621e+06 -1.05633261e+07 1.15895304e+07 1.22048517e+07 1.97674972e+06 5.87011863e+06 1.81971604e+07 1.68803281e+07] [ 3.88384457e+06 1.90960930e+07 4.42834994e+06 7.01069515e+06 6.40596511e+06 4.15441859e+06 9.48244333e+06 6.62439245e+06 4.62480067e+06 1.78729248e+07 2.38081688e+07 1.62049815e+07 1.49577800e+07 1.35123918e+07 -1.49145126e+06 -1.42882409e+07 1.52209166e+07 1.29719754e+07 7.04341789e+06 7.40309473e+06 1.69718870e+07 1.66908555e+07] [ 4.64255494e+06 1.48184610e+07 4.83062609e+06 1.53186587e+07 5.85236738e+06 5.85307801e+06 9.43302216e+06 9.91346367e+06 3.67834370e+06 1.87390357e+07 1.82578740e+07 1.73840152e+07 1.19748758e+07 1.15487712e+07 3.71216834e+06 -1.74997644e+07 1.72649471e+07 1.21239059e+07 1.10325323e+07 8.31723357e+06 1.35463165e+07 1.43557941e+07] [ 5.55142236e+06 8.88356968e+06 5.74423763e+06 1.97575184e+07 5.83325203e+06 7.50000475e+06 8.38974071e+06 1.20241868e+07 3.70618343e+06 1.60880913e+07 1.03316634e+07 1.53942604e+07 7.77127092e+06 8.38345786e+06 8.83204555e+06 -1.45747460e+07 1.62767171e+07 9.95028705e+06 1.35690648e+07 8.64861436e+06 8.52034590e+06 1.01993500e+07] [ 8.14925652e+06 5.40759779e+06 9.69058296e+06 1.43954059e+07 9.63559631e+06 9.48732070e+06 8.53767586e+06 1.23583230e+07 8.57333161e+06 8.98272310e+06 3.72234800e+06 9.20538221e+06 5.61363889e+06 6.53605385e+06 1.26555591e+07 -2.23792715e+04 1.08636453e+07 8.15346245e+06 1.37167771e+07 9.21084450e+06 5.19090968e+06 6.37330238e+06] [ 8.54554179e+06 5.16699660e+06 1.12786154e+07 3.83784567e+06 1.24109250e+07 8.67100088e+06 7.83876385e+06 9.66458432e+06 1.18124057e+07 2.52224935e+06 1.40431499e+06 3.09541351e+06 4.69004595e+06 5.52818110e+06 1.11662136e+07 1.21095030e+07 4.89574497e+06 6.48340702e+06 1.09196237e+07 7.64147368e+06 4.23126067e+06 4.26714673e+06] [ 5.79631875e+06 6.14654403e+06 8.22616027e+06 -3.88858366e+06 9.75925115e+06 4.71115009e+06 5.75197850e+06 4.41452582e+06 9.62350115e+06 -1.20737806e+05 3.72193116e+06 3.16063959e+05 4.70968090e+06 4.81436564e+06 4.89562654e+06 1.11143554e+07 1.12873374e+06 4.85557807e+06 5.27220060e+06 4.40301135e+06 4.90627763e+06 3.95474853e+06] [-5.67320162e+04 5.06539999e+06 8.52916962e+05 -9.57668356e+06 2.19197493e+06 -1.81193537e+06 1.00748583e+06 -3.29512235e+06 2.54114152e+06 -1.69924464e+06 6.06038834e+06 -1.73187268e+06 3.39935216e+06 2.39278535e+06 -4.95358427e+06 3.42646688e+06 -2.35897537e+06 1.45724301e+06 -3.50814155e+06 -6.75736897e+05 4.10955000e+06 2.67421378e+06] [-5.24302426e+06 3.19306077e+06 -5.67785995e+06 -1.32895451e+07 -4.60597096e+06 -7.23438983e+06 -3.69720425e+06 -9.49948998e+06 -4.10940291e+06 -2.82790283e+06 6.57198820e+06 -3.31844376e+06 1.14493645e+06 -4.40220473e+05 -1.27461812e+07 -3.50681227e+06 -4.97503715e+06 -2.14008929e+06 -1.04117340e+07 -5.20742212e+06 2.28162976e+06 6.40602897e+05] [-7.22570906e+06 1.44647023e+06 -7.98439246e+06 
-1.33139315e+07 -6.97693081e+06 -9.07784537e+06 -5.66504617e+06 -1.11192697e+07 -6.67381694e+06 -3.33249833e+06 5.07983752e+06 -4.01572093e+06 -7.82523119e+05 -2.19763017e+06 -1.45580067e+07 -5.53686716e+06 -5.70354160e+06 -4.02643518e+06 -1.20000669e+07 -7.14677810e+06 4.62809747e+05 -1.07688821e+06] [-5.96297979e+06 2.01999482e+06 -6.25816239e+06 -9.78631777e+06 -5.18615121e+06 -7.29930144e+06 -4.34276135e+06 -8.21059945e+06 -5.35930384e+06 -1.50700701e+06 4.91107279e+06 -2.16806364e+06 -5.20931519e+05 -1.41857349e+06 -1.15173308e+07 -5.41816673e+06 -3.32564608e+06 -2.91235598e+06 -8.41287805e+06 -5.91739777e+06 7.75469035e+05 -3.76399103e+05] [-4.06290678e+06 2.65133757e+06 -4.27398711e+06 -4.61653529e+06 -3.03561129e+06 -4.79054196e+06 -2.54246091e+06 -4.58848784e+06 -3.79453635e+06 1.20029424e+06 4.83381948e+06 5.57202144e+05 -1.63254109e+04 -2.90525015e+05 -7.53999102e+06 -6.25650920e+06 -3.48285734e+04 -1.12762997e+06 -4.23726966e+06 -3.83826442e+06 1.46313506e+06 8.43148756e+05] [-1.62432635e+06 3.24980401e+06 -1.58793214e+06 -8.21632871e+05 -4.81124313e+05 -1.88468148e+06 -3.67223578e+05 -1.15285906e+06 -1.34237341e+06 2.74715718e+06 4.57906589e+06 2.30331120e+06 1.12711109e+06 1.12191889e+06 -3.37784752e+06 -4.78827378e+06 2.12811998e+06 7.12428695e+05 -6.16455578e+05 -1.23674020e+06 2.33023012e+06 2.05684174e+06] [-4.29824989e+05 2.95868769e+06 -4.76811953e+05 1.88276769e+06 4.13035624e+05 -3.43051716e+05 5.31928361e+05 8.23714639e+05 -3.93336578e+05 3.62104349e+06 3.88535577e+06 3.34662401e+06 1.42286382e+06 1.60462662e+06 -9.34264973e+05 -4.54242627e+06 3.30789919e+06 1.41573627e+06 1.46083097e+06 1.20876312e+05 2.36562642e+06 2.43544971e+06] [ 3.05985939e+05 2.17820312e+06 2.47301365e+05 2.02075747e+06 7.78830042e+05 4.33340936e+05 9.17524313e+05 1.21621069e+06 2.19246907e+05 2.77520461e+06 2.75292455e+06 2.62605687e+06 1.27851339e+06 1.41115686e+06 1.85786600e+05 -2.93410760e+06 2.66390536e+06 1.39303251e+06 1.66086203e+06 7.38313957e+05 1.84609249e+06 1.92074220e+06] [ 5.10206577e+05 9.33345523e+05 5.54438974e+05 9.41326230e+05 6.68637358e+05 5.00260433e+05 7.30009812e+05 7.06376537e+05 5.29541628e+05 1.04584341e+06 1.03409288e+06 1.02915962e+06 7.29428066e+05 7.76408707e+05 5.15099551e+05 -7.63327720e+05 1.03206197e+06 8.05292914e+05 8.54433187e+05 5.94243503e+05 8.63290980e+05 8.74784948e+05] [ 2.30881562e+04 2.30643543e+04 2.31376579e+04 6.66722676e+04 2.79292977e+04 2.48746053e+04 2.97870261e+04 4.89800516e+04 1.91618377e+04 3.95252713e+04 2.24038494e+04 4.23669291e+04 1.89518937e+04 2.47677536e+04 4.46299519e+04 -4.02953427e+04 4.55130041e+04 2.67341186e+04 5.72056774e+04 2.30645356e+04 2.44762873e+04 2.84650373e+04] [-1.26440408e+04 -1.23860956e+04 -1.30605921e+04 -8.89253755e+03 -1.38697706e+04 -1.01939989e+04 -1.25329912e+04 -9.38895291e+03 -1.36848795e+04 -9.20601241e+03 -1.16722345e+04 -9.32285375e+03 -1.19435969e+04 -1.26122885e+04 -9.88226104e+03 -1.30139320e+04 -9.39592356e+03 -1.20915283e+04 -9.53590122e+03 -1.03539746e+04 -1.22729148e+04 -1.21640782e+04] [-7.86226942e-01 -3.11444516e-01 -5.95948067e-01 3.02085817e-01 -2.66853889e-03 4.13691036e-02 5.81726971e-02 -5.31873951e-01 -4.82134181e-01 3.57813355e-01 -5.37921880e-01 -7.85730324e-01 8.27641195e-02 -9.18201655e-02 -4.02846872e-01 7.51030929e-01 3.54063573e-01 -4.62385691e-01 5.61019647e-01 9.15034767e-01 -9.29768894e-01 -5.22425157e-01] [ 9.15841137e-02 -9.45430148e-01 6.57554623e-01 2.12369659e-01 -3.97863695e-01 -8.56142372e-02 -6.55890659e-01 5.27079149e-01 9.35581131e-01 -8.85176951e-01 
-4.45768582e-01 1.29505750e-01 7.51332924e-01 -9.48945568e-01 -2.10212775e-01 -2.03126617e-01 -8.60731061e-01 -2.80522496e-02 6.48005884e-01 -7.22334182e-01 4.19468912e-01 8.05783238e-01] [ 2.31634119e+04 -1.49684051e+04 -2.63238761e+03 6.11873605e+04 -2.22054915e+04 6.49342744e+03 1.58397403e+04 -1.50063591e+04 -1.79235031e+04 2.01137474e+04 -2.10614221e+02 2.30650379e+04 7.31713371e+03 7.57345199e+03 1.36569739e+04 -1.76720602e+05 7.20396608e+03 1.30510626e+04 -1.45349434e+04 1.32573305e+04 -2.68124393e+02 4.10674456e+03] [ 2.87645640e+05 6.00196172e+05 2.23362533e+05 1.43470163e+06 2.26766035e+05 2.72241841e+05 5.62529061e+05 6.79774850e+05 7.01203815e+04 1.13746571e+06 5.56637674e+05 1.11941008e+06 3.86005209e+05 5.48745862e+05 5.67268152e+05 -1.94651603e+06 1.08015561e+06 5.91652670e+05 9.22253677e+05 3.33545343e+05 5.82546437e+05 6.44380802e+05] [-6.66771110e+05 8.55950342e+05 -8.31401051e+05 3.23852930e+06 -4.33415962e+05 -6.67509203e+05 1.49485426e+05 9.39820783e+05 -1.23890055e+06 2.62920412e+06 7.85473769e+05 2.45698556e+06 -3.53260479e+05 2.46223584e+05 2.37304579e+05 -5.78913661e+06 2.62690084e+06 4.18536867e+05 1.86583740e+06 -6.04182115e+05 4.77366018e+05 7.19830089e+05] [-5.18264034e+06 -1.78679350e+06 -5.75390328e+06 6.83231448e+05 -5.11244538e+06 -4.86161529e+06 -4.04960105e+06 -2.41242221e+06 -6.22128776e+06 7.69164845e+05 -1.46635729e+06 5.45948137e+05 -4.10096769e+06 -3.45634588e+06 -3.89925604e+06 -8.95998762e+06 7.04151933e+05 -3.53935366e+06 -9.79218732e+05 -5.01419394e+06 -2.58927798e+06 -2.49174158e+06] [-9.61165231e+06 -3.41800006e+06 -1.03759817e+07 -3.79349641e+06 -9.57027679e+06 -8.86744387e+06 -7.95171583e+06 -6.18647884e+06 -1.07778750e+07 -1.52057571e+06 -2.35796752e+06 -1.75791788e+06 -6.80920309e+06 -6.46564822e+06 -8.71258300e+06 -1.11674567e+07 -1.89632343e+06 -7.15158726e+06 -4.54177122e+06 -9.22573871e+06 -4.69059568e+06 -5.06051884e+06] [-1.27760323e+07 -2.20327204e+06 -1.38069421e+07 -7.97815487e+06 -1.25231467e+07 -1.17282463e+07 -1.01598707e+07 -9.30479224e+06 -1.40348742e+07 -1.51674020e+06 6.34133246e+05 -2.11282628e+06 -6.60887347e+06 -6.95245096e+06 -1.38587297e+07 -1.38412372e+07 -2.79164682e+06 -8.27874140e+06 -7.92341431e+06 -1.16622239e+07 -3.95939526e+06 -4.74816316e+06] [-1.07995769e+07 4.62430097e+06 -1.16217863e+07 -9.38835582e+06 -9.89385965e+06 -1.01081955e+07 -6.89498350e+06 -8.86828627e+06 -1.12838970e+07 2.56380448e+06 9.60557934e+06 1.49013363e+06 -2.36624751e+05 -1.74818610e+06 -1.57357631e+07 -1.48436181e+07 -1.85203165e+04 -3.60634809e+06 -8.33673216e+06 -8.70862723e+06 2.40402094e+06 1.18604005e+06] [-6.24278260e+06 1.33273596e+07 -6.68888197e+06 -7.24793595e+06 -4.70602576e+06 -5.89997850e+06 -8.12360723e+05 -5.60087109e+06 -5.82406465e+06 9.25727427e+06 2.02868706e+07 7.71666256e+06 8.32377537e+06 5.70334149e+06 -1.46045762e+07 -1.58280875e+07 5.40803616e+06 3.61991164e+06 -5.78261677e+06 -2.98639436e+06 1.07744306e+07 9.25555843e+06] [-2.84191691e+06 1.77865948e+07 -2.93708230e+06 -2.04803366e+06 -9.51973985e+05 -2.16265326e+06 3.72080230e+06 -1.65359931e+06 -2.22676026e+06 1.45927725e+07 2.51519605e+07 1.26806849e+07 1.28777669e+07 1.01743208e+07 -1.15602384e+07 -1.87042972e+07 1.03353410e+07 8.45908211e+06 -1.85669332e+06 1.42257660e+06 1.52747769e+07 1.39847062e+07] [-8.69419731e+04 1.67026734e+07 -7.93864795e+04 4.97964206e+06 1.41761444e+06 7.77392611e+05 5.91521498e+06 2.26856208e+06 -8.59093169e+04 1.65993702e+07 2.29072227e+07 1.47801771e+07 1.27005018e+07 1.08771075e+07 -6.28125321e+06 -1.94057769e+07 
1.32071664e+07 1.00003842e+07 2.48726429e+06 4.11773016e+06 1.47901896e+07 1.42936239e+07] [ 2.97800782e+06 1.28918487e+07 3.15929281e+06 1.14148816e+07 3.70816671e+06 4.26052365e+06 7.30849195e+06 6.28212219e+06 2.19389176e+06 1.58948137e+07 1.71694031e+07 1.46450957e+07 1.06092354e+07 9.81049973e+06 6.11550421e+05 -1.67131229e+07 1.41218888e+07 1.00531407e+07 6.91704125e+06 6.51935868e+06 1.17984056e+07 1.21726815e+07] [ 4.14747439e+06 6.27293082e+06 4.48099811e+06 1.23041752e+07 4.12457337e+06 5.75426240e+06 5.89134302e+06 7.38426329e+06 3.29489174e+06 1.06817156e+07 8.30854217e+06 1.02789873e+07 6.28795930e+06 6.16352187e+06 5.26951326e+06 -8.89667183e+06 1.06845928e+07 7.07861938e+06 7.84485193e+06 6.64423917e+06 6.17963746e+06 7.11205140e+06] [ 4.14854319e+06 2.42415838e+06 5.33126278e+06 5.40457877e+06 5.38565832e+06 5.24020492e+06 4.02283324e+06 5.80302281e+06 5.22667236e+06 3.68687034e+06 1.94128805e+06 3.92942392e+06 2.94462719e+06 3.10441457e+06 5.93119165e+06 2.53383752e+06 4.85196104e+06 3.94869101e+06 6.14789374e+06 5.04491916e+06 2.26386448e+06 2.70225026e+06] [ 2.37280436e+06 2.52550031e+06 3.85007181e+06 -3.87760456e+06 4.96053433e+06 2.48610244e+06 2.00564569e+06 1.88733448e+06 4.97828726e+06 -7.72545414e+05 1.65510945e+06 -4.93151512e+05 1.88503625e+06 1.70553365e+06 1.69529745e+06 7.91855578e+06 1.53662862e+05 1.90371367e+06 2.08597198e+06 2.20757324e+06 1.64954331e+06 1.19140599e+06] [-3.61632920e+05 4.26030166e+06 5.16347120e+05 -1.03513129e+07 1.75580657e+06 -1.23914080e+06 3.50298190e+05 -3.28463463e+06 2.23673280e+06 -2.08258433e+06 5.13577105e+06 -2.03743396e+06 2.97915611e+06 1.84624417e+06 -4.92348567e+06 5.19624627e+06 -2.72585207e+06 9.43592848e+05 -3.71182218e+06 -4.99713871e+05 3.26688436e+06 1.89778027e+06] [-4.64932723e+06 3.67603953e+06 -4.70178031e+06 -1.42738278e+07 -3.65082254e+06 -6.07017884e+06 -2.85484458e+06 -8.63399561e+06 -2.95075152e+06 -3.26076051e+06 6.77869466e+06 -3.57556090e+06 2.00185308e+06 1.45670645e+05 -1.18507868e+07 -9.63102348e+05 -5.24871672e+06 -1.56584201e+06 -9.63463711e+06 -4.33051769e+06 2.79244243e+06 1.02812209e+06] [-7.14157350e+06 2.95896184e+06 -7.59671078e+06 -1.44284968e+07 -6.23987802e+06 -8.71539470e+06 -4.93893353e+06 -1.06413408e+07 -6.18049788e+06 -2.80245184e+06 6.56041322e+06 -3.49662516e+06 1.96467991e+05 -1.36427233e+06 -1.47754649e+07 -5.65261638e+06 -5.17959791e+06 -3.14961429e+06 -1.12823219e+07 -6.73806245e+06 1.64348934e+06 -6.72893310e+03] [-6.91286236e+06 2.38481098e+06 -7.30936255e+06 -1.12596028e+07 -5.75795463e+06 -8.24133346e+06 -4.88477425e+06 -8.88396931e+06 -6.32515082e+06 -1.45186581e+06 5.31248307e+06 -2.16791694e+06 -9.27384226e+05 -1.84262797e+06 -1.28584497e+07 -7.00415714e+06 -3.38973536e+06 -3.27987951e+06 -8.79421441e+06 -6.62717477e+06 8.21638955e+05 -4.42597494e+05] [-4.92254492e+06 3.25290006e+06 -5.08573448e+06 -5.80974128e+06 -3.46508699e+06 -5.73500148e+06 -2.94559789e+06 -5.16283767e+06 -4.58477430e+06 1.40354782e+06 5.17009865e+06 6.98462254e+05 -2.48066308e+05 -5.17600293e+05 -8.67977842e+06 -8.24038637e+06 3.88694639e+04 -1.44552664e+06 -4.33471834e+06 -4.58712875e+06 1.62941387e+06 8.60605832e+05] [-3.08311060e+06 3.45290675e+06 -3.22838920e+06 -1.00487921e+06 -1.87022331e+06 -3.15070638e+06 -1.20145308e+06 -2.00032734e+06 -3.08556079e+06 3.53970838e+06 4.94296117e+06 2.98106777e+06 5.67163998e+05 5.84827935e+05 -4.86479596e+06 -8.56518675e+06 2.62857050e+06 1.34314198e+05 -9.50221610e+05 -2.34286634e+06 2.23633683e+06 1.84485599e+06] [-5.17567573e+05 3.83327174e+06 
-5.44338337e+05 2.23658756e+06 6.26277610e+05 -5.99462227e+05 7.94555025e+05 9.21563778e+05 -5.23134971e+05 4.46184799e+06 4.72197266e+06 4.06706424e+06 1.64970568e+06 1.94991693e+06 -1.16405154e+06 -6.17595641e+06 4.06846875e+06 1.81729235e+06 1.90949553e+06 -4.58119559e+04 2.96679820e+06 2.96129191e+06] [ 9.21416998e+04 3.06352825e+06 3.24465415e+04 3.24979165e+06 9.34699859e+05 2.12474850e+05 1.05190173e+06 1.78526914e+06 -8.98462777e+04 4.28091637e+06 3.68811439e+06 4.00287507e+06 1.39978163e+06 1.79509371e+06 1.70543385e+05 -5.09956715e+06 4.14277898e+06 1.84977240e+06 2.69261445e+06 5.90019229e+05 2.43253194e+06 2.64148301e+06] [ 4.84917099e+05 1.97007880e+06 4.50614241e+05 2.14485527e+06 8.99203595e+05 5.55987018e+05 1.05276372e+06 1.36296028e+06 3.33173032e+05 2.63434706e+06 2.37457966e+06 2.49755955e+06 1.14379567e+06 1.32570904e+06 5.17504816e+05 -2.73030458e+06 2.58475234e+06 1.41232224e+06 1.83723923e+06 8.05262442e+05 1.66015588e+06 1.76989850e+06] [ 3.76884039e+05 6.34648450e+05 4.18431756e+05 6.91060227e+05 4.62658825e+05 3.65908135e+05 5.31800775e+05 5.30388092e+05 3.80087273e+05 7.18583747e+05 6.79494334e+05 7.14758007e+05 5.06904315e+05 5.44502991e+05 4.20726676e+05 -4.89965201e+05 7.24330984e+05 5.61032274e+05 6.55853059e+05 4.27963639e+05 5.92595606e+05 6.02159762e+05] [ 2.75041294e+03 3.08052895e+04 9.48108689e+03 4.00223122e+04 5.34306795e+03 7.86743371e+03 2.33688971e+04 3.41526504e+04 6.02381421e+02 3.41230329e+04 2.55118933e+04 3.52393322e+04 2.05105642e+04 2.39475088e+04 2.11985256e+04 -5.11041511e+04 3.53883787e+04 1.79131124e+04 4.94697058e+04 7.21056753e+03 2.81273206e+04 2.67884905e+04] [-1.08730087e+04 -1.08704968e+04 -1.13603263e+04 -7.91793465e+03 -1.19532339e+04 -8.85919518e+03 -1.09912930e+04 -8.20880057e+03 -1.17662331e+04 -8.13559587e+03 -1.04468062e+04 -8.23922379e+03 -1.05553818e+04 -1.10418328e+04 -8.54377423e+03 -9.85542163e+03 -8.25182280e+03 -1.06804305e+04 -8.35642653e+03 -9.14772343e+03 -1.08234152e+04 -1.07209980e+04] [-3.14551726e-01 6.02967900e-01 -3.09902558e-01 -9.97615735e-01 7.54103528e-03 -8.19601033e-01 3.11828299e-01 8.98123154e-01 6.98214935e-01 -8.66436465e-01 -5.50020238e-01 3.64491293e-01 -6.31352974e-01 -6.38957247e-01 4.11289356e-01 -7.90000805e-01 -9.28265804e-01 5.52657546e-01 -6.43696225e-03 -8.25658122e-01 7.54033185e-01 9.88830009e-01] [ 1.87249831e+03 2.08550533e+03 3.76268760e+03 8.61744333e+03 6.73216349e+03 8.61897013e+02 4.06864232e+03 5.23842997e+03 2.71132333e+03 6.60516664e+03 1.35213866e+02 5.37129336e+03 -3.56773288e+03 -9.03939928e+01 5.11016749e+03 -1.09346417e+04 8.64426093e+03 3.58724248e+03 8.69792162e+03 -6.08548605e+02 -1.22684139e+03 -1.00909889e+02] [ 2.45981631e+04 7.56171147e+03 2.92983799e+04 1.36615931e+05 5.46394504e+04 3.02220978e+03 5.12181510e+04 5.43577455e+04 3.59347428e+03 9.06063926e+04 -2.06123287e+04 7.82793662e+04 -5.23418987e+04 -5.78198381e+03 6.57656395e+04 -2.53992719e+05 1.06200826e+05 3.78507404e+04 1.02951312e+05 -1.13121394e+04 -2.16005446e+04 -4.61195910e+03] [ 2.08616595e+05 5.88709748e+05 1.90131289e+05 1.51682647e+06 2.37104408e+05 2.20349242e+05 5.32149249e+05 7.25814622e+05 2.13977121e+04 1.24770827e+06 5.19282264e+05 1.21320575e+06 2.67620767e+05 4.76165274e+05 5.84675265e+05 -2.26343142e+06 1.21037425e+06 5.67853950e+05 1.06228605e+06 2.53286934e+05 5.12090878e+05 5.80673455e+05] [-3.15747295e+05 7.67266320e+05 -5.30218688e+05 3.29013057e+06 -1.87483580e+05 -3.50267848e+05 3.56381197e+05 1.09130903e+06 -9.59048027e+05 2.57063527e+06 6.42258040e+05 2.41315741e+06 
-2.89309521e+05 2.71906333e+05 5.79522152e+05 -5.66994053e+06 2.57335119e+06 5.48290236e+05 1.97186778e+06 -2.96074642e+05 4.66611838e+05 6.91289234e+05] [-3.95778528e+06 -1.29139650e+06 -4.63375953e+06 2.11487610e+06 -4.01124926e+06 -3.79590932e+06 -2.99545246e+06 -1.43097834e+06 -5.26500112e+06 1.67178399e+06 -1.18169490e+06 1.41747576e+06 -3.43069635e+06 -2.64922655e+06 -2.60700779e+06 -9.67299796e+06 1.58035276e+06 -2.51106213e+06 -7.16249599e+02 -3.91034494e+06 -1.96631520e+06 -1.72156635e+06] [-1.00472739e+07 -4.23245419e+06 -1.12644663e+07 -1.42329217e+06 -1.03045380e+07 -9.29280076e+06 -8.54329493e+06 -5.70324206e+06 -1.21237927e+07 -3.12792910e+05 -3.56766328e+06 -7.13544707e+05 -8.03134092e+06 -7.14550151e+06 -8.31070295e+06 -1.45544296e+07 -7.34952304e+05 -7.54472306e+06 -3.61589743e+06 -9.79257498e+06 -5.55750532e+06 -5.44881826e+06] [-1.45108131e+07 -4.63631202e+06 -1.62786480e+07 -5.57180534e+06 -1.50008520e+07 -1.32612881e+07 -1.21288543e+07 -9.53883426e+06 -1.71332686e+07 -1.07858014e+06 -2.50162628e+06 -1.74738286e+06 -9.55676756e+06 -9.20415027e+06 -1.38563765e+07 -1.82663897e+07 -2.30092899e+06 -1.01903730e+07 -7.42679429e+06 -1.36281922e+07 -6.44184350e+06 -6.72137148e+06] [-1.50369929e+07 -5.18357320e+05 -1.68430714e+07 -7.59434728e+06 -1.51466243e+07 -1.36875230e+07 -1.13859293e+07 -1.06469983e+07 -1.72898372e+07 1.38050923e+06 3.59144359e+06 3.55087884e+05 -6.13034400e+06 -6.75644156e+06 -1.70395817e+07 -2.07905764e+07 -8.99044315e+05 -8.17943168e+06 -8.97666629e+06 -1.31000949e+07 -2.77681382e+06 -3.49557202e+06] [-1.32763591e+07 5.45639282e+06 -1.49081718e+07 -7.67777079e+06 -1.29349815e+07 -1.19305803e+07 -8.37848035e+06 -9.85648854e+06 -1.48855794e+07 5.90193297e+06 1.17570191e+07 4.34873862e+06 -4.68831485e+05 -2.15003793e+06 -1.85793167e+07 -2.23751825e+07 2.30910761e+06 -3.85756708e+06 -8.91072915e+06 -1.01656129e+07 2.79246574e+06 1.81130676e+06] [-9.95338722e+06 9.45007357e+06 -1.11749070e+07 -5.50696601e+06 -9.65060582e+06 -8.46383138e+06 -4.40060627e+06 -7.71987502e+06 -1.09376421e+07 9.11093568e+06 1.68837158e+07 7.35283177e+06 4.51230687e+06 2.14169711e+06 -1.69323674e+07 -2.22527069e+07 4.88886288e+06 4.45983554e+05 -7.52468355e+06 -5.85850640e+06 7.12504393e+06 6.07339472e+06] [-6.74255151e+06 9.90785003e+06 -7.20507718e+06 -2.60341779e+06 -6.22453814e+06 -5.26310236e+06 -1.55776457e+06 -5.13768276e+06 -7.01478941e+06 9.66377308e+06 1.68398087e+07 8.09926189e+06 6.30995704e+06 4.10314868e+06 -1.32237585e+07 -1.89867513e+07 5.93446667e+06 2.60130529e+06 -5.30046489e+06 -2.61096663e+06 8.17405863e+06 7.31169779e+06] [-2.87049269e+06 6.93050542e+06 -2.55960311e+06 -1.80004832e+05 -2.54264889e+06 -1.34321051e+06 4.81508162e+05 -2.17593122e+06 -2.57985388e+06 7.03175529e+06 1.18470977e+07 6.14497420e+06 5.72887065e+06 4.02331239e+06 -7.11441010e+06 -1.12235406e+07 4.73977046e+06 3.07180197e+06 -2.71034690e+06 4.17071970e+05 6.22449399e+06 5.68938940e+06] [-1.75807239e+06 2.53257744e+06 -1.23774866e+06 -1.76980495e+06 -1.38704595e+06 -2.85690267e+05 -5.13955102e+05 -2.00675290e+06 -9.02094080e+05 2.47450786e+06 5.65852493e+06 2.10079749e+06 2.86411240e+06 1.56900862e+06 -4.42126759e+06 -3.52182703e+06 1.41313324e+06 1.05725672e+06 -2.94553452e+06 6.04730913e+05 2.35785518e+06 2.06569620e+06] [-2.90905250e+06 -3.82910891e+05 -2.17586123e+06 -8.83345750e+06 -1.54053780e+06 -2.11803068e+06 -2.98741613e+06 -4.65558058e+06 -7.35143302e+05 -3.11721150e+06 1.56762480e+06 -3.19156713e+06 -6.19005773e+04 -1.24355789e+06 -6.12813530e+06 4.37004249e+06 -3.65967034e+06 
-1.92244443e+06 -5.91244377e+06 -1.66875148e+06 -7.42887862e+05 -1.41339772e+06] [-6.01703351e+06 -1.57995521e+06 -5.94333074e+06 -1.53283697e+07 -4.69639582e+06 -5.68791404e+06 -6.19072035e+06 -8.87123942e+06 -3.95687527e+06 -6.12981386e+06 6.57217068e+05 -6.32677528e+06 -2.34983502e+06 -3.82174180e+06 -1.09795431e+07 4.51728245e+06 -7.20307826e+06 -4.78852963e+06 -1.01828393e+07 -5.15399382e+06 -2.44999688e+06 -3.62900090e+06] [-6.94934330e+06 -7.83838541e+04 -7.35888588e+06 -1.76990949e+07 -6.13153746e+06 -7.29990854e+06 -6.16677317e+06 -1.10709767e+07 -5.42519219e+06 -6.19548118e+06 2.79371693e+06 -6.57058820e+06 -1.37896335e+06 -3.27084193e+06 -1.37682520e+07 1.68964466e+06 -8.14761865e+06 -4.66658063e+06 -1.24694980e+07 -6.29449683e+06 -1.04625229e+06 -2.71906373e+06] [-8.53949200e+06 1.26013644e+05 -9.36859304e+06 -1.68950229e+07 -8.08756991e+06 -9.26997260e+06 -6.92483224e+06 -1.21556644e+07 -7.95174754e+06 -5.18834057e+06 3.41795299e+06 -5.78723592e+06 -2.19682115e+06 -3.91436102e+06 -1.55233200e+07 -3.78651227e+06 -7.52558056e+06 -5.32247562e+06 -1.30186372e+07 -7.99190339e+06 -1.16733521e+06 -2.88377122e+06] [-7.71904369e+06 1.67579009e+06 -8.32363598e+06 -1.27726767e+07 -6.66914676e+06 -8.60086116e+06 -5.57115763e+06 -9.72876806e+06 -7.39104148e+06 -2.41328646e+06 4.22023206e+06 -3.16700010e+06 -1.71086386e+06 -2.71135750e+06 -1.35141055e+07 -7.09175423e+06 -4.46064849e+06 -3.97109082e+06 -9.73863435e+06 -7.28018208e+06 4.10681630e+04 -1.36275293e+06] [-5.56933756e+06 2.67490880e+06 -5.95150948e+06 -6.25470260e+06 -4.17838531e+06 -6.18622552e+06 -3.47462272e+06 -5.44841861e+06 -5.47991061e+06 1.02718663e+06 4.24785502e+06 3.31331432e+05 -1.05995844e+06 -1.27540775e+06 -8.95974356e+06 -8.93974594e+06 -3.63302570e+05 -2.11417667e+06 -4.50682405e+06 -5.17782926e+06 9.94674665e+05 1.53812727e+05] [-3.65212454e+06 3.43993460e+06 -3.95569667e+06 -2.91925043e+05 -2.23992500e+06 -3.75264977e+06 -1.54639846e+06 -1.75454136e+06 -3.94849965e+06 4.10212245e+06 4.45961101e+06 3.45982453e+06 -1.83542598e+05 1.45652978e+05 -4.96673990e+06 -1.06079982e+07 3.23247654e+06 -2.20788058e+05 -2.51762991e+05 -2.94959479e+06 1.97666942e+06 1.64233252e+06] [-1.76202089e+06 3.27442999e+06 -1.91736723e+06 2.87447417e+06 -7.40012412e+05 -1.62399784e+06 3.94068097e+04 6.41044241e+05 -2.22036592e+06 4.84256365e+06 3.98181968e+06 4.40350449e+06 5.47603426e+05 1.03045437e+06 -1.72377344e+06 -9.18976829e+06 4.42923625e+06 9.77231418e+05 2.10511047e+06 -1.09942605e+06 2.26102315e+06 2.24727635e+06] [-1.90862508e+05 3.27419790e+06 -3.13571980e+05 4.13876618e+06 6.53668066e+05 -8.73061430e+04 1.03306279e+06 2.06987436e+06 -5.84749962e+05 4.99855427e+06 3.85545126e+06 4.68074483e+06 1.21012861e+06 1.76597881e+06 1.90071859e+05 -6.74409283e+06 4.84244469e+06 1.85962550e+06 3.27778094e+06 2.95583257e+05 2.54451811e+06 2.79703295e+06] [ 7.51429437e+05 2.83772390e+06 6.61323518e+05 3.56435920e+06 1.22017950e+06 7.25873153e+05 1.49210564e+06 2.11890073e+06 4.67216939e+05 3.94821749e+06 3.36077881e+06 3.76499010e+06 1.65216919e+06 1.98223471e+06 9.08069576e+05 -4.18184918e+06 3.86741019e+06 2.04858684e+06 2.84786362e+06 1.05148535e+06 2.43886586e+06 2.65745834e+06] [ 9.19933223e+05 1.63756981e+06 9.73644041e+05 1.75329174e+06 1.14696333e+06 8.29065488e+05 1.25938124e+06 1.29056532e+06 8.79714314e+05 1.86266826e+06 1.84851348e+06 1.80044219e+06 1.29515936e+06 1.39627730e+06 8.76650855e+05 -1.40619044e+06 1.84482463e+06 1.40098696e+06 1.53748433e+06 1.01671594e+06 1.54590512e+06 1.60506802e+06] [ 3.70096224e+05 
5.02945162e+05 4.22527014e+05 4.12879012e+05 4.57696320e+05 3.07981875e+05 4.41746750e+05 3.80918426e+05 4.26119573e+05 4.55843606e+05 5.13318685e+05 4.48891763e+05 4.55459139e+05 4.67286051e+05 3.23043066e+05 -1.32857665e+05 4.58162939e+05 4.64011091e+05 4.30992180e+05 3.69831461e+05 4.92092284e+05 4.92940558e+05] [ 3.13994719e+04 5.71913759e+04 4.00658017e+04 4.42211553e+04 3.66392432e+04 2.61694672e+04 4.79906788e+04 4.10980724e+04 3.53164134e+04 4.47013230e+04 5.57412399e+04 4.52120795e+04 5.09208875e+04 5.08553555e+04 3.16424949e+04 -1.31033470e+04 4.51392363e+04 4.48762599e+04 5.18409078e+04 3.04745620e+04 5.49501348e+04 5.24647199e+04] [-4.81726256e+03 2.40610279e+03 -5.90105559e+03 7.12501762e+03 -7.74267094e+03 -3.35193029e+03 -1.10857121e+03 -2.43869638e+03 -7.98834025e+03 7.63121977e+03 4.22948312e+03 6.85042238e+03 1.99883704e+03 1.09985774e+03 -4.62192564e+03 -1.90250370e+04 5.51239735e+03 2.08410177e+02 -9.84937805e+02 -3.14450923e+03 2.50391879e+03 2.24228381e+03] [-5.39296275e-01 3.54316488e-01 4.72681428e-02 5.69672735e-01 1.02520799e-01 -4.18748063e-01 6.79049537e-01 -3.48888799e-01 -3.11077981e-01 7.92743650e-01 3.12122838e-01 8.73108814e-01 2.34954539e-01 5.03700787e-01 -4.97054790e-01 -5.41460550e-01 4.82750634e-01 8.97148157e-01 -1.61374999e-01 -2.77598673e-01 -2.64241184e-01 4.76624155e-01] [-8.63124792e+03 3.56807977e+03 2.01710231e+04 7.51884272e+04 6.74330954e+04 -1.63571860e+04 2.22509509e+04 5.15763368e+04 7.45715459e+03 6.64095684e+04 -2.78402584e+04 4.80342512e+04 -8.03080442e+04 -3.17832787e+04 3.80118457e+04 -9.63754239e+04 9.82313549e+04 1.60917456e+04 1.00259264e+05 -3.88887090e+04 -4.46433882e+04 -3.04613062e+04] [-4.44705201e+04 7.10234517e+04 -8.43533364e+03 1.90792997e+05 3.21680767e+04 -4.49540165e+04 5.65439876e+04 1.09641162e+05 -6.06728213e+04 1.80549064e+05 2.79447410e+04 1.54814943e+05 -7.11945968e+04 -6.43333851e+03 5.41429095e+04 -4.25892366e+05 2.13192982e+05 4.11874068e+04 2.14765970e+05 -6.61690945e+04 -3.78395783e+03 1.41079088e+04] [ 2.01678615e+05 5.38684395e+05 1.75672281e+05 1.32930878e+06 2.22828838e+05 2.52546899e+05 4.80625548e+05 6.55938503e+05 1.64751138e+04 1.14058569e+06 4.93472752e+05 1.09972182e+06 2.52721111e+05 4.17082180e+05 5.23321569e+05 -1.96792292e+06 1.09127589e+06 5.20408376e+05 9.45943492e+05 2.58395146e+05 4.54438344e+05 5.14078761e+05] [-2.89098429e+05 3.49023594e+05 -5.80792512e+05 2.68372748e+06 -3.79993093e+05 -2.64679698e+05 1.52541878e+05 7.92759180e+05 -9.60620484e+05 2.03305192e+06 2.74026293e+05 1.90489866e+06 -4.08355205e+05 1.98372700e+04 4.53809503e+05 -4.72201572e+06 1.98715215e+06 3.00994586e+05 1.46106645e+06 -2.27756866e+05 1.71618083e+05 3.58110744e+05] [-2.60288428e+06 -1.02606057e+06 -3.30402330e+06 2.99524325e+06 -2.95282776e+06 -2.45212746e+06 -1.86027081e+06 -5.92424011e+05 -4.01287360e+06 1.95047239e+06 -9.94815471e+05 1.71657141e+06 -2.50329347e+06 -1.77133633e+06 -1.30451346e+06 -8.90180709e+06 1.80094817e+06 -1.53059290e+06 5.28507818e+05 -2.55900567e+06 -1.38073650e+06 -1.07949532e+06] [-7.99075687e+06 -4.10802732e+06 -9.50557935e+06 1.41628971e+06 -8.89200009e+06 -7.38791353e+06 -6.92045159e+06 -4.02474524e+06 -1.05568704e+07 8.99902397e+05 -3.70260531e+06 5.43947354e+05 -7.00194082e+06 -5.96474986e+06 -5.87015736e+06 -1.48734497e+07 4.39192122e+05 -6.05850384e+06 -2.09907362e+06 -7.80049986e+06 -4.92807488e+06 -4.53338392e+06] [-1.39455116e+07 -6.96623998e+06 -1.62822231e+07 -2.73916823e+06 -1.52220259e+07 -1.27025454e+07 -1.24663721e+07 -8.64270982e+06 -1.74930002e+07 -1.03167527e+06 
-5.70452836e+06 -1.64414186e+06 -1.12799295e+07 -1.03204892e+07 -1.19088805e+07 -1.94007194e+07 -2.02615097e+06 -1.08021109e+07 -6.33296385e+06 -1.34049871e+07 -8.35537651e+06 -8.07703773e+06] [-1.84393920e+07 -7.85871450e+06 -2.16244434e+07 -6.31705696e+06 -2.02710131e+07 -1.64551199e+07 -1.63105345e+07 -1.25258045e+07 -2.28048863e+07 -1.38068286e+06 -4.74345832e+06 -2.27016021e+06 -1.30582456e+07 -1.28415273e+07 -1.74864757e+07 -2.40562046e+07 -3.14031339e+06 -1.34637838e+07 -1.02768858e+07 -1.68218426e+07 -9.72806038e+06 -9.68824286e+06] [-2.09125713e+07 -7.85099020e+06 -2.43014218e+07 -8.64185913e+06 -2.31056630e+07 -1.84636395e+07 -1.82753918e+07 -1.49682271e+07 -2.53933548e+07 -1.40108077e+06 -3.16514455e+06 -2.55771150e+06 -1.31767677e+07 -1.36643661e+07 -2.12696457e+07 -2.57706504e+07 -3.97111342e+06 -1.45998932e+07 -1.31733067e+07 -1.85336690e+07 -9.89198729e+06 -9.95055565e+06] [-1.99454313e+07 -5.89220784e+06 -2.30994493e+07 -1.00422781e+07 -2.23935053e+07 -1.73191351e+07 -1.69811120e+07 -1.56438457e+07 -2.37945422e+07 -8.99231643e+05 3.24379617e+05 -2.23571762e+06 -1.00890358e+07 -1.15954247e+07 -2.25042317e+07 -2.40654094e+07 -4.29259540e+06 -1.28893140e+07 -1.50193110e+07 -1.67533234e+07 -7.55673712e+06 -7.92792805e+06] [-1.53952654e+07 -3.62853089e+06 -1.70988851e+07 -1.13300232e+07 -1.68923772e+07 -1.32636702e+07 -1.29543575e+07 -1.38298929e+07 -1.71430725e+07 -1.91039425e+06 1.96467241e+06 -2.99852966e+06 -6.18363332e+06 -8.12206688e+06 -1.94155577e+07 -1.54617263e+07 -5.02851804e+06 -9.58563667e+06 -1.42029886e+07 -1.25331926e+07 -4.84799275e+06 -5.60725733e+06] [-1.19443752e+07 -4.08687287e+06 -1.24780941e+07 -1.20511193e+07 -1.25546042e+07 -1.02391495e+07 -1.05405480e+07 -1.21829151e+07 -1.20552846e+07 -4.33532444e+06 -2.49557663e+03 -5.08270850e+06 -5.07217630e+06 -6.84037235e+06 -1.58461979e+07 -7.03916454e+06 -6.61111241e+06 -8.19781539e+06 -1.31803028e+07 -9.75022789e+06 -4.78070629e+06 -5.59064913e+06] [-1.07249786e+07 -6.51798715e+06 -1.11196819e+07 -1.45338487e+07 -1.06422963e+07 -9.45506699e+06 -1.09678097e+07 -1.22809421e+07 -9.83115684e+06 -7.68920130e+06 -3.57314979e+06 -8.12679493e+06 -6.60398820e+06 -8.07199615e+06 -1.45033058e+07 1.66537412e+05 -9.12470904e+06 -9.12267614e+06 -1.37344244e+07 -9.19378595e+06 -6.95064322e+06 -7.65055314e+06] [-1.25762581e+07 -8.87518657e+06 -1.35092476e+07 -1.90884893e+07 -1.24374748e+07 -1.15645042e+07 -1.35225131e+07 -1.49691802e+07 -1.15449117e+07 -1.09043764e+07 -6.05536787e+06 -1.13158764e+07 -9.12316093e+06 -1.05975649e+07 -1.70123250e+07 2.77205862e+06 -1.23597496e+07 -1.17288733e+07 -1.67470925e+07 -1.13814306e+07 -9.37844773e+06 -1.03568821e+07] [-1.39974324e+07 -1.03200979e+07 -1.55190876e+07 -2.12130493e+07 -1.45542117e+07 -1.32546232e+07 -1.49923787e+07 -1.66977967e+07 -1.37758646e+07 -1.26978143e+07 -7.70165819e+06 -1.31044264e+07 -1.09226321e+07 -1.23835207e+07 -1.84943449e+07 1.93435470e+06 -1.43201050e+07 -1.34927455e+07 -1.83963295e+07 -1.33303315e+07 -1.09338254e+07 -1.20617033e+07] [-1.23078763e+07 -7.51382144e+06 -1.37203309e+07 -1.88741535e+07 -1.27633649e+07 -1.20385398e+07 -1.24028607e+07 -1.47501357e+07 -1.24212944e+07 -1.01514974e+07 -5.33394380e+06 -1.05789372e+07 -8.95821774e+06 -1.02351687e+07 -1.66530490e+07 -1.02652221e+06 -1.19070105e+07 -1.12756178e+07 -1.56775385e+07 -1.21021377e+07 -8.45660752e+06 -9.74526155e+06] [-9.49394906e+06 -3.85381417e+06 -1.08350660e+07 -1.31572985e+07 -9.72501696e+06 -9.67615344e+06 -8.86622209e+06 -1.09107087e+07 -1.02296231e+07 -5.63958431e+06 -2.23405973e+06 
-6.15824313e+06 -6.34206733e+06 -7.06564887e+06 -1.30584714e+07 -5.37816551e+06 -7.24101033e+06 -7.80002141e+06 -1.08932217e+07 -9.47162637e+06 -5.10505730e+06 -6.15681728e+06] [-7.01859616e+06 -1.06967743e+06 -8.04672797e+06 -6.45139650e+06 -6.70275307e+06 -7.31643132e+06 -5.68544668e+06 -6.47950193e+06 -8.02180383e+06 -1.25398842e+06 -1.70231057e+05 -1.85084081e+06 -4.30324747e+06 -4.24433141e+06 -8.86593347e+06 -8.91685594e+06 -2.42779166e+06 -4.69299365e+06 -5.54626735e+06 -6.94772677e+06 -2.48611550e+06 -3.04364653e+06] [-3.85004312e+06 1.02712140e+06 -4.44395193e+06 -8.01616529e+05 -3.09490790e+06 -4.08504549e+06 -2.45466989e+06 -2.22504554e+06 -4.68716770e+06 1.92656590e+06 1.15172050e+06 1.41913870e+06 -2.05816671e+06 -1.51355342e+06 -4.31317222e+06 -9.11633958e+06 1.28825215e+06 -1.67091165e+06 -8.62078022e+05 -3.74064492e+06 -2.17238534e+05 -3.84240963e+05] [-1.83603749e+06 2.72767141e+06 -2.15656439e+06 2.56336182e+06 -8.42762301e+05 -1.84269177e+06 -2.83682061e+05 4.84999714e+05 -2.49924630e+06 4.22166956e+06 2.87715846e+06 3.73482662e+06 -6.55108171e+04 5.65003905e+05 -1.72666534e+06 -8.89873539e+06 3.86434998e+06 5.64666786e+05 1.96357944e+06 -1.37636135e+06 1.73607592e+06 1.72934493e+06] [-6.16123980e+05 2.84102901e+06 -8.02901532e+05 3.79549093e+06 1.14415624e+05 -4.78609511e+05 7.50819115e+05 1.75075791e+06 -1.22350141e+06 4.52512180e+06 2.98092213e+06 4.16278778e+06 7.03355914e+05 1.25305182e+06 -3.64677655e+04 -7.27521901e+06 4.39993294e+06 1.41120161e+06 3.06452252e+06 -1.27371891e+05 2.08796933e+06 2.23038044e+06] [ 3.21466795e+05 2.64704251e+06 2.32141152e+05 3.64333490e+06 7.61758841e+05 3.98478333e+05 1.30540829e+06 2.04855051e+06 -9.20341540e+04 3.94146802e+06 2.93396146e+06 3.71613941e+06 1.31644883e+06 1.70033023e+06 7.68616998e+05 -4.98366312e+06 3.87869433e+06 1.79381530e+06 2.91281125e+06 6.60587567e+05 2.21394730e+06 2.39439441e+06] [ 9.27161537e+05 1.86652945e+06 9.31746231e+05 2.47178510e+06 1.11440864e+06 8.88475474e+05 1.38149215e+06 1.65178195e+06 7.35412301e+05 2.41886695e+06 1.97291274e+06 2.33068976e+06 1.34870919e+06 1.52119879e+06 1.14785026e+06 -2.24991670e+06 2.42011282e+06 1.58350043e+06 2.05880121e+06 1.04996625e+06 1.72097178e+06 1.82586149e+06] [ 6.96776724e+05 9.97272507e+05 7.84642967e+05 1.01797812e+06 8.43361538e+05 6.52018781e+05 8.82690176e+05 8.58829736e+05 7.24067228e+05 9.99866653e+05 9.91730952e+05 9.84586122e+05 8.64432076e+05 9.15445793e+05 7.28045103e+05 -5.93717143e+05 1.02309138e+06 9.17218722e+05 9.89329658e+05 7.27080442e+05 9.61383316e+05 9.79538325e+05] [ 3.07421824e+05 3.64421806e+05 3.34764665e+05 3.04295991e+05 3.41456915e+05 2.67167547e+05 3.54787219e+05 2.87107140e+05 3.26404513e+05 3.10954250e+05 3.74793311e+05 3.11209553e+05 3.54336919e+05 3.51189858e+05 2.65123244e+05 -9.71267934e+04 3.12467292e+05 3.58088104e+05 3.04284597e+05 3.00502395e+05 3.63520177e+05 3.62057980e+05] [-7.32922438e+04 -6.91717264e+04 -8.59791474e+04 -4.67321224e+04 -8.32395427e+04 -6.25909881e+04 -8.06765449e+04 -6.18991352e+04 -8.70137780e+04 -4.16625989e+04 -5.70228827e+04 -4.31927781e+04 -7.17431491e+04 -7.17687930e+04 -6.78655041e+04 -2.45594746e+04 -4.37382234e+04 -7.11132195e+04 -6.22129422e+04 -6.67121179e+04 -6.83653062e+04 -6.52518762e+04] [-9.02490274e-02 7.65076371e-02 -5.73723775e-01 -1.51842136e-02 8.74992244e-01 3.52711294e-01 3.99035282e-01 -7.16974153e-01 -2.62189121e-01 4.86779512e-01 6.25890728e-01 5.48925625e-01 -5.50427209e-02 4.80719095e-01 7.51723065e-01 -6.89824342e-01 6.42648625e-01 3.07042506e-01 -3.50553703e-01 
5.36321428e-01 5.87233417e-01 6.15226199e-01] [-9.48486435e-01 3.82316564e-01 6.45833075e-01 2.83776798e-03 -6.24500749e-01 5.65272428e-01 -2.64900648e-01 6.72862446e-01 5.68698733e-01 -2.37319735e-01 3.97029608e-02 6.35912213e-01 9.14672001e-01 -5.10643439e-02 4.09926048e-01 -8.71207936e-01 -7.15539542e-02 5.77311183e-01 4.88518693e-03 8.14369016e-01 5.34982736e-01 -3.33770685e-01] [-7.89702259e-01 -9.44777894e-01 3.94922890e-01 6.49220315e-01 6.31681370e-02 -8.28349063e-01 -1.85738176e-01 3.41387869e-02 -4.58144213e-01 4.87472722e-01 -9.35711916e-01 6.21967112e-01 -8.18058239e-01 -5.67681496e-02 6.04180234e-01 9.38633107e-01 -9.58100003e-01 -2.38273094e-01 1.21964118e-01 -3.24223663e-01 1.01490036e-01 5.89007938e-01] [-5.10310607e+04 4.51250218e+04 -5.25755454e+04 5.95733290e+04 -7.33076399e+04 -3.15581476e+04 5.14481666e+03 1.22317239e+04 -8.29622044e+04 7.87489844e+04 5.41495660e+04 7.38943997e+04 2.49948461e+04 2.07924102e+04 -1.99637588e+04 -2.66160692e+05 6.75591922e+04 8.88830464e+03 4.55495691e+04 -2.41411405e+04 3.49327346e+04 3.11423558e+04] [ 6.02749476e+03 1.40751388e+05 -7.78110784e+04 7.00498693e+05 -9.68498309e+04 7.69841002e+04 1.29122035e+05 2.46350959e+05 -1.74352804e+05 5.41611661e+05 1.66491143e+05 5.28175423e+05 6.89549110e+04 1.25019367e+05 1.94104181e+05 -1.13298088e+06 4.80406346e+05 1.60480360e+05 3.68044511e+05 7.50501845e+04 1.42842755e+05 1.78994532e+05] [-3.42032308e+05 4.45393534e+04 -6.44796779e+05 1.61354363e+06 -6.17913102e+05 -2.48404186e+05 -1.25141751e+05 3.07600706e+05 -8.96425714e+05 1.23219313e+06 1.17454218e+05 1.15746026e+06 -3.22657977e+05 -1.26421874e+05 7.40082039e+04 -3.10168498e+06 1.10666823e+06 1.07889803e+04 6.59949251e+05 -2.05337687e+05 -2.38521405e+04 1.00341217e+05] [-1.89100953e+06 -1.07331639e+06 -2.53097351e+06 1.87397240e+06 -2.49352744e+06 -1.62775834e+06 -1.51456417e+06 -6.14861691e+05 -3.05964464e+06 1.11894617e+06 -9.19689114e+05 9.81529547e+05 -1.79060014e+06 -1.42648225e+06 -1.03335054e+06 -5.99961858e+06 8.66345335e+05 -1.31403636e+06 -3.59508213e+04 -1.71123780e+06 -1.19286443e+06 -9.75029766e+05] [-5.24728833e+06 -3.70277905e+06 -6.76987546e+06 1.64599153e+06 -6.83518027e+06 -4.78633750e+06 -4.83259548e+06 -2.97072461e+06 -7.69239959e+06 4.14760060e+05 -3.16275639e+06 2.12015779e+05 -4.84943947e+06 -4.28584939e+06 -3.74742190e+06 -1.07078284e+07 -1.53152140e+05 -4.36659740e+06 -2.07418360e+06 -5.05701068e+06 -3.81426596e+06 -3.41961285e+06] [-1.06307830e+07 -7.13474089e+06 -1.31152531e+07 -3.13303123e+05 -1.27650755e+07 -9.74354879e+06 -1.01259767e+07 -6.50862835e+06 -1.44369510e+07 -8.85809660e+05 -6.12157140e+06 -1.30908535e+06 -9.76480369e+06 -8.83122071e+06 -8.42070436e+06 -1.64483138e+07 -1.69725516e+06 -8.96118551e+06 -4.91834971e+06 -1.03614136e+07 -7.75102184e+06 -7.08196266e+06] [-1.72737556e+07 -1.20137335e+07 -2.08685948e+07 -3.76360400e+06 -2.02698548e+07 -1.54507190e+07 -1.68833413e+07 -1.13588193e+07 -2.24864082e+07 -3.29526939e+06 -1.02043948e+07 -3.94866970e+06 -1.58095972e+07 -1.49257389e+07 -1.43000170e+07 -2.14636595e+07 -4.40714704e+06 -1.49050963e+07 -9.30773203e+06 -1.64741664e+07 -1.32130684e+07 -1.23481831e+07] [-2.34200166e+07 -1.74581179e+07 -2.81079710e+07 -7.07158921e+06 -2.77697856e+07 -2.08194134e+07 -2.34228930e+07 -1.63929244e+07 -3.00265203e+07 -6.42524114e+06 -1.44750612e+07 -7.22145175e+06 -2.13100015e+07 -2.07290520e+07 -1.98844534e+07 -2.50874790e+07 -7.89065762e+06 -2.08479235e+07 -1.44178779e+07 -2.21390134e+07 -1.86766054e+07 -1.76258429e+07] [-2.46609500e+07 -1.91785487e+07 
-2.92541082e+07 -9.88910065e+06 -2.94084577e+07 -2.17417984e+07 -2.49897453e+07 -1.84476978e+07 -3.11042261e+07 -8.80111603e+06 -1.58870029e+07 -9.59390687e+06 -2.22515684e+07 -2.21470014e+07 -2.16674774e+07 -2.21673684e+07 -1.05135006e+07 -2.25128678e+07 -1.72076025e+07 -2.33104696e+07 -2.02270312e+07 -1.93379526e+07] [-2.23197854e+07 -1.97591101e+07 -2.58929478e+07 -1.34098386e+07 -2.63945380e+07 -1.95957720e+07 -2.36309687e+07 -1.84503689e+07 -2.70808037e+07 -1.22773178e+07 -1.75617529e+07 -1.27270395e+07 -2.12646270e+07 -2.15182154e+07 -2.00543224e+07 -1.25491853e+07 -1.35705128e+07 -2.19614346e+07 -1.81559112e+07 -2.15647092e+07 -2.03900688e+07 -1.98910087e+07] [-2.07523889e+07 -2.00377583e+07 -2.36831651e+07 -1.57661703e+07 -2.38112481e+07 -1.85785499e+07 -2.26673241e+07 -1.84712874e+07 -2.41028234e+07 -1.45672922e+07 -1.87350742e+07 -1.49309152e+07 -2.07480717e+07 -2.09989628e+07 -1.92490268e+07 -5.66718186e+06 -1.55064078e+07 -2.14331100e+07 -1.88124064e+07 -2.04246771e+07 -2.04507542e+07 -2.01125289e+07] [-1.92799397e+07 -2.06025320e+07 -2.23904485e+07 -1.70476902e+07 -2.21905429e+07 -1.77518858e+07 -2.21366837e+07 -1.84474865e+07 -2.21755429e+07 -1.61923242e+07 -1.95290561e+07 -1.64718931e+07 -2.05835742e+07 -2.08154564e+07 -1.84405639e+07 -2.09937020e+06 -1.68495169e+07 -2.10969718e+07 -1.93531923e+07 -1.93683398e+07 -2.06836104e+07 -2.03825860e+07] [-1.97292296e+07 -2.13006308e+07 -2.34282094e+07 -1.67854434e+07 -2.33706261e+07 -1.82583708e+07 -2.27389302e+07 -1.88486850e+07 -2.32870943e+07 -1.60680278e+07 -1.97994578e+07 -1.63680014e+07 -2.12969524e+07 -2.15158934e+07 -1.88349362e+07 -3.76861580e+06 -1.68602317e+07 -2.17111800e+07 -1.97454789e+07 -1.98538941e+07 -2.12793325e+07 -2.10174286e+07] [-1.68512339e+07 -1.82258831e+07 -2.01176677e+07 -1.44921166e+07 -2.01117202e+07 -1.58241436e+07 -1.92749398e+07 -1.61235202e+07 -2.02466689e+07 -1.38869585e+07 -1.73375047e+07 -1.41237812e+07 -1.87676145e+07 -1.87255790e+07 -1.59776570e+07 -3.85630924e+06 -1.45807421e+07 -1.87452893e+07 -1.66153790e+07 -1.74385358e+07 -1.84464708e+07 -1.82558195e+07] [-1.15817183e+07 -1.15995848e+07 -1.38713184e+07 -1.03715235e+07 -1.37329108e+07 -1.12454128e+07 -1.28851963e+07 -1.10624492e+07 -1.42235538e+07 -9.30216495e+06 -1.17086127e+07 -9.52279776e+06 -1.28601866e+07 -1.26760336e+07 -1.09681962e+07 -4.25073342e+06 -9.90150836e+06 -1.27039683e+07 -1.08466581e+07 -1.24423393e+07 -1.21765776e+07 -1.22258095e+07] [-7.43407321e+06 -5.89888943e+06 -9.05641206e+06 -5.60029027e+06 -8.58186154e+06 -7.48286693e+06 -7.73347654e+06 -6.44836190e+06 -9.44083548e+06 -4.27083709e+06 -6.11413103e+06 -4.51156989e+06 -7.79470235e+06 -7.50979729e+06 -7.00053859e+06 -6.02000127e+06 -4.73951029e+06 -7.41470803e+06 -5.62927802e+06 -7.95689786e+06 -6.70077101e+06 -6.75489535e+06] [-4.11784699e+06 -1.71080524e+06 -4.85005393e+06 -1.90110091e+06 -4.14585423e+06 -4.33753723e+06 -3.55882240e+06 -2.81070715e+06 -5.21774572e+06 -7.64909517e+05 -2.24050558e+06 -1.04037976e+06 -3.86190246e+06 -3.38515817e+06 -3.66356072e+06 -6.12171440e+06 -1.02611996e+06 -3.31300988e+06 -1.69882300e+06 -4.39933970e+06 -2.60345442e+06 -2.64257747e+06] [-1.95015151e+06 1.23613104e+05 -2.26555752e+06 3.56629679e+05 -1.42488854e+06 -2.07128253e+06 -1.26827449e+06 -6.31229729e+05 -2.54949673e+06 9.02288591e+05 -4.65502754e+05 6.15036405e+05 -1.68409670e+06 -1.14257179e+06 -1.48003184e+06 -4.80417272e+06 8.30041525e+05 -1.00903925e+06 3.60120188e+05 -1.98986707e+06 -5.31589795e+05 -5.01754461e+05] [-1.02349409e+06 1.29832540e+06 -1.25717598e+06 
1.71722718e+06 -3.91968482e+05 -9.71682114e+05 -1.17223892e+05 5.97863749e+05 -1.48269941e+06 2.31760908e+06 1.12772344e+06 2.02258087e+06 -2.60455352e+05 1.78474495e+05 -6.00913751e+05 -4.72200248e+06 2.23912859e+06 2.92994371e+05 1.52219607e+06 -7.17182385e+05 8.12467134e+05 8.90908516e+05] [-4.88431147e+04 1.91849145e+06 -1.19378890e+05 2.21655421e+06 5.06999669e+05 8.46088708e+04 7.81761284e+05 1.41557540e+06 -3.10337482e+05 2.76651944e+06 1.90003792e+06 2.55488491e+06 7.02431264e+05 1.01550186e+06 3.63733160e+05 -3.68594395e+06 2.74807843e+06 1.14498198e+06 2.17022201e+06 2.66899383e+05 1.48749836e+06 1.59210473e+06] [ 1.87559489e+05 1.59035036e+06 1.62346410e+05 2.09504361e+06 4.38930371e+05 3.24132207e+05 8.68558883e+05 1.30448006e+06 -5.58431542e+04 2.36822092e+06 1.67909564e+06 2.23529146e+06 8.20057163e+05 1.00726027e+06 5.49210042e+05 -2.96983739e+06 2.33278058e+06 1.09827229e+06 1.85697172e+06 4.40559888e+05 1.30736514e+06 1.39092269e+06] [ 3.18888099e+05 7.71742817e+05 3.69115047e+05 1.02556135e+06 3.93496190e+05 3.65342523e+05 5.97688107e+05 7.71613346e+05 2.50143514e+05 1.00010479e+06 7.30168934e+05 9.75880585e+05 5.36401057e+05 5.96483991e+05 5.46377507e+05 -9.80164625e+05 1.04078341e+06 6.18365515e+05 1.00253025e+06 4.24958117e+05 6.92561656e+05 7.06930891e+05] [ 3.14295973e+05 4.99120221e+05 3.73988889e+05 4.61173931e+05 3.55599739e+05 3.05516319e+05 4.43648624e+05 4.35067706e+05 3.29435076e+05 4.58452798e+05 4.85524207e+05 4.59333023e+05 4.48779689e+05 4.46663938e+05 3.54785669e+05 -2.90700505e+05 4.76614328e+05 4.34159223e+05 4.99800252e+05 3.47536158e+05 4.79585768e+05 4.72449288e+05] [ 1.80016822e+05 2.27464590e+05 1.97793379e+05 1.96025954e+05 1.97031580e+05 1.68927054e+05 2.19579474e+05 1.84683797e+05 1.87955067e+05 2.02342532e+05 2.33362431e+05 2.02125492e+05 2.20290894e+05 2.16616923e+05 1.66194663e+05 -8.32409804e+04 2.02607011e+05 2.20871431e+05 1.94395922e+05 1.86327794e+05 2.25757459e+05 2.25435782e+05] [-2.43235798e+04 -2.08507081e+04 -2.63830735e+04 -1.20938027e+04 -2.71530569e+04 -1.58562974e+04 -2.26960265e+04 -1.59814948e+04 -2.75622907e+04 -1.15006301e+04 -1.93902488e+04 -1.15076915e+04 -2.13403871e+04 -2.17417858e+04 -1.74480182e+04 -1.37391404e+04 -1.22195458e+04 -2.19143371e+04 -1.59892386e+04 -1.92591580e+04 -2.09079239e+04 -2.09443416e+04] [ 4.54646920e-01 7.89956641e-01 7.78695292e-02 5.60587558e-01 -4.90368830e-01 -7.31574578e-01 -4.84422411e-02 -2.13455525e-01 1.54840606e-01 -4.98061594e-01 -4.30832177e-01 4.25844906e-01 8.05646873e-01 5.89093979e-01 -5.61232531e-01 6.11764507e-01 7.76360029e-01 -5.40404580e-01 9.66961550e-01 5.06028335e-01 -3.05361807e-01 3.84530550e-01] [-3.46795024e-02 -4.21131896e-01 3.55017032e-01 6.28655784e-01 3.77740058e-01 -5.41107109e-01 6.35618865e-01 6.05628064e-01 6.20788256e-01 -2.30580303e-01 -2.26485495e-01 1.84996958e-01 -8.51285969e-01 -4.74634718e-02 -2.92242067e-01 -2.24974422e-01 -4.67872604e-01 6.36384834e-01 7.66621069e-01 -7.83176998e-02 -2.74932695e-01 -3.93394433e-01] [-8.89950537e-02 2.06855714e-01 -7.27961216e-01 -2.09327736e-01 -1.24749766e-01 8.25158540e-01 8.54743341e-01 7.75067517e-01 -1.12883426e-02 2.56780765e-01 -4.06336817e-02 5.68655190e-01 -9.82424869e-01 5.27301914e-01 -1.95739363e-01 -6.30813648e-01 1.82418903e-01 5.06540806e-01 -8.41570679e-01 6.07705016e-01 -7.25352608e-01 -1.56033046e-01] [-5.00788124e+03 -4.39447616e+03 -1.13880101e+04 2.40239933e+04 -1.81227317e+04 5.46963629e+03 -3.52017076e+02 2.01986854e+03 -1.66274343e+04 2.28646644e+04 3.13852648e+03 2.14964974e+04 4.01370363e+03 
2.30275620e+03 2.43876480e+03 -4.95388924e+04 1.74964138e+04 3.90425384e+03 1.67361021e+03 6.37828714e+03 -4.06006322e+02 3.47086587e+02] [ 4.51223908e+04 3.14087925e+03 6.82340431e+03 1.76665002e+05 -9.17609661e+03 4.47044843e+04 4.50770202e+04 5.05930019e+04 -1.57364503e+04 1.17678547e+05 1.55987802e+04 1.14715432e+05 2.31810033e+04 3.52298296e+04 6.97723006e+04 -2.33704156e+05 9.71246013e+04 5.37681919e+04 5.90232823e+04 4.66065341e+04 2.11354269e+04 3.54918528e+04] [-5.69581703e+04 -5.71840134e+04 -2.07701028e+05 5.77651764e+05 -2.26848032e+05 -5.89684854e+04 -3.39454434e+04 4.30368163e+04 -3.09830460e+05 3.98499581e+05 4.04693682e+03 3.64259464e+05 -1.04906281e+05 -4.88309966e+04 2.30230598e+04 -1.09507473e+06 3.31075081e+05 1.60242431e+04 1.09659894e+05 -3.64492951e+04 -4.33024219e+04 1.40516044e+04] [-8.54028107e+05 -6.32746720e+05 -1.20198577e+06 7.20596679e+05 -1.18416190e+06 -7.62458404e+05 -7.90710976e+05 -3.98770952e+05 -1.42260416e+06 4.58720681e+05 -4.97035866e+05 3.76179272e+05 -8.86441189e+05 -7.44691122e+05 -5.77531645e+05 -2.45604025e+06 3.14310547e+05 -6.55755326e+05 -2.14470012e+05 -7.79709392e+05 -6.77663356e+05 -5.45248434e+05] [-2.73411667e+06 -2.13379618e+06 -3.70152568e+06 1.17820779e+06 -3.74723299e+06 -2.55339696e+06 -2.66942683e+06 -1.58544586e+06 -4.22126288e+06 3.61856410e+05 -1.62126082e+06 2.16314291e+05 -2.67990720e+06 -2.39201416e+06 -2.05326251e+06 -5.78387410e+06 2.01675492e+04 -2.32686657e+06 -1.24741935e+06 -2.63669993e+06 -2.18108109e+06 -1.81504173e+06] [-5.76764744e+06 -4.96544892e+06 -7.56171185e+06 2.53198593e+05 -7.71704804e+06 -5.41032990e+06 -6.00361829e+06 -3.86736090e+06 -8.35271427e+06 -9.52271653e+05 -4.16792527e+06 -1.14302549e+06 -5.88022191e+06 -5.44874290e+06 -4.53629864e+06 -8.64076596e+06 -1.47798549e+06 -5.42643688e+06 -3.37165260e+06 -5.72754269e+06 -5.09652321e+06 -4.47919190e+06] [-1.11988698e+07 -1.04734319e+07 -1.41678608e+07 -1.96477234e+06 -1.45789693e+07 -1.01473696e+07 -1.19542828e+07 -7.99749626e+06 -1.54528268e+07 -3.63603551e+06 -9.18468637e+06 -3.95007746e+06 -1.17165308e+07 -1.11997888e+07 -8.83835136e+06 -1.25659997e+07 -4.38112818e+06 -1.09885772e+07 -7.30508763e+06 -1.10104841e+07 -1.07555456e+07 -9.77702126e+06] [-1.71298524e+07 -1.71590213e+07 -2.12557307e+07 -4.56843485e+06 -2.18904923e+07 -1.53153085e+07 -1.87006861e+07 -1.25624501e+07 -2.29703245e+07 -7.20753858e+06 -1.55092487e+07 -7.63291691e+06 -1.84332317e+07 -1.78066361e+07 -1.34306817e+07 -1.53782426e+07 -8.05089068e+06 -1.74159018e+07 -1.18572662e+07 -1.68327033e+07 -1.74186148e+07 -1.60452864e+07] [-1.96615002e+07 -2.24821052e+07 -2.38164073e+07 -7.64708708e+06 -2.49331346e+07 -1.76670811e+07 -2.24356711e+07 -1.51291543e+07 -2.56928478e+07 -1.20323718e+07 -2.19906718e+07 -1.22573445e+07 -2.30339209e+07 -2.21841847e+07 -1.47381228e+07 -1.14609371e+07 -1.24473550e+07 -2.18377249e+07 -1.46573215e+07 -2.01072067e+07 -2.25705280e+07 -2.10726374e+07] [-1.94489909e+07 -2.49764853e+07 -2.37072420e+07 -9.05326051e+06 -2.51247747e+07 -1.75468646e+07 -2.32354436e+07 -1.57076954e+07 -2.54218424e+07 -1.50105262e+07 -2.55439725e+07 -1.50019272e+07 -2.42728698e+07 -2.35096563e+07 -1.39748092e+07 -6.97050079e+06 -1.51042457e+07 -2.31859292e+07 -1.56896786e+07 -2.04937063e+07 -2.44761936e+07 -2.30759371e+07] [-1.77099145e+07 -2.46804500e+07 -2.18696608e+07 -9.12684562e+06 -2.31178899e+07 -1.62920118e+07 -2.19296507e+07 -1.47791722e+07 -2.31944863e+07 -1.56656677e+07 -2.57936792e+07 -1.55795896e+07 -2.34286398e+07 -2.25799751e+07 -1.23965303e+07 -3.50179191e+06 
-1.55193104e+07 -2.22659702e+07 -1.50788996e+07 -1.92554488e+07 -2.38738715e+07 -2.25573803e+07] [-1.63139304e+07 -2.33888771e+07 -2.07164615e+07 -8.65902703e+06 -2.15412768e+07 -1.51966252e+07 -2.06925113e+07 -1.39360310e+07 -2.17031515e+07 -1.45791551e+07 -2.41624093e+07 -1.45140362e+07 -2.21587859e+07 -2.13832029e+07 -1.16018535e+07 -3.47569816e+06 -1.45085245e+07 -2.08530881e+07 -1.43080423e+07 -1.78236647e+07 -2.26192438e+07 -2.13718307e+07] [-1.46784775e+07 -2.12143067e+07 -1.88655790e+07 -7.18851675e+06 -1.94733286e+07 -1.37298412e+07 -1.87111510e+07 -1.24290843e+07 -1.98344430e+07 -1.27587563e+07 -2.19167531e+07 -1.27439068e+07 -2.03604324e+07 -1.94670133e+07 -1.03140443e+07 -3.81427503e+06 -1.26411586e+07 -1.87900192e+07 -1.26233621e+07 -1.60854716e+07 -2.06085196e+07 -1.93966444e+07] [-1.03357763e+07 -1.51907722e+07 -1.35608620e+07 -4.01316395e+06 -1.43429818e+07 -9.83493080e+06 -1.31369454e+07 -8.58952197e+06 -1.47074773e+07 -8.59185083e+06 -1.59343352e+07 -8.60565595e+06 -1.47928402e+07 -1.40416644e+07 -6.93641982e+06 -4.40662149e+06 -8.58207649e+06 -1.33705891e+07 -8.50953521e+06 -1.16176665e+07 -1.48836168e+07 -1.39160726e+07] [-5.47266951e+06 -8.41536194e+06 -7.10912683e+06 -3.43065223e+06 -7.55565136e+06 -5.36136057e+06 -7.05560219e+06 -4.62363867e+06 -7.83151287e+06 -5.82058340e+06 -9.84464184e+06 -5.75183649e+06 -8.48562780e+06 -8.05059133e+06 -3.30245539e+06 -1.24544486e+06 -5.61039832e+06 -7.57692211e+06 -4.33600378e+06 -6.45672536e+06 -8.47454966e+06 -8.08410865e+06] [-2.69429941e+06 -4.22580642e+06 -3.49364617e+06 -2.03673006e+06 -3.55098323e+06 -3.00113628e+06 -3.42917145e+06 -2.24293811e+06 -3.87954365e+06 -3.30226454e+06 -5.44316465e+06 -3.22118147e+06 -4.60444093e+06 -4.22522593e+06 -1.42551537e+06 -3.89207332e+05 -3.00342459e+06 -3.91875608e+06 -1.80492429e+06 -3.43007808e+06 -4.41388661e+06 -4.21261146e+06] [-1.35970459e+06 -1.76815543e+06 -1.57531111e+06 -1.19402917e+06 -1.53574174e+06 -1.60643023e+06 -1.38240694e+06 -1.03123063e+06 -1.79119810e+06 -1.62162049e+06 -2.68093684e+06 -1.61201745e+06 -2.20233638e+06 -1.98587620e+06 -6.13552031e+05 -3.42794234e+05 -1.42589007e+06 -1.80554025e+06 -6.05050940e+05 -1.66232664e+06 -1.98132035e+06 -1.96280101e+06] [-4.52140471e+05 -4.86813397e+05 -3.69575461e+05 -5.06562004e+05 1.28000839e+04 -6.04702342e+05 -2.94020310e+05 -7.82424021e+04 -3.80137353e+05 -6.66942296e+05 -1.35703864e+06 -7.49076176e+05 -9.66488428e+05 -7.25632315e+05 7.72316549e+04 3.68478192e+05 -4.36792167e+05 -5.68862933e+05 3.24944942e+05 -5.51505969e+05 -6.88716435e+05 -7.16622406e+05] [-2.97368788e+05 5.74163363e+04 -1.96642189e+05 -1.61586074e+05 2.42592858e+05 -2.73846680e+05 -2.26172233e+04 2.02253002e+05 -1.30839705e+05 -3.52021926e+04 -5.07319361e+05 -1.20718492e+05 -4.38599313e+05 -2.68543830e+05 8.50473424e+04 1.89470793e+05 1.36579535e+05 -1.49268461e+05 5.67118192e+05 -1.89701513e+05 -1.39051103e+05 -1.99837408e+05] [-6.12048020e+04 6.01687162e+05 1.54925454e+04 4.41503906e+05 3.61832586e+05 8.37818952e+04 3.32395372e+05 5.82913354e+05 3.69304000e+04 7.17293318e+05 2.92998167e+05 6.36420212e+05 1.11797293e+05 2.47973022e+05 2.77612906e+05 -5.83221312e+05 8.05708020e+05 3.36722676e+05 9.24528812e+05 1.71973883e+05 4.01332633e+05 3.61755543e+05] [ 8.01260809e+04 5.03797771e+05 1.37124605e+05 6.76979772e+05 2.01684506e+05 1.81889816e+05 3.82195938e+05 5.08062596e+05 7.74514292e+04 6.80069918e+05 3.90470784e+05 6.55071007e+05 3.31809005e+05 4.01701058e+05 3.29689075e+05 -7.67450304e+05 6.86579992e+05 3.84514145e+05 6.79171736e+05 2.36727944e+05 
4.73239545e+05 4.67376938e+05] [ 8.79211119e+04 2.90932964e+05 1.13914925e+05 2.84420767e+05 1.30560550e+05 1.25162202e+05 2.28685107e+05 2.62497420e+05 8.41099371e+04 3.19580399e+05 2.50638413e+05 3.14851925e+05 2.14736689e+05 2.36715751e+05 1.71462414e+05 -3.17297017e+05 3.32342084e+05 2.27839715e+05 3.36808506e+05 1.78316257e+05 2.77700769e+05 2.69699624e+05] [-1.01857137e+04 6.90090718e+04 9.85143847e+03 1.70625331e+04 1.44514355e+04 1.21538688e+04 4.17377062e+04 4.50300916e+04 6.58684615e+03 5.51746866e+04 5.35322643e+04 5.61361340e+04 4.49257515e+04 4.70528650e+04 1.49537172e+04 -5.28361940e+04 5.68679607e+04 3.82672399e+04 6.72529103e+04 1.60871414e+04 6.08780101e+04 5.23672729e+04] [ 1.39943990e+04 1.71917663e+04 1.70210917e+04 1.51215907e+04 1.73144454e+04 1.54411056e+04 1.88667287e+04 1.47808020e+04 1.67161974e+04 1.43670844e+04 1.52170092e+04 1.47759734e+04 1.78093909e+04 1.80079939e+04 1.45687963e+04 -3.00512197e+03 1.36819796e+04 1.74335709e+04 1.42748060e+04 1.43774319e+04 1.76082426e+04 1.72158014e+04] [ 5.62930308e+03 6.11187610e+03 6.15537998e+03 5.22006453e+03 6.16445191e+03 4.75708562e+03 6.29160271e+03 4.93449239e+03 5.90166203e+03 5.06801674e+03 6.45953022e+03 5.18817237e+03 6.28280619e+03 5.89412789e+03 4.66386617e+03 -1.23356090e+03 4.97130185e+03 6.30745825e+03 5.07483078e+03 5.50324478e+03 6.13125770e+03 6.22735566e+03] [ 9.80750367e-01 6.97848677e-01 -6.47065917e-02 6.83495520e-01 -6.78346186e-01 -9.64137248e-01 2.50118786e-01 -1.50996665e-01 6.96125775e-01 4.92736446e-01 -6.31273842e-01 6.82806020e-02 2.22094745e-01 -2.80858422e-01 -2.61119941e-01 -2.95475793e-01 4.95119782e-01 -3.86395243e-01 -3.52526440e-02 1.59074178e-01 6.88479271e-02 8.48682843e-01] [ 4.69854029e-01 -5.27804462e-01 -5.13103377e-01 -4.30010489e-01 3.20886934e-01 7.02934218e-01 3.53072646e-01 9.39876884e-01 -8.09859909e-01 -1.23405239e-01 3.26504265e-01 -3.51986732e-01 -1.60253880e-01 -7.12262672e-01 9.97111976e-01 -6.46982202e-01 -9.50682588e-02 -3.12030914e-01 7.11761861e-01 3.62461166e-02 9.71917804e-01 8.76073317e-01] [-1.89679039e-01 1.98986627e-01 -9.98215741e-01 -6.57261760e-01 -4.64622033e-01 -4.74180925e-01 -2.68521931e-01 -6.31922218e-01 2.11839160e-01 -8.44403534e-02 8.95490189e-01 -6.38469404e-01 3.80828014e-01 6.26019173e-01 -4.19879589e-01 6.61958573e-01 4.16602061e-01 -4.08677251e-01 1.87949276e-02 -9.58161915e-02 -1.93778285e-01 1.95437274e-01] [-8.83603187e-01 -3.55576437e-01 -5.31687822e-01 -3.95537154e-02 3.81202778e-01 -8.50112434e-01 -4.92879045e-01 -8.68150583e-01 -6.57611008e-01 -7.53953499e-01 -9.32352843e-01 -8.65662557e-01 3.53468760e-01 -5.28023743e-01 -4.22310901e-02 1.53515447e-02 -9.60507961e-01 -4.90325543e-01 9.95604864e-01 8.24250279e-01 -2.99558957e-01 1.12710013e-01] [ 2.75474280e+04 2.67759275e+02 1.74172124e+04 4.12623724e+04 8.60308533e+03 1.71182150e+04 2.02955228e+04 1.50090666e+04 8.77332709e+03 1.75365660e+04 -3.50670739e+03 1.80650870e+04 1.02400371e+04 1.26698227e+04 3.15844569e+04 -5.57781861e+04 1.35861694e+04 1.90437049e+04 1.29695392e+04 2.00646516e+04 6.50845561e+03 1.24540017e+04] [ 1.63643024e+04 -7.98937931e+04 -2.56483092e+04 1.12393329e+05 -4.62329910e+04 -2.16464835e+04 -1.76475192e+04 -1.75085591e+04 -5.12558637e+04 1.10669619e+04 -6.87018310e+04 1.06745262e+04 -4.51982233e+04 -3.13607808e+04 2.97221717e+04 -1.60537174e+05 -1.26913945e+03 -2.00517078e+04 -2.54882521e+04 -1.96207109e+04 -5.40515684e+04 -3.10216585e+04] [-2.94902211e+05 -3.68846693e+05 -4.57058237e+05 2.14221137e+05 -4.84373455e+05 -3.00425878e+05 -3.36637055e+05 
-2.25440423e+05 -5.29387485e+05 2.69352706e+04 -2.81272542e+05 5.32348261e+03 -3.62455152e+05 -3.26507885e+05 -2.16054395e+05 -6.22163654e+05 -2.39784856e+04 -3.00280893e+05 -2.19008083e+05 -3.03163983e+05 -3.41332162e+05 -2.71924440e+05] [-5.90649776e+05 -8.25345809e+05 -1.01369018e+06 6.29681797e+05 -1.07562271e+06 -6.12931225e+05 -7.43933444e+05 -4.03291919e+05 -1.18764514e+06 8.60789136e+04 -6.16074876e+05 6.20291513e+04 -8.42155242e+05 -7.40888414e+05 -4.09228866e+05 -1.42863910e+06 3.58868322e+03 -6.56386949e+05 -3.99425158e+05 -6.51476824e+05 -7.74679383e+05 -5.83340032e+05] [-1.33000229e+06 -2.06932867e+06 -2.07086883e+06 3.59530537e+05 -2.22282328e+06 -1.32773061e+06 -1.87498248e+06 -1.02351752e+06 -2.33434059e+06 -5.82675560e+05 -1.84753071e+06 -5.84089342e+05 -2.12423482e+06 -1.94724185e+06 -9.02239308e+05 -1.56963693e+06 -6.24476920e+05 -1.75445716e+06 -1.02028422e+06 -1.55398922e+06 -2.05948508e+06 -1.71899130e+06] [-2.51706531e+06 -3.61859304e+06 -3.74091078e+06 1.60460993e+05 -4.00802000e+06 -2.45555868e+06 -3.52906890e+06 -1.87804748e+06 -4.17596886e+06 -1.15951494e+06 -3.28743311e+06 -1.20865035e+06 -3.78414340e+06 -3.52781324e+06 -1.70419609e+06 -1.88815431e+06 -1.26112990e+06 -3.27008797e+06 -1.81875785e+06 -2.94055222e+06 -3.71072058e+06 -3.13919607e+06] [-4.48969346e+06 -6.43187086e+06 -5.94425275e+06 -1.11621654e+06 -6.42649119e+06 -4.32653306e+06 -5.97796513e+06 -3.45826473e+06 -6.58875906e+06 -3.15186091e+06 -6.20815779e+06 -3.16212181e+06 -6.55930469e+06 -6.17293063e+06 -2.98913692e+06 -1.19466493e+06 -3.12684830e+06 -5.87633305e+06 -3.41515313e+06 -5.18556756e+06 -6.60504515e+06 -5.81428556e+06] [-5.14145952e+06 -9.52749926e+06 -6.07200583e+06 -4.11727713e+06 -6.84195479e+06 -4.96533304e+06 -7.52263086e+06 -4.52997501e+06 -6.59424218e+06 -7.06616958e+06 -1.05283413e+07 -6.77841131e+06 -8.98082618e+06 -8.54850484e+06 -2.81217591e+06 4.04025605e+06 -6.48265542e+06 -8.23179352e+06 -4.68015158e+06 -6.53064015e+06 -9.68312611e+06 -8.89590195e+06] [-4.60242381e+06 -1.02260245e+07 -5.17058315e+06 -5.29672048e+06 -6.00767527e+06 -4.44448823e+06 -7.20024603e+06 -4.35084931e+06 -5.53501415e+06 -8.74432739e+06 -1.22630260e+07 -8.29268461e+06 -9.13929940e+06 -8.72943039e+06 -1.79025400e+06 7.01293273e+06 -7.88615288e+06 -8.44833032e+06 -4.60508047e+06 -6.35396686e+06 -1.01873339e+07 -9.58868453e+06] [-4.30151355e+06 -9.96817428e+06 -5.04060493e+06 -5.43656937e+06 -5.37444644e+06 -4.24770177e+06 -7.05264227e+06 -4.06355519e+06 -5.11128017e+06 -8.63098154e+06 -1.21635505e+07 -8.23725161e+06 -9.03495844e+06 -8.51807584e+06 -1.54226083e+06 7.46072084e+06 -7.68401546e+06 -8.22460939e+06 -4.31685575e+06 -6.10287060e+06 -9.92854417e+06 -9.39616898e+06] [-4.40132754e+06 -9.30869496e+06 -5.75854567e+06 -4.47779451e+06 -5.55251383e+06 -4.31660739e+06 -7.10358234e+06 -3.98733441e+06 -5.69028693e+06 -6.90385157e+06 -1.07845654e+07 -6.70416353e+06 -8.70753906e+06 -8.23671462e+06 -2.09089150e+06 5.11628201e+06 -6.18947219e+06 -7.67966972e+06 -4.11879675e+06 -5.80054974e+06 -9.35996981e+06 -8.77354521e+06] [-3.71698209e+06 -8.72066119e+06 -5.08811403e+06 -3.40077884e+06 -5.01509124e+06 -3.52998781e+06 -6.32564529e+06 -3.31105401e+06 -5.13605124e+06 -6.08445386e+06 -1.00213035e+07 -5.86271706e+06 -8.03306738e+06 -7.51563697e+06 -1.44398109e+06 4.56265993e+06 -5.35775756e+06 -6.87298288e+06 -3.49012880e+06 -4.96527960e+06 -8.64820569e+06 -8.03094564e+06] [-1.36860201e+06 -5.80106292e+06 -2.10882301e+06 -2.32837135e+06 -2.16022149e+06 -1.29674411e+06 -3.45165780e+06 -1.41807439e+06 
-2.14803412e+06 -4.45318817e+06 -7.20674520e+06 -4.17609639e+06 -5.19728556e+06 -4.78639681e+06 3.45498451e+05 5.04924407e+06 -3.70047265e+06 -4.16461112e+06 -1.53526490e+06 -2.46408758e+06 -5.77668433e+06 -5.37961242e+06] [-2.25190158e+05 -3.27166457e+06 -1.36877296e+05 -3.14985558e+06 -2.31417770e+05 -1.72714906e+05 -1.40296169e+06 -6.31381171e+05 -4.01935362e+04 -3.95194365e+06 -4.91450254e+06 -3.64923506e+06 -2.76943510e+06 -2.63076708e+06 8.61263342e+05 5.61574774e+06 -3.26989458e+06 -2.29434298e+06 -6.83354057e+05 -8.58713066e+05 -3.32984321e+06 -3.32000620e+06] [-3.67583885e+05 -2.52493367e+06 -4.97386674e+03 -3.39305250e+06 1.62019254e+05 -3.32666995e+05 -1.02435564e+06 -6.61789458e+05 2.57150734e+05 -3.65126803e+06 -4.05618207e+06 -3.39015753e+06 -2.20124325e+06 -2.08305069e+06 4.41935054e+05 5.46175615e+06 -2.92671009e+06 -1.85581482e+06 -6.14969336e+05 -6.25469758e+05 -2.56987503e+06 -2.65064777e+06] [-5.50281028e+05 -2.03658175e+06 -1.32816339e+05 -3.02406232e+06 1.44904890e+05 -4.99728664e+05 -8.88305257e+05 -6.96544923e+05 1.82489012e+05 -3.04874707e+06 -3.17681345e+06 -2.87707827e+06 -1.86601679e+06 -1.75835931e+06 6.46917782e+04 4.50166530e+06 -2.40441477e+06 -1.54107043e+06 -6.43199688e+05 -6.11010828e+05 -2.07568895e+06 -2.15934101e+06] [ 2.30252255e+05 -8.44308649e+05 6.80194888e+05 -1.93839493e+06 8.81362626e+05 1.72604254e+05 5.90752664e+04 -1.64718862e+04 9.57150435e+05 -1.99891710e+06 -1.76212502e+06 -1.87430715e+06 -6.21345821e+05 -6.04214318e+05 5.46905559e+05 3.90187839e+06 -1.53176901e+06 -5.09394232e+05 -4.25721257e+04 1.44103969e+05 -8.19968633e+05 -9.48172177e+05] [ 2.21741997e+05 -4.31989959e+05 5.15433962e+05 -1.22170494e+06 7.34449160e+05 1.96223360e+05 1.32023236e+05 7.82602147e+04 7.32740191e+05 -1.24780089e+06 -1.11064079e+06 -1.18059064e+06 -3.23257646e+05 -2.91218749e+05 4.24213254e+05 2.69237385e+06 -9.46433260e+05 -2.60346789e+05 7.97150042e+04 1.90510384e+05 -4.08264825e+05 -5.38848131e+05] [ 6.43048946e+04 -2.20366994e+05 2.26027359e+05 -4.87478438e+05 3.45743012e+05 1.21088884e+05 6.55058698e+04 1.12337212e+05 3.45779478e+05 -5.64916626e+05 -6.36856559e+05 -5.25814406e+05 -1.83699771e+05 -1.26738584e+05 3.10829985e+05 1.24266976e+06 -4.08922723e+05 -1.41581019e+05 1.50395965e+05 9.09685096e+04 -2.02988837e+05 -2.88314305e+05] [ 6.39544705e+04 3.73851636e+03 1.09543111e+05 -6.31152894e+04 1.82027974e+05 9.82585587e+04 7.83240469e+04 1.27891117e+05 1.52382389e+05 -6.26903842e+04 -1.22847754e+05 -5.73341577e+04 -2.97326548e+03 2.78115791e+04 1.63070944e+05 3.33462263e+05 7.72145278e+02 2.83235740e+04 1.59856349e+05 1.13144940e+05 1.64784719e+04 -1.10296937e+04] [ 8.50677714e+02 1.59527081e+04 6.37241827e+03 -1.39232484e+04 3.61735432e+04 1.47332192e+04 1.81114096e+04 5.03340533e+04 1.52051774e+04 8.10152036e+03 -2.02519087e+04 8.28465758e+03 -1.10533876e+04 4.01061197e+03 3.44976575e+04 6.94029138e+04 3.74960157e+04 4.74430986e+03 7.03349406e+04 3.80587015e+04 1.59034814e+04 1.09225772e+04] [ 2.39053987e+03 4.09036088e+04 1.04031940e+04 3.54630465e+04 1.62006820e+04 2.57198081e+04 4.01599806e+04 3.55295228e+04 8.72305981e+03 5.44229345e+04 3.64126814e+04 5.42438669e+04 3.35132296e+04 3.89467969e+04 2.26833410e+04 -3.39105514e+04 4.97777666e+04 3.57246616e+04 4.27830262e+04 2.52294552e+04 4.49404260e+04 4.39632856e+04] [ 1.63635956e+04 1.19956884e+04 2.36504489e+04 1.33822837e+04 1.87920715e+04 1.61015334e+04 2.30299839e+04 1.42523650e+04 2.19868502e+04 1.99626230e+03 3.59741917e+03 3.07612678e+03 1.73050247e+04 1.97123787e+04 2.04593569e+04 
1.03234000e+04 1.21585277e+03 1.52998403e+04 1.18500122e+04 1.24760377e+04 1.47094151e+04 1.40460127e+04] [-9.80584554e-02 -2.48788869e-01 -5.52460396e-01 8.75806299e-01 4.87511176e-01 -5.17552938e-01 -9.93370772e-01 -1.15738239e-01 2.13478809e-02 5.32730604e-01 -2.95744111e-01 -6.32942587e-01 3.06432296e-01 -2.21843761e-02 5.93959823e-01 5.90027800e-01 8.10282461e-01 -9.33438691e-01 5.18443693e-01 8.02383330e-01 -4.38758650e-01 5.41554876e-03] [-9.39270729e-01 -2.94383852e-01 -1.13814852e-02 6.81127216e-01 9.82310495e-01 7.24597681e-01 1.45316016e-02 -8.97195388e-01 1.80445739e-01 -1.07002489e-01 -3.85783100e-01 -8.10864019e-01 -8.48055013e-01 1.62480641e-01 2.59877833e-01 -3.00115072e-01 6.83705229e-01 -7.98641513e-01 9.13721960e-01 -4.74811192e-01 -1.81856446e-01 1.20333570e-01] [-9.82430020e-01 -6.93330868e-01 -1.27523828e-03 -2.54431034e-01 1.68073327e-01 8.22701754e-01 6.62088579e-01 3.70522053e-01 -6.77604329e-01 -2.20683112e-01 -2.40443778e-02 1.50892006e-01 5.26178594e-01 -3.12669117e-01 1.28108577e-01 3.60268892e-01 2.69218969e-01 9.15793493e-02 9.43509123e-01 4.87359040e-01 2.58741467e-01 5.98237301e-01] [-8.14802350e-01 -5.81108512e-01 -6.30444837e-01 -3.41377841e-01 -7.11728201e-02 -7.83588386e-01 -2.92236613e-01 -5.51020664e-01 5.95921760e-01 -8.21316853e-01 -9.32036088e-02 3.17073124e-01 7.84149517e-01 3.61068740e-02 1.79717016e-01 7.33017935e-01 1.51625428e-01 -6.72875001e-01 3.15978153e-01 5.27197581e-01 3.33050551e-01 2.44235744e-01] [-6.95547366e-01 -1.95516803e-02 -5.19384798e-02 -4.80843824e-01 1.34575236e-01 -7.07153070e-01 7.75251008e-02 1.56447027e-01 6.73797941e-01 6.30877159e-01 -8.65706240e-01 3.68139307e-01 -2.86563875e-01 -7.87393624e-01 -7.48258291e-01 -9.50258848e-01 -9.10755479e-01 9.14610542e-01 2.77029417e-01 2.31245033e-02 9.69384889e-01 3.23894790e-01] [ 9.43338055e+03 -7.16175333e+02 1.11647227e+04 -9.26178975e+03 1.34649263e+04 7.14295946e+03 4.25060252e+03 5.16567947e+03 1.45298318e+04 -1.01527754e+04 -3.49234386e+03 -8.79189658e+03 -2.79705509e+02 1.00367475e+03 8.66865072e+03 3.98277608e+04 -6.41453895e+03 2.05897638e+03 3.25293342e+03 5.81385600e+03 -1.12178654e+03 -7.57382883e+02] [ 3.94331265e+04 -3.26740503e+04 3.97373369e+04 -6.84361474e+04 5.41621921e+04 2.85024719e+04 -5.78751397e+03 6.30707881e+03 6.10584804e+04 -7.53030459e+04 -4.98305385e+04 -6.78681038e+04 -2.58145400e+04 -2.09699115e+04 3.25378190e+04 2.34758920e+05 -5.43105160e+04 -1.49411601e+04 -8.93255523e+03 1.77670105e+04 -3.44742991e+04 -3.14514416e+04] [-1.25290267e+05 -2.49457839e+05 -1.90478135e+05 -2.23939193e+04 -1.72041073e+05 -1.15107288e+05 -1.98712393e+05 -1.07265699e+05 -1.83774675e+05 -1.36757464e+05 -2.40268998e+05 -1.31808542e+05 -2.52698523e+05 -2.20126628e+05 -7.62598869e+04 1.61222008e+05 -1.14101700e+05 -2.05625836e+05 -1.27943220e+05 -1.43669211e+05 -2.44450315e+05 -2.04992021e+05] [-6.94604949e+04 -3.06988799e+05 -1.71664514e+05 1.18921268e+05 -1.56978475e+05 -8.10148039e+04 -2.05351968e+05 -7.20440151e+04 -1.77459937e+05 -1.22290279e+05 -2.81978354e+05 -1.08317872e+05 -3.05104513e+05 -2.46407267e+05 -4.01040759e+03 2.35915399e+05 -8.06787443e+04 -2.20812158e+05 -9.80732487e+04 -1.46726505e+05 -3.01359865e+05 -2.23643588e+05] [ 1.89563206e+05 -4.82681248e+05 1.34233917e+05 6.72785423e+04 1.36100465e+05 1.89673572e+05 -1.80814419e+05 9.77163877e+04 1.57147307e+05 -3.49661801e+05 -5.65265034e+05 -2.94105744e+05 -4.10409483e+05 -3.14930462e+05 3.52195145e+05 1.33791413e+06 -1.87177592e+05 -2.54261289e+05 5.57216503e+04 -2.34083713e+04 -4.83897613e+05 
-3.72132568e+05] [-9.11479262e+04 -1.66244385e+06 -1.28582880e+05 -5.22468374e+05 -9.64909126e+04 -6.99009782e+04 -9.70202830e+05 -2.10889571e+05 -7.39487163e+04 -1.29437970e+06 -1.88974867e+06 -1.18227337e+06 -1.54541453e+06 -1.32901874e+06 3.57284954e+05 3.12308078e+06 -9.01124552e+05 -1.13380036e+06 -2.40566790e+05 -5.58322888e+05 -1.73284279e+06 -1.49156470e+06] [-3.99129715e+05 -3.40785814e+06 -1.54772074e+05 -2.09255660e+06 -9.91483044e+04 -3.68519011e+05 -1.92941696e+06 -6.50241639e+05 2.05437612e+03 -3.28139559e+06 -4.15670564e+06 -3.03940979e+06 -3.13347049e+06 -2.74571790e+06 5.00809535e+05 6.39379910e+06 -2.45938823e+06 -2.44193265e+06 -6.93511221e+05 -1.23441769e+06 -3.56013376e+06 -3.24358796e+06] [-2.73262112e+05 -4.73916878e+06 5.48407415e+05 -3.96306469e+06 5.99330857e+05 -8.49360347e+04 -2.20607900e+06 -8.73154961e+05 9.49091738e+05 -5.48970083e+06 -6.42171306e+06 -5.03832040e+06 -4.03881762e+06 -3.62211044e+06 1.13137345e+06 1.05381464e+07 -4.21520688e+06 -3.29037678e+06 -9.71282078e+05 -1.34347698e+06 -4.90682484e+06 -4.69587993e+06] [-5.55654271e+04 -5.11979713e+06 1.16873525e+06 -5.45413148e+06 1.48434296e+06 2.17547157e+05 -2.10418862e+06 -7.93846966e+05 1.92833656e+06 -6.68875384e+06 -7.35123506e+06 -6.15056497e+06 -4.22465988e+06 -3.83188369e+06 1.54381887e+06 1.32320309e+07 -5.10291152e+06 -3.47691090e+06 -9.03259329e+05 -1.12927127e+06 -5.28685381e+06 -5.20541722e+06] [ 5.13781780e+04 -5.08584037e+06 1.23424836e+06 -6.10089643e+06 1.88181760e+06 1.99479180e+05 -2.08835124e+06 -8.41195119e+05 2.24004413e+06 -7.07437660e+06 -7.41365921e+06 -6.55798670e+06 -4.27419711e+06 -3.85894854e+06 1.49341454e+06 1.39819448e+07 -5.39380892e+06 -3.52940523e+06 -1.04193704e+06 -1.00095770e+06 -5.29907669e+06 -5.29543057e+06] [ 4.72433691e+05 -4.44455554e+06 1.53972140e+06 -6.17993518e+06 2.35214731e+06 5.64842234e+05 -1.63533012e+06 -7.03388758e+05 2.68437236e+06 -6.76303929e+06 -6.74754062e+06 -6.24492383e+06 -3.73396976e+06 -3.39322873e+06 1.60977141e+06 1.40545268e+07 -5.12068198e+06 -3.02874911e+06 -9.72771882e+05 -4.84466401e+05 -4.73224306e+06 -4.82626118e+06] [ 1.07664998e+06 -3.24507506e+06 2.10132157e+06 -5.20711294e+06 2.69555648e+06 1.14188535e+06 -7.31670430e+05 -2.18462863e+05 3.05015340e+06 -5.55016944e+06 -5.28569932e+06 -5.06849752e+06 -2.61425510e+06 -2.36569429e+06 1.89307175e+06 1.23965237e+07 -4.10294635e+06 -1.96633185e+06 -4.76180051e+05 2.47537267e+05 -3.53005207e+06 -3.65838767e+06] [ 8.89336829e+05 -2.66056588e+06 1.75542095e+06 -4.05937207e+06 2.07485122e+06 1.02656077e+06 -5.63600116e+05 -5.13744781e+04 2.39775925e+06 -4.40798660e+06 -4.28134111e+06 -3.99332265e+06 -2.15741803e+06 -1.96386848e+06 1.66665461e+06 9.69133218e+06 -3.21612453e+06 -1.56467736e+06 -2.19217209e+05 2.30312245e+05 -2.89085998e+06 -2.96723698e+06] [ 4.52595625e+05 -2.03133397e+06 1.29671337e+06 -4.05480602e+06 1.69547385e+06 6.99974842e+05 -4.51980250e+05 -2.22481354e+05 1.93038111e+06 -3.91758671e+06 -3.50234838e+06 -3.58638106e+06 -1.64715323e+06 -1.54590016e+06 1.06558284e+06 8.23114913e+06 -2.94843355e+06 -1.32492919e+06 -3.37854436e+05 1.79540454e+05 -2.18626986e+06 -2.37975012e+06] [-1.87297016e+05 -2.00258724e+06 6.76445648e+05 -4.34758493e+06 1.11691249e+06 7.27051841e+04 -6.96339927e+05 -6.75102286e+05 1.32229846e+06 -3.89673116e+06 -3.37401646e+06 -3.61710583e+06 -1.70506974e+06 -1.66351752e+06 3.25291809e+05 7.33501443e+06 -3.05079842e+06 -1.52077435e+06 -7.39649403e+05 -2.43911090e+05 -2.13988905e+06 -2.38298128e+06] [-4.67274312e+05 -1.77462653e+06 1.56163745e+05 
-3.48574962e+06 5.41748633e+05 -2.92384296e+05 -8.22213675e+05 -7.19099963e+05 6.84412399e+05 -3.12038566e+06 -2.77610920e+06 -2.93139934e+06 -1.57600092e+06 -1.56005237e+06 -7.01874478e+04 5.49852764e+06 -2.46318120e+06 -1.43250534e+06 -7.80441511e+05 -4.46091154e+05 -1.86598144e+06 -2.03294981e+06] [-1.21228902e+05 -1.13560959e+06 2.90125969e+05 -2.31253091e+06 5.28891818e+05 -7.35172207e+04 -4.39171690e+05 -3.56682411e+05 6.25702996e+05 -2.12534803e+06 -1.84784894e+06 -2.00302698e+06 -9.69559302e+05 -9.65209036e+05 9.19236902e+04 3.87896489e+06 -1.66899933e+06 -8.81038749e+05 -4.24941946e+05 -1.48580318e+05 -1.17400196e+06 -1.29085330e+06] [-3.56720530e+05 -9.15839474e+05 -1.68623282e+05 -1.58734183e+06 3.54203629e+04 -2.97155353e+05 -5.77127606e+05 -4.42051109e+05 5.79127885e+04 -1.42929047e+06 -1.33846765e+06 -1.37219466e+06 -8.61752213e+05 -8.48172291e+05 -2.14333002e+05 2.33478007e+06 -1.16895165e+06 -8.11288963e+05 -4.79063754e+05 -3.45119275e+05 -9.47639325e+05 -1.02418208e+06] [-3.17705007e+05 -5.75681610e+05 -2.20041756e+05 -9.86418392e+05 -3.38061465e+04 -2.50864544e+05 -4.31751870e+05 -3.34940165e+05 -4.35099030e+04 -8.29716713e+05 -7.99002531e+05 -8.06030279e+05 -5.71743685e+05 -5.56901899e+05 -2.31069600e+05 1.33661259e+06 -6.88704957e+05 -5.42619396e+05 -3.52435283e+05 -2.80433318e+05 -6.01660156e+05 -6.61823851e+05] [-8.26669058e+04 -1.46664194e+05 -4.09764597e+04 -3.31754644e+05 4.50909114e+04 -5.31975213e+04 -1.10152759e+05 -8.09266086e+04 3.52724438e+04 -2.48201469e+05 -2.30398979e+05 -2.43157482e+05 -1.57950428e+05 -1.51880806e+05 -5.79118751e+04 4.84163663e+05 -1.92972415e+05 -1.44560192e+05 -7.98759887e+04 -4.99496833e+04 -1.59902648e+05 -1.89776210e+05] [-4.42809189e+04 -7.17498555e+04 -2.82761522e+04 -1.14611417e+05 -8.10294700e+03 -4.28445504e+04 -6.13601593e+04 -3.81699570e+04 -6.92510057e+03 -1.00527547e+05 -9.01497874e+04 -9.77884591e+04 -7.62727587e+04 -7.26680384e+04 -3.69223255e+04 1.91305885e+05 -7.48749691e+04 -7.28779813e+04 -4.23442081e+04 -4.15677204e+04 -7.56537937e+04 -7.70861913e+04] [-1.99096592e+04 -2.16333798e+04 -1.72933492e+04 -2.72730075e+04 -1.32357267e+04 -1.70249732e+04 -2.15187911e+04 -1.60542235e+04 -1.30972599e+04 -2.28002763e+04 -2.11235026e+04 -2.23231145e+04 -2.30035815e+04 -2.27333027e+04 -1.80289247e+04 2.83167387e+04 -1.86833565e+04 -2.32614382e+04 -1.66548959e+04 -1.76374019e+04 -2.23598912e+04 -2.24468731e+04] [-1.22129699e-01 -9.92827890e-01 6.55892736e-01 2.08326287e-01 8.43003551e-01 5.39990731e-01 -6.73717475e-01 6.02090332e-01 -3.17121804e-01 1.57920595e-01 6.76603011e-01 9.31269860e-01 4.79638463e-01 -6.67663629e-01 2.93478656e-01 2.51113145e-01 8.16376760e-01 5.00744160e-01 -7.17478333e-01 2.68644247e-01 -7.70944368e-01 -9.47974347e-02] [ 2.04748624e-01 -9.09724503e-01 5.73247476e-01 -8.08881842e-01 5.72312504e-02 -4.36344262e-01 4.42947039e-01 3.27271949e-01 -6.39028108e-01 1.50507973e-01 -4.75152266e-01 -6.92265453e-01 8.65475042e-01 4.05392514e-01 -8.66029518e-01 6.53541490e-01 -9.20814918e-01 -1.83162650e-01 8.32407587e-01 -5.55571836e-01 3.68606327e-01 -1.35694014e-01] [-6.93230455e-02 -1.74044049e-01 8.53889750e-01 -1.33993817e-01 -8.82181588e-01 7.75106798e-01 5.46758395e-01 -4.54159544e-01 6.88626923e-01 1.97738434e-01 -9.20653185e-01 2.65844567e-01 -7.72708895e-01 7.53507816e-01 9.76525793e-01 8.40299862e-01 5.48556620e-01 5.18729718e-01 6.12916535e-01 -9.50163128e-01 -3.59524770e-01 9.34212782e-01] [-3.49651989e-01 3.72925581e-01 6.69660551e-01 2.06035275e-01 2.47647500e-01 -5.76081697e-01 5.88030905e-01 
3.88980247e-01 4.08506472e-01 1.34953206e-01 4.44306431e-02 -3.37170674e-01 6.04268775e-01 -6.93927732e-01 -2.04720940e-01 -7.55718300e-01 -1.32972565e-01 8.71983785e-01 -2.45525961e-01 9.23864974e-01 -2.59170924e-01 9.03801079e-01] [ 6.04923465e-01 9.84274297e-01 -5.10202590e-01 -3.70327801e-01 -6.97834080e-01 4.77610055e-01 7.93959525e-01 4.37984861e-01 2.08802383e-01 4.23846188e-01 -3.64593465e-01 9.17852700e-01 6.91441040e-02 -9.31828389e-01 4.51626804e-01 8.84480533e-01 -8.96307188e-01 -5.93242556e-01 -9.06510528e-01 8.13265972e-01 5.66853586e-01 5.08971410e-01] [-3.52606778e-02 2.95917798e-01 8.76238691e-01 -7.72107673e-02 -6.10819178e-01 -8.98676038e-01 -6.62874921e-01 -2.96085732e-01 -6.65165220e-01 9.66562190e-01 1.93983146e-01 6.55555173e-01 6.61530607e-01 7.92565447e-01 -9.09185160e-01 3.17981356e-01 4.42387895e-01 9.15600872e-01 6.84593362e-02 2.67039133e-01 6.18390755e-01 -7.75829702e-01] [-9.43802074e+03 -9.99402114e+03 -9.93595887e+03 -7.82138490e+03 -9.96714636e+03 -7.79890749e+03 -1.00635906e+04 -7.71801575e+03 -9.93907861e+03 -7.90076316e+03 -1.01467698e+04 -7.80307704e+03 -1.01335139e+04 -1.01372627e+04 -7.73192816e+03 7.12006837e+02 -7.73067329e+03 -1.00984737e+04 -7.69458424e+03 -8.80884292e+03 -1.01154439e+04 -9.93883179e+03] [-7.84906792e+04 -8.36668578e+04 -8.27697564e+04 -6.53316093e+04 -8.24535189e+04 -6.43111970e+04 -8.44021183e+04 -6.41495884e+04 -8.21477776e+04 -6.62884861e+04 -8.51584071e+04 -6.54280710e+04 -8.46449780e+04 -8.46697115e+04 -6.38500577e+04 1.03327692e+04 -6.42093330e+04 -8.47412666e+04 -6.38751209e+04 -7.36159664e+04 -8.45864614e+04 -8.31090004e+04] [-5.59348424e+04 -7.01327653e+04 -6.62961743e+04 -3.24908477e+04 -6.14557829e+04 -4.15432615e+04 -6.87020545e+04 -4.37634961e+04 -6.18401070e+04 -4.62745991e+04 -6.79341860e+04 -4.55902930e+04 -6.92149334e+04 -6.74136513e+04 -3.96474423e+04 5.20694544e+04 -4.11663852e+04 -6.92020399e+04 -4.38098112e+04 -5.65323956e+04 -7.04186910e+04 -6.57219706e+04] [ 5.20097053e+04 -2.70265569e+04 4.43744184e+04 5.43838351e+04 5.05902128e+04 6.06656975e+04 2.48745335e+03 3.58398277e+04 5.30526941e+04 -5.30903003e+03 -3.27219372e+04 1.13218249e+03 -8.33795664e+03 -1.48828540e+03 7.64623786e+04 1.81775375e+05 1.59560689e+04 -1.28385738e+03 3.54459339e+04 2.31784891e+04 -2.57595535e+04 -1.12851889e+04] [ 3.30048079e+05 5.33859324e+04 3.45890887e+05 2.55372188e+05 3.37846291e+05 3.27368947e+05 1.71206809e+05 2.75690018e+05 3.53913370e+05 8.90679250e+04 1.04967027e+04 1.07939563e+05 1.03693766e+05 1.30460008e+05 4.01013486e+05 6.22760777e+05 1.62044429e+05 1.52990821e+05 2.80104409e+05 2.26660059e+05 4.62988042e+04 8.83844326e+04] [ 3.48074302e+05 -3.72763425e+05 4.11448790e+05 2.93673678e+04 3.94607724e+05 3.67823716e+05 -6.34236331e+04 2.18615871e+05 4.39833145e+05 -2.84697889e+05 -4.66163710e+05 -2.32463862e+05 -2.67028734e+05 -2.08671317e+05 5.14299780e+05 1.54617651e+06 -1.12809042e+05 -1.16017333e+05 2.21663186e+05 1.09395220e+05 -4.20750137e+05 -3.25231633e+05] [ 3.73242606e+05 -9.97119333e+05 5.44030320e+05 -6.22500332e+05 5.43353570e+05 3.95735833e+05 -3.95512057e+05 7.16484846e+04 6.34333633e+05 -1.03648326e+06 -1.27480130e+06 -9.21858724e+05 -8.06521064e+05 -7.22009773e+05 6.35642502e+05 3.02309238e+06 -6.80367573e+05 -5.46778365e+05 5.94895651e+04 -3.49156792e+04 -1.10270150e+06 -9.71692258e+05] [ 1.00547390e+06 -9.79292878e+05 1.46958523e+06 -7.94334841e+05 1.39851372e+06 9.68169675e+05 6.88422292e+04 4.72181892e+05 1.61798706e+06 -1.39840269e+06 -1.57168337e+06 -1.19598106e+06 -6.50766426e+05 
-5.29862442e+05 1.38886736e+06 4.75081689e+06 -8.51779901e+05 -3.26615704e+05 4.55593385e+05 3.72369303e+05 -1.12215023e+06 -1.01002031e+06] [ 7.22499160e+05 -1.30018336e+06 1.22654002e+06 -1.55244380e+06 1.43458587e+06 7.48536387e+05 -2.42830838e+05 2.44462359e+05 1.60653779e+06 -1.87581494e+06 -1.97658695e+06 -1.67525939e+06 -1.00993050e+06 -8.53603726e+05 1.15789470e+06 5.67918486e+06 -1.24107059e+06 -6.37413322e+05 2.44768937e+05 1.42493902e+05 -1.46993096e+06 -1.38635524e+06] [ 4.54070458e+05 -1.46045910e+06 9.30198709e+05 -2.11587240e+06 1.31614336e+06 4.93038663e+05 -4.41631505e+05 -5.88399691e+04 1.43634456e+06 -2.22538380e+06 -2.17911228e+06 -2.03854712e+06 -1.22153397e+06 -1.06063589e+06 8.13510913e+05 5.97935694e+06 -1.59005071e+06 -8.70368923e+05 -9.33690167e+04 -1.89097464e+04 -1.64136195e+06 -1.62615762e+06] [ 5.08317037e+05 -1.41603265e+06 1.00006361e+06 -2.06008146e+06 1.29196709e+06 5.05764489e+05 -3.67140071e+05 -7.42065334e+04 1.45374678e+06 -2.23511351e+06 -2.13157861e+06 -2.03824485e+06 -1.19343914e+06 -1.03128868e+06 8.39202750e+05 5.80678756e+06 -1.60692611e+06 -8.47930603e+05 -1.40808552e+05 -3.11701878e+03 -1.61297411e+06 -1.60769967e+06] [ 6.70092211e+05 -1.10169204e+06 1.13892546e+06 -1.82413620e+06 1.31491729e+06 7.29101738e+05 -1.12083276e+05 6.43019976e+04 1.51797603e+06 -1.93277842e+06 -1.72545386e+06 -1.73138968e+06 -8.27054189e+05 -7.40998187e+05 9.44286855e+05 5.29383355e+06 -1.36697127e+06 -5.57043086e+05 -3.40781486e+04 2.47080583e+05 -1.26320745e+06 -1.27642074e+06] [ 6.19062865e+05 -7.41326602e+05 9.18306291e+05 -1.26844710e+06 1.04396800e+06 6.42776918e+05 -5.64913136e+04 1.32225346e+05 1.18676691e+06 -1.33902184e+06 -1.16795328e+06 -1.19531458e+06 -5.23198862e+05 -4.81897670e+05 7.70360691e+05 3.86921008e+06 -9.19182864e+05 -3.37121265e+05 5.46361091e+04 2.86867219e+05 -8.61683050e+05 -8.51891564e+05] [ 1.53109808e+05 -6.91896111e+05 4.49292325e+05 -1.61120871e+06 6.60311475e+05 2.60389791e+05 -2.45083004e+05 -1.95612056e+05 7.57047258e+05 -1.37126537e+06 -1.08509618e+06 -1.26281261e+06 -5.72681830e+05 -5.69346199e+05 2.36388294e+05 3.37186711e+06 -1.04162714e+06 -5.05533020e+05 -2.49694276e+05 2.86519899e+04 -8.15320472e+05 -8.73925424e+05] [-1.56557599e+05 -8.39046080e+05 1.97559917e+05 -1.72764247e+06 3.23387289e+05 -2.24251537e+04 -4.00209345e+05 -3.49034277e+05 4.46053677e+05 -1.47332129e+06 -1.24296247e+06 -1.37375265e+06 -7.44718117e+05 -7.41637580e+05 4.80316308e+03 2.96346190e+06 -1.14869373e+06 -6.79790299e+05 -3.69994941e+05 -2.09286158e+05 -9.47436146e+05 -1.00605806e+06] [-2.44929408e+05 -8.82417244e+05 6.72039058e+04 -1.49896263e+06 1.66468736e+05 -1.88085411e+05 -4.75439958e+05 -3.57623504e+05 2.64926629e+05 -1.35785702e+06 -1.23660768e+06 -1.27664707e+06 -8.33299697e+05 -8.01312700e+05 -6.74585642e+04 2.53246956e+06 -1.05187773e+06 -7.25282903e+05 -3.62226495e+05 -3.24800926e+05 -9.71669695e+05 -1.00003230e+06] [-1.58389041e+05 -6.09234536e+05 1.76308056e+04 -9.61751635e+05 1.00996421e+05 -1.52850142e+05 -3.53178532e+05 -2.30102526e+05 1.42703691e+05 -8.93782929e+05 -8.37250132e+05 -8.47447803e+05 -5.81556973e+05 -5.46801112e+05 -5.74870234e+04 1.66007162e+06 -6.94601773e+05 -4.96019292e+05 -2.32771378e+05 -2.34839326e+05 -6.56290028e+05 -6.61130845e+05] [-2.14042041e+05 -4.31936864e+05 -1.06729893e+05 -7.33546343e+05 -2.83170398e+04 -1.83315172e+05 -2.97848174e+05 -2.43448068e+05 -9.70364028e+03 -6.40913857e+05 -5.82252630e+05 -6.15801765e+05 -4.08621816e+05 -4.01068388e+05 -1.55552098e+05 1.01041651e+06 -5.34385493e+05 
-3.90770015e+05 -2.49438405e+05 -2.23757250e+05 -4.51360962e+05 -4.75860200e+05] [-7.72628190e+04 -1.51163154e+05 -2.75926642e+04 -3.56411766e+05 3.04770136e+04 -5.17413531e+04 -9.94916020e+04 -9.61414310e+04 3.73623728e+04 -2.84171465e+05 -2.32041469e+05 -2.72012232e+05 -1.36471671e+05 -1.41913427e+05 -5.88880909e+04 5.23381416e+05 -2.38551186e+05 -1.44417925e+05 -1.04423044e+05 -5.73432798e+04 -1.56602961e+05 -1.82789546e+05] [-9.56457643e+03 -4.47548039e+04 -2.70180913e+03 -7.14041702e+04 6.90336259e+03 -1.17566736e+03 -2.65582561e+04 -4.90166693e+03 9.62288004e+03 -6.50647266e+04 -6.33244616e+04 -6.20490538e+04 -4.06482107e+04 -3.82696564e+04 3.13640243e+03 1.49080465e+05 -5.00001948e+04 -3.50785570e+04 -1.10823070e+04 -6.38401981e+03 -4.55662610e+04 -4.61207429e+04] [ 2.68311067e+03 -5.55098254e+03 2.64181972e+03 -7.11342475e+03 4.18633251e+03 4.74391660e+03 -2.06907008e+03 5.30317365e+03 4.81014276e+03 -6.85801392e+03 -8.16352242e+03 -6.29206278e+03 -4.70885443e+03 -3.84452019e+03 5.74931192e+03 3.33388981e+04 -3.62317392e+03 -2.71555460e+03 3.66703164e+03 2.77339608e+03 -5.67505716e+03 -4.78081072e+03] [-2.45217782e-01 -9.92762732e-02 -4.00419121e-01 4.53175308e-01 2.89065322e-01 3.04723500e-01 3.02826098e-01 9.49705992e-01 -1.82438983e-01 4.55746066e-01 6.79021029e-01 7.88643764e-01 5.85206524e-02 -7.92185225e-01 9.61341711e-01 4.24652891e-01 -8.59609031e-01 3.56644298e-01 4.56220490e-01 -1.97765422e-02 -2.30368410e-01 -1.00944337e-01] [ 1.68430645e-01 -4.35221766e-01 9.60817617e-01 6.71461821e-01 -2.25723705e-01 -2.34248047e-01 -1.07092827e-02 6.73080844e-01 8.27788037e-01 -3.28854698e-01 -5.76798063e-01 4.68469156e-01 -3.44371426e-01 5.69930709e-01 -2.11557096e-02 3.17011413e-01 -1.19899334e-01 -1.37631013e-01 3.95475861e-01 2.08595126e-01 -2.06224308e-01 -3.85005464e-01] [-6.89296053e-01 3.47119998e-01 1.11572269e-01 -8.32950788e-01 -9.10351962e-01 -9.87183895e-01 -6.26986331e-01 -7.87151589e-01 -3.18243509e-01 9.04690000e-01 -5.53091427e-01 -2.01593444e-02 3.09076738e-01 4.42211157e-01 -7.95917397e-01 -8.47824929e-01 -3.66341155e-02 -9.82310920e-01 3.24283981e-01 7.63118387e-01 1.83983344e-01 -6.82721365e-01] [ 9.37436219e-01 5.02488033e-01 -5.23104155e-01 8.97228509e-01 2.92298202e-01 -1.04851397e-01 -9.80234351e-01 1.39575188e-01 -8.39676431e-02 -4.57281278e-01 3.90584159e-01 8.59482798e-01 -6.26174923e-01 -2.88869063e-01 -6.00717080e-01 -5.72269431e-01 -3.49021356e-01 1.59324863e-01 6.42235690e-01 -9.65999123e-01 -5.61543254e-01 -1.57721714e-02] [-3.25940141e-01 2.83997474e-01 -6.72544923e-01 6.28241251e-01 2.24384730e-02 5.31556822e-01 -7.30470426e-01 -5.49468146e-01 8.90289458e-01 7.22279020e-01 -7.28983252e-01 2.36932536e-02 -9.46700638e-01 9.85930646e-01 9.97244421e-01 -1.61646307e-01 -4.89658502e-02 5.19540944e-01 -6.86045213e-01 -7.37154712e-01 -2.51273772e-01 -4.04855754e-01] [ 5.65352246e-01 -9.54120566e-01 4.19378270e-01 9.11165898e-01 5.80504452e-01 -9.16408651e-01 6.62528090e-02 -3.59118392e-01 -3.64863284e-02 -3.26993320e-01 -9.69006722e-02 -4.19949159e-01 -5.66775952e-01 -7.97385198e-01 9.45495833e-01 5.11025629e-01 3.77642164e-01 -7.35406191e-01 -6.09162838e-01 7.84113443e-01 1.98580246e-01 5.46072669e-01] [-7.80083049e-01 9.49331955e-01 -9.75176492e-01 6.47670506e-01 1.23948288e-01 1.47046120e-01 -4.40086390e-01 1.29226882e-01 3.63278769e-01 -1.49917881e-01 8.52580653e-01 5.58512106e-01 8.71820168e-01 -9.79969754e-02 8.78331262e-01 -5.35347883e-01 8.20877879e-01 5.70425486e-01 1.73566735e-01 -2.91739576e-01 8.04099897e-01 -7.15364586e-01] [ 1.39678418e-01 
-8.95080705e-01 -8.89090697e-01 5.19721936e-01 5.11599231e-01 7.65731135e-01 -6.72551948e-01 -1.26665664e-01 -7.71157575e-02 3.09064166e-02 9.22944702e-01 -7.86986195e-01 -5.75166842e-01 -2.07536652e-01 -9.17373954e-01 -4.58431609e-01 4.67015464e-01 -8.33862174e-01 9.22292984e-01 5.80944453e-01 -9.14520621e-01 2.38487623e-01] [-2.17457270e-01 3.75694498e-01 -9.64639194e-01 4.38568318e-01 7.03800212e-01 6.66259318e-01 -7.26265413e-01 9.17717891e-01 -7.72819806e-01 -9.14470703e-02 9.35010880e-01 -5.89586636e-01 3.43112763e-01 -9.96042738e-01 -4.68547264e-01 6.57016602e-01 7.04826589e-01 -6.22042565e-01 -6.63448448e-01 6.84854877e-01 -8.34101085e-02 7.52266189e-01] [ 2.53940999e+04 2.65451241e+04 2.64633886e+04 2.17277199e+04 2.72064630e+04 2.25348169e+04 2.64334230e+04 2.12307622e+04 2.71765313e+04 2.15230325e+04 2.66840830e+04 2.13325312e+04 2.72014689e+04 2.72145760e+04 2.20013820e+04 8.78717908e+01 2.21244268e+04 2.63955573e+04 2.14310353e+04 2.33528329e+04 2.71210537e+04 2.65739715e+04] [ 5.97534287e+04 6.24654850e+04 6.22624760e+04 5.11824742e+04 6.40017867e+04 5.30523767e+04 6.22018272e+04 5.00021976e+04 6.39329540e+04 5.07007835e+04 6.28042807e+04 5.02467320e+04 6.40145502e+04 6.40354502e+04 5.17971255e+04 2.74298755e+02 5.20931424e+04 6.21278924e+04 5.04648250e+04 5.49880661e+04 6.38133933e+04 6.25329752e+04] [ 1.08949003e+05 1.06607736e+05 1.12238351e+05 8.90574273e+04 1.16902529e+05 9.49514970e+04 1.06946598e+05 8.87001852e+04 1.16385262e+05 8.74725694e+04 1.08860040e+05 8.70842574e+04 1.09431208e+05 1.09638006e+05 9.25246856e+04 2.08442351e+04 9.05234749e+04 1.08104935e+05 8.91783206e+04 9.76420521e+04 1.07580688e+05 1.07305183e+05] [ 9.89546662e+04 7.58392112e+04 9.88508009e+04 6.90190527e+04 1.07173102e+05 7.93656695e+04 7.87955676e+04 7.20153644e+04 1.03395379e+05 6.44610653e+04 8.08948904e+04 6.54921276e+04 7.63418293e+04 7.97875627e+04 7.81586905e+04 6.02482119e+04 7.18059419e+04 8.31346309e+04 7.29409974e+04 7.88404789e+04 7.28427403e+04 7.88246919e+04] [ 1.34382147e+05 1.15686310e+05 1.38624075e+05 9.73035444e+04 1.44574114e+05 1.14292151e+05 1.19208027e+05 1.04537786e+05 1.42539436e+05 9.47000494e+04 1.18206566e+05 9.58428573e+04 1.18497050e+05 1.19786088e+05 1.11096799e+05 7.57200237e+04 1.00222115e+05 1.24317782e+05 1.04740998e+05 1.16237359e+05 1.12639161e+05 1.17729920e+05] [ 2.17276597e+05 1.51053232e+05 2.35288794e+05 1.20629492e+05 2.29494917e+05 1.95851707e+05 1.77560891e+05 1.59334351e+05 2.38745707e+05 1.07250570e+05 1.37912306e+05 1.12268821e+05 1.73968438e+05 1.68648755e+05 1.93836461e+05 1.83231133e+05 1.20558107e+05 1.77864470e+05 1.54664867e+05 1.83291109e+05 1.46028574e+05 1.52431776e+05] [ 2.10663232e+05 8.24742823e+04 2.33941087e+05 9.41963967e+04 2.19720395e+05 1.87316566e+05 1.30336921e+05 1.47241444e+05 2.34623540e+05 5.57882049e+04 6.31532536e+04 6.43774194e+04 1.05492481e+05 1.10146499e+05 2.05266519e+05 2.85495469e+05 8.65168489e+04 1.26825298e+05 1.42882655e+05 1.50080843e+05 6.80580680e+04 8.66057442e+04] [ 1.78058747e+05 -1.52820093e+03 1.97573271e+05 -1.84457884e+03 2.04979380e+05 1.59481114e+05 6.43669701e+04 1.02241415e+05 2.21700427e+05 -3.52186762e+04 -2.20181070e+04 -2.25057192e+04 2.00131982e+04 3.36946905e+04 1.77884874e+05 5.02385983e+05 7.65268012e+03 5.25561680e+04 9.01833835e+04 1.01047888e+05 -2.39039653e+04 -2.71486554e+02] [ 1.69640593e+05 1.50184908e+04 1.74324102e+05 2.30843012e+03 1.93220446e+05 1.50220252e+05 6.88307018e+04 9.06456204e+04 2.05378495e+05 -2.98211870e+04 -7.33715160e+03 -1.85483944e+04 4.35513398e+04 
5.06088659e+04 1.56175768e+05 4.87564183e+05 3.63029734e+03 5.98142383e+04 7.57981044e+04 1.05451443e+05 4.44696865e+03 2.06484477e+04] [ 2.27803817e+05 1.18781410e+04 2.45145825e+05 -4.58288005e+04 2.60468645e+05 2.06322065e+05 1.11545099e+05 9.88358669e+04 2.77562217e+05 -8.62654731e+04 -2.88150247e+04 -6.65557278e+04 7.35188891e+04 6.93723345e+04 2.06437842e+05 5.51296029e+05 -4.14030322e+04 8.82774922e+04 7.24640177e+04 1.64231911e+05 1.36262305e+04 1.59666067e+04] [ 2.37066977e+05 6.77532741e+04 2.59870567e+05 8.27878644e+04 2.66583264e+05 2.11077710e+05 1.49102935e+05 1.49985160e+05 2.79756474e+05 2.23831967e+04 5.26548887e+04 3.72375595e+04 1.04386549e+05 1.08545482e+05 2.34195584e+05 3.80023161e+05 5.95247296e+04 1.32233479e+05 1.40367645e+05 1.68294461e+05 5.79797212e+04 7.26791501e+04] [ 1.80983935e+05 1.14493717e+05 1.99491330e+05 7.63166112e+04 2.09692000e+05 1.70681196e+05 1.42925874e+05 1.27664128e+05 2.18235119e+05 7.40549013e+04 1.15494174e+05 7.94680182e+04 1.29891123e+05 1.28468050e+05 1.63816827e+05 2.57898958e+05 8.90516344e+04 1.37314200e+05 1.20922494e+05 1.47538995e+05 1.03094502e+05 1.09101577e+05] [ 5.75414453e+04 1.73567314e+02 6.41578407e+04 -4.35103895e+04 6.82951610e+04 5.16875642e+04 2.19775538e+04 1.16320645e+04 8.10846508e+04 -3.51698741e+04 -5.53874944e+03 -2.95763177e+04 1.26362847e+04 6.48530772e+03 4.09403857e+04 2.14118925e+05 -2.21729022e+04 1.22654237e+04 2.15679843e+03 3.78088136e+04 -9.52685804e+03 -8.17969689e+03] [-5.25943930e+04 -1.26900492e+05 -4.53636160e+04 -1.42359801e+05 -4.59388459e+04 -5.54966313e+04 -9.49039047e+04 -8.80637387e+04 -2.98080228e+04 -1.45851608e+05 -1.39001990e+05 -1.37671558e+05 -1.18200707e+05 -1.22174072e+05 -5.35657885e+04 2.18469595e+05 -1.30118498e+05 -1.13403752e+05 -9.58420883e+04 -7.56058436e+04 -1.40455305e+05 -1.37695042e+05] [-3.64803470e+04 -7.00435478e+04 -3.21675281e+04 -8.42550643e+04 -2.13329091e+04 -4.12647708e+04 -5.73739300e+04 -5.04281721e+04 -2.07395684e+04 -8.47954347e+04 -8.08757964e+04 -8.26298960e+04 -6.82414322e+04 -6.69960179e+04 -3.93950651e+04 1.15483303e+05 -7.70220476e+04 -6.68826708e+04 -5.25006894e+04 -4.99699046e+04 -7.43381024e+04 -7.41306800e+04] [ 6.63863125e+03 -8.94974971e+03 1.98360635e+04 -6.62828263e+04 3.57945210e+04 -1.04780133e+02 3.46603030e+03 -1.52430626e+04 3.43354336e+04 -5.58485133e+04 -2.92658582e+04 -5.42624324e+04 -5.19318910e+03 -4.96253257e+03 -6.30467244e+03 1.14589853e+05 -4.88423274e+04 -1.15252671e+04 -2.00426442e+04 4.59703001e+03 -7.72983753e+03 -1.75050227e+04] [ 2.72573110e+04 3.00404033e+04 3.93502833e+04 -2.00338649e+04 4.99905085e+04 2.73600512e+04 3.59961004e+04 1.36427843e+04 4.92026076e+04 -8.19396490e+03 1.36302161e+04 -8.40219642e+03 3.47435171e+04 3.23587502e+04 1.81553071e+04 6.95724445e+04 -6.39910166e+03 2.48376515e+04 1.04470123e+04 3.52663523e+04 3.30311449e+04 2.17560602e+04] [ 1.81389542e+04 3.31122845e+03 2.27953841e+04 -1.22591183e+04 2.82137988e+04 1.72539122e+04 1.25684296e+04 1.39677114e+04 2.82854911e+04 -1.25973407e+04 -4.59790509e+03 -1.13663764e+04 5.09564777e+03 6.96859554e+03 1.99015505e+04 5.51948084e+04 -6.58845113e+03 7.47965437e+03 1.08872436e+04 1.82863837e+04 4.05664061e+03 2.43839537e+03] [-2.13715898e-01 -8.16647602e-01 -9.83412567e-01 5.37288628e-01 -4.64964356e-01 -7.78150338e-01 2.16348752e-01 2.56440702e-01 -9.02779893e-01 -6.30071630e-01 8.62481936e-01 -7.74783063e-01 -9.51922070e-01 7.20066700e-01 -5.60350685e-01 6.98578360e-01 2.88478476e-01 5.42921279e-01 2.56388443e-01 -6.52915058e-01 -6.26186934e-01 -6.38771620e-02] 
[ 3.77627347e-01 8.05947882e-01 -5.03838411e-01 -9.54309407e-01 9.30470601e-01 2.54470556e-01 5.85521623e-01 7.77131372e-01 2.65719295e-01 -1.32294335e-01 5.56736473e-01 3.27631341e-01 4.91213548e-01 -3.63928989e-01 -6.81418188e-01 3.82392101e-01 -4.79567517e-02 -2.24710021e-01 2.65751831e-01 -6.58988920e-01 -3.80228430e-02 3.76756269e-01] [ 2.15923264e-01 -9.01243099e-01 -5.12163339e-01 -6.46891975e-01 -6.01737920e-01 1.37789993e-01 8.34069797e-01 -5.86730340e-01 -8.34645932e-01 7.42084400e-01 4.10620452e-01 2.61308496e-01 -9.85771409e-01 2.18523899e-01 5.93627420e-01 5.23846294e-02 -6.82588792e-01 3.44199474e-01 -6.96614555e-01 -1.20452404e-01 5.06671804e-02 -1.04763298e-01] [ 2.89332897e-01 5.28976705e-02 -6.70493041e-01 7.59589681e-01 -2.58522641e-01 8.64368569e-01 -9.84374033e-01 -7.50404886e-01 4.20620841e-01 -8.89469682e-01 5.87415133e-01 1.30375957e-01 -1.23053921e-02 -2.72228004e-01 -1.16956980e-02 -4.59358931e-01 -6.13079611e-01 4.68105185e-02 -7.97489897e-01 -2.13255381e-01 3.54207401e-01 -9.94110523e-02] [ 4.74070786e-02 2.57367202e-01 1.59671407e-01 -7.73961732e-01 1.13246870e-01 2.84189905e-01 -3.71551403e-01 3.96017297e-01 6.21237906e-01 -9.92443940e-01 1.50994640e-01 -7.83110180e-01 -9.48166123e-01 4.72378151e-01 1.75724586e-01 7.06493294e-01 -7.90381826e-01 4.88861759e-01 7.96823363e-01 -1.61232204e-01 -1.09601674e-01 -8.85478998e-01]] syn1 = [[-2.61926560e+01 -2.17018562e+01 9.24287393e-01 -4.41683299e+00 -2.98081885e+01 -9.09691156e+00 -5.89801749e+00 9.16195332e+00 1.98788050e+01 -4.85461233e+00 -7.05667014e+00 2.41790367e+01 1.40198702e+01 -5.22752990e+00 3.83246629e+01 -1.16662206e+01 -1.34469382e+01 -3.76886030e-01 -2.05684117e+01 -7.14753372e+00 1.42220600e+01 3.85763798e+01 2.94129704e+01 1.42908343e+01 -6.13686573e+00 -1.18466949e+01 2.03255276e+01 3.32767945e+00 -3.17105978e+01 -1.51552357e+01] [ 3.04992618e+01 -1.75562587e+01 -1.61180736e+00 -2.92082386e+01 -3.23007749e+01 -3.96224702e+01 -4.15370368e+00 -5.04635956e+00 5.28453177e-01 -3.37349902e+01 8.92970400e+00 1.57595633e+01 -1.01398468e+01 -1.83365630e+01 2.40554735e+01 1.42578095e+01 -2.78731169e+01 -6.59614104e+00 4.94182830e+00 -1.81490324e+00 -5.86968609e+00 2.78501468e+01 6.07652663e+00 6.29660340e+00 -2.01993908e+01 -6.93605722e+00 1.83098744e+00 -1.38079956e+00 -1.83071422e+01 -1.41513647e+01] [-4.86863520e+01 1.85095503e+00 1.32234162e+01 -1.99215671e+01 -3.88574339e+01 -8.44771698e+00 2.12266803e+00 1.95591251e+01 2.25175581e+01 -2.72724773e+01 -1.27095518e+01 3.41168826e+01 1.16208217e+01 -6.51392905e+00 3.91557742e+01 -1.63570548e+01 -4.11408473e+01 8.17604453e-01 -3.04895170e+01 -2.26191678e+01 1.15345024e+01 4.86070338e+01 2.32093290e+01 1.94063116e+01 -1.35622403e+01 6.06632284e+00 1.36197135e+01 6.82874100e+00 -4.66163596e+01 -7.86014931e+00] [-5.76640273e+01 -3.32207829e+01 1.04426523e+01 9.77538486e+00 9.72122986e+01 2.41803748e+01 3.47001832e+00 2.58905592e+01 3.00735538e+01 -5.40792994e+00 -1.76105558e+01 6.62502974e+00 2.49372096e+01 -4.65384747e+01 5.81832834e+01 -5.76794039e+00 -9.78859829e-01 1.06798554e+01 2.67213869e+01 -6.40699026e+00 1.20483192e+01 4.37849127e+01 3.25768361e+01 9.89691170e+00 -2.99056385e+01 2.67110054e+01 3.11324963e+01 1.74544151e+01 5.50934292e+00 -5.07022779e+01] [-2.46793747e+01 7.23247465e+00 9.23236786e+00 -4.50857618e+01 -2.17781403e+01 -3.74009240e+01 8.16085452e+00 1.07576876e+01 1.41180983e+01 -3.72867820e+01 1.31052402e+01 2.03164265e+01 -7.77708354e+00 -3.30683022e+01 3.02845093e+01 -1.33064375e+01 -5.71610142e+01 1.64457616e+01 -2.34090088e+01 
-1.22241983e+01 -2.30363114e+01 3.31417374e+01 5.61310643e+00 -1.45360340e+00 -2.75446588e+01 3.68649361e+01 1.11880998e+00 -1.05673440e+00 -4.03326730e+01 -2.31258256e+01] [-1.67042229e+01 -3.12983233e+01 -1.99803313e+00 7.15309306e+00 -3.59361528e+01 -7.69897764e+00 -2.17268866e+00 8.53932198e+00 2.08131419e+01 -3.70350515e-01 8.89366576e-02 1.57391260e+01 1.84440547e+01 -2.02823972e+01 3.99121691e+01 -7.63427615e-01 -5.28451845e+00 8.67519209e-01 -6.69350665e+00 2.68231391e+00 1.65282313e+01 3.83485027e+01 2.82144807e+01 1.43182312e+01 -1.74516035e+00 -9.85860191e+00 2.13376348e+01 4.99317980e+00 -3.01832411e+01 -2.33386668e+01] [-1.87254951e+01 -2.29472747e+01 -8.45737677e+00 3.68936026e+00 -4.36223125e+01 -9.30816198e+00 2.84372535e+00 6.84514816e+00 1.74657516e+01 -2.14505834e+01 -1.48310980e+01 2.83835957e+01 1.66336615e+01 -8.96635850e+00 4.25673068e+01 2.64182611e+00 -2.46888511e+01 -1.23029363e+01 -1.24798419e+00 -5.19599285e+00 7.28290344e+00 4.57574264e+01 2.70445025e+01 1.52639129e+01 -8.09330807e+00 5.11620741e+00 1.74496157e+01 -2.87626786e+00 -2.73642675e+01 -2.01722481e+01] [-2.76926602e+01 -1.03724726e+01 8.89496399e+00 -1.80772553e+01 3.91669617e+01 -8.08488429e+00 -4.17304819e+00 1.11149353e+01 2.28003825e+01 -2.93881507e+01 3.29699683e-01 1.43071392e+01 1.12002603e+01 -4.38687367e+01 3.94459265e+01 -5.37813504e-01 -3.01645011e+01 5.50823867e+00 1.22634621e+01 -2.26783077e+01 2.43228244e+00 3.33142581e+01 1.25832169e+01 3.41665207e+00 -3.13194298e+01 2.15354483e+01 1.02154434e+01 1.02062086e+01 -2.32201220e+01 -3.20321029e+01] [-3.12229346e+01 -3.49792839e+00 2.70151207e+00 -2.00045926e+01 -5.33918035e+01 -1.43825055e+01 6.21333962e+00 6.73362075e+00 1.58333173e+01 -1.85081351e+01 -7.77708856e+00 2.57170869e+01 1.12126244e+01 -3.77925824e+00 3.01926886e+01 -1.61205892e+01 -3.39982558e+01 -4.71586992e+00 -2.33649518e+01 -1.42006673e+01 1.36004434e+00 3.35247931e+01 1.92735500e+01 1.21840141e+01 -1.36853859e+01 8.83242864e+00 1.31928004e+01 -3.89580565e+00 -4.75879321e+01 -1.18835132e+01] [ 1.72035405e+01 -3.64463048e+01 -9.73266888e+00 4.70442348e+00 4.60045093e+01 -2.46524879e+01 2.02004992e+00 3.12393086e+00 1.18273053e+01 -1.08904239e+01 -5.28322244e+00 -1.63172478e+00 7.15194362e+00 -4.78785513e+01 4.44845095e+01 2.30598120e+00 1.89510834e+00 2.57030749e+01 2.44955439e+01 -1.59619051e+00 1.16462978e+00 2.74191642e+01 2.45937601e+01 -4.13810747e+00 -2.29328552e+01 2.96505497e+01 1.45590892e+01 -3.96499950e+00 1.43779608e+01 -3.97201727e+01] [ 7.32574625e+01 -3.73611088e+01 -1.11789453e+01 -6.18072339e+00 -3.12261662e+01 -3.07968116e+01 -1.02509215e+01 -9.08090593e+00 -5.87725535e+00 -9.26780522e+00 1.24251163e+01 -1.56581712e+00 2.03831143e+00 -6.96318582e-03 2.19157152e+01 8.93853021e+00 2.70695804e+00 5.29666782e+00 -3.56819269e-01 -9.08159554e-01 1.18614583e+01 2.33886427e+01 1.76357576e+01 4.52481792e+00 -1.45223994e+00 -2.38520500e+01 4.32364185e+00 -7.14749609e+00 4.81080196e+00 -1.18556656e+01] [ 1.21584744e+01 -3.97774961e+01 -1.06080628e+01 1.05429841e+01 3.50404208e+01 -1.78026757e+01 -1.29540611e+00 2.42382814e+00 1.47300431e+01 -7.88531950e+00 -8.10407167e+00 -9.60131415e-02 1.13777954e+01 -3.73413221e+01 4.62263688e+01 5.22184704e+00 4.65040806e+00 1.92609120e+01 2.77031786e+01 -1.81976736e+00 5.44747835e+00 2.80648818e+01 2.60830845e+01 -4.45654318e-01 -1.92346644e+01 2.26695515e+01 1.69945606e+01 -3.58678496e+00 4.64697223e+00 -3.97459909e+01] [ 2.46290180e+01 -4.87665973e+01 -8.92899977e+00 6.67504548e+00 -6.25423354e+01 -5.40955157e+00 -1.72903209e+00 
-6.36279653e+00 6.68598726e+00 -2.63240218e-01 -7.65560962e+00 8.25895334e+00 1.20714543e+01 1.07619343e+01 2.92297986e+01 9.56418333e+00 -2.77370635e+00 -3.44407667e+01 5.64303938e+00 1.85944948e+01 1.88695373e+01 2.52317121e+01 2.58232156e+01 1.04901917e+01 7.75689624e+00 -4.25292534e+01 2.22199095e+01 -2.84605585e+00 -5.85829350e+00 -1.68673574e+01] [ 1.07741380e+01 -3.14744552e+01 2.17003475e+00 -1.38141778e+01 -2.65672061e+01 -1.48034709e+01 6.03560773e+00 8.35135018e-01 1.03913061e+01 -1.45979528e+01 8.84147687e-01 1.22666742e+01 3.67990814e+00 -9.54475318e+00 3.07020837e+01 8.37914549e+00 -1.89043586e+01 -1.64069662e+01 6.86849181e+00 8.97141270e+00 2.78642858e+00 2.73728918e+01 1.85234363e+01 1.14783922e+01 -4.81527239e+00 -1.41957744e+01 1.50361998e+01 -3.09482592e+00 -1.10190748e+01 -1.88323648e+01] [-6.81039124e+01 -2.77240106e+00 8.55220358e+00 1.38570322e+00 1.72957668e+01 6.02974732e+00 -2.77395586e+00 1.31466346e+01 2.41321484e+01 -1.36280866e+01 -1.03093004e+01 1.75671622e+01 1.63528693e+01 -3.31364416e+01 3.74020555e+01 -6.16689635e+00 -2.44203563e+01 2.11409660e+00 -4.75141495e+00 -1.08613221e+01 5.21848330e+00 3.48642840e+01 1.97886929e+01 8.62517439e+00 -1.90195836e+01 1.52279799e+01 1.72004444e+01 1.33344336e+01 -3.23105006e+01 -2.14517795e+01] [-5.89784528e+01 1.72894151e+00 -2.49453530e+01 -3.34549060e+01 -1.25455707e+02 1.95494682e+00 -2.60282717e+01 -4.27596791e+01 -3.20993610e+01 -2.21328658e+00 -4.27431090e+01 -8.72323434e+00 -3.07644248e+01 5.43781649e+00 -3.75193459e+01 -5.84617188e+01 9.19537372e-01 -8.24651783e+01 -6.11394605e+01 -4.79431722e+01 -4.61390533e+01 -2.30454140e+01 -3.10646125e+01 -2.27376114e+01 7.83133889e-01 -2.90731222e+01 -3.31021495e+01 -2.85066117e+01 -1.43154145e+02 2.31456037e-01] [ 7.32557215e+00 -3.26594654e+01 -1.27369626e+01 -3.62600831e+00 4.29069580e+01 -3.09872264e+01 1.15167967e+00 6.34193280e+00 1.19785359e+01 -2.18981246e+01 3.14470735e+00 1.33653885e+00 5.54505084e+00 -4.76667387e+01 4.26869846e+01 7.49969096e+00 -1.17996989e+01 3.01125734e+01 2.20317480e+01 -1.01116323e+01 -5.22365390e+00 2.54096154e+01 1.69888408e+01 -5.57163085e+00 -3.09388815e+01 4.54488149e+01 5.80609738e+00 -4.00149168e+00 -2.06498214e+00 -4.55446774e+01] [ 1.66758245e+01 -3.28241458e+01 -6.54794612e+00 -2.62783136e+00 -1.66715306e+01 -2.29334540e+01 5.37482575e+00 8.71256564e+00 1.07575090e+01 -1.60293882e+01 1.41641709e+00 1.58478169e+01 7.41034604e+00 -1.04183556e+01 3.78115081e+01 7.60852795e+00 -1.38626466e+01 2.02882434e+00 6.64452880e+00 8.96930224e+00 6.51984892e+00 3.73739514e+01 2.16027776e+01 1.30817492e+01 -8.16128070e+00 9.72921839e+00 1.46223737e+01 -6.51226129e-01 -1.52347079e+01 -2.73054701e+01] [-3.40481383e+01 9.46129608e-01 9.38372712e+00 -2.60582086e+01 6.43600027e+01 -1.39165948e+01 -4.40389091e-02 1.22301561e+01 2.31332163e+01 -4.34878768e+01 1.40324719e+00 1.59166287e+01 9.18610195e+00 -5.73942641e+01 4.30368603e+01 6.65195420e+00 -4.16585020e+01 1.63651703e+01 1.95148458e+01 -2.84906470e+01 -8.05680523e+00 3.32335723e+01 6.66817316e+00 -4.49299491e-01 -4.59894357e+01 4.88076981e+01 5.48259841e+00 9.92694532e+00 -2.40624614e+01 -3.41344336e+01] [ 3.12173193e+00 -5.02346363e+01 -1.08044742e+01 1.70384644e+01 -5.29252549e+01 -5.79382176e+00 -1.87939745e+00 -3.53817806e-04 1.83614757e+01 3.86879808e+00 -7.68945422e+00 1.24060717e+01 1.81701066e+01 -7.31135337e-01 3.56212416e+01 1.15746081e+01 -2.73283247e+00 -1.35463306e+01 -7.80076074e-01 9.61497792e+00 1.77248152e+01 3.03478412e+01 2.79939397e+01 9.91363802e+00 4.76594462e+00 
-1.82831085e+01 2.34428370e+01 2.70392649e+00 -2.72086543e+01 -3.18861304e+01] [ 4.04614308e+01 -4.30036048e+01 -5.32326841e+00 -1.49905234e+01 -4.50232684e+01 -2.69553256e+01 -2.03729088e+00 -1.07384542e+01 4.27085231e-01 -1.18605226e+01 2.99133129e+00 2.34965185e+00 -1.14484560e+00 -8.93089627e+00 2.39354217e+01 2.56718637e+01 -9.23592509e+00 -2.64240991e+01 2.02314144e+01 2.08000942e+01 7.24071924e+00 1.24593704e+01 1.27175674e+01 8.55085603e+00 -6.07038427e+00 -3.04567121e+01 1.00072206e+01 2.95102963e-01 -1.41056655e+01 -2.30226225e+01] [ 3.53461066e+01 -3.70831250e+01 2.84876459e+00 -2.40197050e+01 -1.70205274e+01 -2.93205108e+01 -2.60536389e+00 -1.16790743e+00 -1.24129498e+00 -1.83054213e+01 1.52457045e+01 4.95780034e+00 -4.36465023e+00 -1.72532521e+01 2.57273252e+01 1.46102769e+01 -1.92681933e+01 -1.34461948e+01 1.79252714e+01 1.10306980e+01 1.96647210e+00 1.75900223e+01 8.90012839e+00 1.04667539e+01 -1.03716903e+01 -1.95223430e+01 6.04425521e+00 1.53236405e+00 -8.24000111e+00 -2.84337290e+01]] syn2 = [[-2.72279529e-01 -5.45771250e+00 6.20414356e+00 -1.54861296e+00 -3.66024323e+00 -1.83198005e+00 5.77240990e+00 -3.87931334e+00 -2.53062342e-01 -1.91925804e+00] [-5.01620037e-01 -7.95210873e+00 -4.01504608e-01 -3.67329102e+00 -5.18632616e+00 1.80806131e-01 -5.45352415e+00 -5.11564428e-01 -1.17684882e+00 1.69263451e+00] [-1.78499143e+00 -1.72945889e+00 3.19643963e+00 -1.70158534e-01 -1.54920512e+00 -7.65496926e-01 -3.77807277e+00 3.10283677e+00 -4.74816336e-01 4.64960384e-01] [-5.72629551e-01 4.77511207e+00 -5.34236237e+00 4.74110974e-02 -1.10134203e+00 -1.66152654e-01 1.97285143e+00 1.78345766e+00 7.81632914e-01 -6.07989315e-01] [-1.68844493e+00 2.46386344e+00 7.40047097e+00 3.47128308e+00 -7.94006328e+00 4.70762666e-01 -9.64506924e+00 -5.42619940e+00 1.50997031e+00 -8.88545082e+00] [-8.98514813e-02 3.31672748e+00 -7.28500873e+00 -3.16892630e+00 -1.15391570e+00 1.55647141e+00 -2.65971841e+00 1.12000603e-01 -2.39641436e-01 9.27288837e-01] [-1.36215646e+00 2.06983436e+00 -1.43829838e+00 1.58370565e+00 -1.52834603e+00 -2.95032551e+00 -1.43469777e+00 -1.27615564e+00 -5.92343397e-01 1.47802817e+00] [-7.63437224e-01 -4.81284093e-01 -1.05669301e+00 1.56269522e+00 1.52665461e+00 -2.55616619e-01 -1.85795248e+00 8.18200221e-01 -1.37332212e+00 4.73557917e-01] [-2.71161887e-01 -3.79405909e-01 1.22221271e+00 -1.98726128e+00 2.97945938e+00 -7.78808805e-01 -3.14608555e+00 3.44502285e-01 -1.34123013e+00 5.15974690e-01] [-9.08727357e-01 7.81570938e-01 -3.62303571e+00 4.05410724e-01 -2.40251173e+00 -1.33348922e+00 1.11595562e+00 -6.80948458e-01 8.31716579e-01 -2.10711355e+00] [-2.72999374e-01 -5.26026514e+00 6.24020854e+00 4.84739408e-02 -2.36552919e+00 5.11658090e-01 -1.76834380e+00 -2.30193989e+00 1.58134708e-01 -1.59410011e+00] [-1.63373137e+00 -2.28725064e+00 -2.27994347e+00 -2.68552668e+00 1.75024637e+00 -3.88194341e+00 -2.20335031e+00 -2.09043532e+00 -2.71460369e+00 -2.85241555e-01] [-6.46877418e-01 -2.92324964e-01 -1.42531150e+00 -8.86929102e-01 7.61641994e-01 1.00040925e+00 -6.53265604e-01 -1.44291393e+00 -1.05347676e+00 -7.97037714e-01] [-1.27555413e-01 -3.24239433e+00 -7.02199552e+00 -3.03258403e+00 1.13377195e+00 -2.91466791e+00 6.45432394e-01 7.45276919e-01 -4.16668249e+00 1.16492412e+00] [-2.51642977e+00 -1.45590795e+00 -2.34606115e+00 -2.34254953e-01 -6.80468940e-01 -2.58960875e+00 -8.57454788e-04 -2.96927777e-01 -2.12994846e+00 9.05935507e-01] [-5.27093247e-01 -2.45902011e+00 1.98645007e+00 -2.24731624e+00 -1.53766296e+00 1.38326687e+00 4.88242923e+00 -1.53447778e+00 -7.35189970e-01 
2.38665697e-01] [-1.42788874e+00 -2.31139856e+00 -3.71598876e+00 -1.26206768e+00 -2.68197238e+00 1.55356702e+00 1.53535983e+00 -1.44337872e+00 -3.76810375e-01 -1.73372641e+00] [ 2.36833595e-01 1.51712343e+00 5.26513650e+00 1.93413398e-01 -2.89575251e+00 7.62261127e-01 -4.90632264e+00 4.07981801e-01 9.80784875e-01 -4.52115930e+00] [-2.21487178e+00 -3.59789073e-01 -2.03619148e-01 -9.14517198e-01 -1.74572643e+00 1.76061577e+00 7.13640711e-01 -3.54256978e+00 6.45166356e-01 -1.81897282e+00] [-9.45245346e-01 -1.72758737e+00 -3.73075585e+00 2.48708124e+00 -4.91222517e+00 -3.72702157e-01 3.40334346e+00 -3.88175211e+00 -2.15442095e-01 -6.69935185e-01] [-8.63614321e-01 1.08501235e-01 -2.86335871e-01 -3.28276276e-01 3.15911868e+00 -1.12828392e+00 -3.03207205e-01 1.33578895e+00 -2.42510267e+00 -3.31372121e+00] [-2.87343629e+00 -1.32377055e+00 -4.68264384e+00 -3.28694913e+00 2.01902843e+00 -1.80427375e+00 -3.20611222e+00 -1.64090596e+00 -1.36377966e+00 -4.45386544e-01] [-1.93923009e+00 -8.53839343e-01 -3.41387331e+00 6.44788085e-01 -9.17931470e-01 -1.13106290e+00 1.47557125e+00 -5.21648650e-01 -1.06534184e+00 -3.98878258e+00] [-1.95789564e+00 6.30901986e-01 -1.31295813e+00 -3.55829606e+00 1.85614969e+00 -1.19654385e+00 7.59257977e-01 1.22543245e+00 -1.23044795e+00 -3.63695270e+00] [ 4.99347270e-01 5.46145698e-01 -3.50162319e+00 1.67382081e+00 -1.16380260e+00 -1.75545456e+00 -7.05349451e-01 -6.98112671e-01 -1.39505064e+00 -1.59443063e-01] [-1.36350724e+00 1.19071217e+00 9.95921600e-01 3.97475263e-01 -3.35191491e+00 -1.77110450e+00 -1.53211783e+00 2.89157095e+00 -2.33366377e+00 2.36332035e+00] [-7.04506940e-01 -6.62947405e-01 -1.36475980e+00 -1.60826386e+00 -2.63858735e+00 -2.85723588e+00 5.77043433e-01 -5.00098378e-01 -2.20039099e-01 -7.35449628e-01] [-1.23964142e-01 1.30927711e+00 6.22345230e-01 -3.81073785e+00 9.30934010e-01 -1.95005777e+00 -2.54299643e+00 8.67157630e-01 -7.95745744e-01 -2.77779306e+00] [-1.04532676e+00 2.67060817e+00 9.61776287e-02 3.45126974e+00 -5.62582928e+00 2.31338007e+00 -3.66207608e+00 -6.04651022e+00 4.49624906e+00 -5.98259239e+00] [ 8.78966861e-01 -7.62775019e+00 -1.72840924e+00 -4.63256955e-01 -4.37986491e-03 4.23034385e-01 -3.39369578e+00 -1.08915779e+00 -1.30760594e+00 -6.00498856e-01]] b0 = [[-795.05370699 -888.65637856 -843.38493583 -742.70469309 -832.26406088 -688.61027301 -864.20138884 -674.06130063 -836.94722637 -741.18228952 -924.47019823 -738.71980524 -890.46653089 -881.01297127 -659.7569183 111.75327485 -713.97499983 -880.06198367 -682.24998232 -758.14040568 -897.35314216 -885.57872814]] b1 = [[-3.22092845 -1.82447463 -3.16436819 -3.93626503 -1.93805169 -2.74406915 -3.21638455 -2.63614184 -3.83964719 -2.7859646 -2.67585194 -3.60934965 -3.77999705 -2.80698683 -2.97213013 -3.63970304 -1.23358956 -2.95541476 -3.4341114 -2.52303848 -3.75816382 -3.2074504 -2.92940225 -3.98437189 -2.98563262 -3.76575999 -3.39159414 -3.68362168 -2.10585768 -1.89702698]] b2 = [[-0.55270531 -1.17351202 -1.43338435 -0.12336077 -1.31350089 -1.26177085 -1.59943152 -1.22955594 -0.30305603 -0.77184599]]
fbe0ba3fe7398923a4ecff8dc91faf96af99e846
28b5eedc39b697186ba9afc42ec544cd0b13c70d
/spark/regression/linear_regression.py
1619640ed02616968947da8db3b6c2ddc873eeac
[]
no_license
arunpa0206/mltrainingtechcovery
7915ccac779a186d3f1bfa1f6cebbe5ac2455422
ce284c31eefa0468c88c790913532b87a0f77e3a
refs/heads/master
2022-12-08T23:50:04.415494
2021-03-13T08:53:22
2021-03-13T08:53:22
224,205,026
2
10
null
2022-12-08T02:36:22
2019-11-26T13:58:42
Jupyter Notebook
UTF-8
Python
false
false
1,109
py
# make pyspark importable as a regular library.
import findspark

# create a SparkSession
from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()

# load data
data = spark.read.csv('./boston_housing.csv', header=True, inferSchema=True)

# create features vector
print(data.head(5))
feature_columns = data.columns[:-1]  # here we omit the final column
from pyspark.ml.feature import VectorAssembler
assembler = VectorAssembler(inputCols=feature_columns, outputCol="features")
data_2 = assembler.transform(data)

# train/test split
train, test = data_2.randomSplit([0.7, 0.3])

# define the model
from pyspark.ml.regression import LinearRegression
algo = LinearRegression(featuresCol="features", labelCol="medv")

# train the model
model = algo.fit(train)

# evaluation
evaluation_summary = model.evaluate(test)
evaluation_summary.meanAbsoluteError
evaluation_summary.rootMeanSquaredError
evaluation_summary.r2

# predicting values
predictions = model.transform(test)
predictions.select(predictions.columns[13:]).show()  # here I am filtering out some columns just for the figure to fit
29f3206bbd185cbf2b04e3e4ceb2d42791d0a6bb
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03814/s672199772.py
200b2992354025d407dc07f4f9b7d06b602a55c4
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
77
py
s = input()
left = s.find("A")
right = s.rfind("Z")
print(right - left + 1)
581e7f1ac73ddc919efb69e776016f838a8ce99e
b44b690c96cfbaba35fa3cc32e8da4442adb9fad
/Python/1101. The Earliest Moment When Everyone Become Friends.py
151cb5ceccfe90d52025d61102662b977a2e5ccc
[]
no_license
faisalraza33/leetcode
24d610c6884e218719d82a5c79f1695cb6463d68
d7cf4ffba14c6f1ff4551634f4002b53dfeae9b7
refs/heads/master
2022-08-10T02:05:21.932664
2022-07-05T09:59:47
2022-07-05T09:59:47
238,060,131
0
0
null
2020-02-03T20:54:51
2020-02-03T20:54:50
null
UTF-8
Python
false
false
3,082
py
# There are n people in a social group labeled from 0 to n - 1. You are given an array logs where logs[i] = [timestamp_i, x_i, y_i] indicates that x_i and y_i will be friends at the time timestamp_i.
# Friendship is symmetric. That means if a is friends with b, then b is friends with a. Also, person a is acquainted with a person b if a is friends with b, or a is a friend of someone acquainted with b.
# Return the earliest time for which every person became acquainted with every other person. If there is no such earliest time, return -1.
#
# Example 1:
#
# Input: logs = [[20190101,0,1],[20190104,3,4],[20190107,2,3],[20190211,1,5],[20190224,2,4],[20190301,0,3],[20190312,1,2],[20190322,4,5]], n = 6
# Output: 20190301
# Explanation:
# The first event occurs at timestamp = 20190101 and after 0 and 1 become friends we have the following friendship groups [0,1], [2], [3], [4], [5].
# The second event occurs at timestamp = 20190104 and after 3 and 4 become friends we have the following friendship groups [0,1], [2], [3,4], [5].
# The third event occurs at timestamp = 20190107 and after 2 and 3 become friends we have the following friendship groups [0,1], [2,3,4], [5].
# The fourth event occurs at timestamp = 20190211 and after 1 and 5 become friends we have the following friendship groups [0,1,5], [2,3,4].
# The fifth event occurs at timestamp = 20190224 and as 2 and 4 are already friends nothing happens.
# The sixth event occurs at timestamp = 20190301 and after 0 and 3 become friends we have that all become friends.
#
# Example 2:
#
# Input: logs = [[0,2,0],[1,0,1],[3,0,3],[4,1,2],[7,3,1]], n = 4
# Output: 3
#
# Constraints:
#
# 2 <= n <= 100
# 1 <= logs.length <= 10^4
# logs[i].length == 3
# 0 <= timestamp_i <= 10^9
# 0 <= x_i, y_i <= n - 1
# x_i != y_i
# All the values timestamp_i are unique.
# All the pairs (x_i, y_i) occur at most one time in the input.

from typing import List


class Solution:
    def earliestAcq(self, logs: List[List[int]], n: int) -> int:
        # First, we need to sort the events in chronological order.
        logs.sort(key=lambda i: i[0])

        uf = UnionFind(n)
        for ts, x, y in logs:
            uf.union(x, y)
            if uf.get_count() == 1:
                return ts

        # More than one group left, i.e. not everyone is connected.
        return -1


class UnionFind:
    def __init__(self, size):
        self.root = [i for i in range(size)]
        self.rank = [1] * size
        self.count = size

    def find(self, x):
        # find with path compression
        if x == self.root[x]:
            return x
        self.root[x] = self.find(self.root[x])
        return self.root[x]

    def union(self, x, y):
        # union by rank; decrement the group count on every merge
        rootX = self.find(x)
        rootY = self.find(y)
        if rootX != rootY:
            if self.rank[rootX] > self.rank[rootY]:
                self.root[rootY] = rootX
            elif self.rank[rootX] < self.rank[rootY]:
                self.root[rootX] = rootY
            else:
                self.root[rootY] = rootX
                self.rank[rootX] += 1
            self.count -= 1

    def get_count(self):
        return self.count
e3c31baedb01d6c813d3b7b845d8f3bcb35ed6e2
c1c8b0363bb6dd52115c0aad9298b6573a6ba062
/sparse_binary_number.py
c051e68a78ba80ce4b6058d4bf0ce5a1b3a8b3d7
[ "MIT" ]
permissive
beepscore/sparse_binary_number
3f5bd50e772c6e3345e8f57d4317c4a7d8e572d7
a89c5b04189c5f7015855075222a0ced4a650db7
refs/heads/master
2021-01-10T20:28:56.885423
2015-05-14T18:38:41
2015-05-14T18:38:41
33,623,410
0
0
null
null
null
null
UTF-8
Python
false
false
4,631
py
#!/usr/bin/env python3


def next_sparse(sparse_number):
    """return next larger sparse number

    Keyword arguments:
    sparse_number -- a sparse number, as defined by is_sparse

    This algorithm uses powers of two.
    Estimated time complexity >= O(log(n)).
    """
    # print("sparse_number 0b{0:b}".format(sparse_number))

    # Edge case. Handle explicitly for clarity
    if sparse_number == 0:
        return 1

    power_max = twos_power_max(sparse_number)

    for power in range(0, power_max):
        # print("power", power)

        if is_zero_bit_and_no_neighbor_ones(sparse_number, power):
            # print("at middle of 000 change to 010")
            return sparse_number + (2 ** power)

        if is_right_end_of_001(sparse_number, power):
            # print("at right of 001 change to 01 followed by all zeros")
            sparse_zeroed_low_bits = (sparse_number >> (power + 1)) * (2 ** (power + 1))
            # print("sparse_zeroed_low_bits {0:b}".format(sparse_zeroed_low_bits))
            return sparse_zeroed_low_bits + (2 ** (power + 1))

    return (2 ** (power_max + 1))


def next_sparse_incremental(sparse_number):
    """return next larger sparse number

    Keyword arguments:
    sparse_number -- a sparse number, as defined by is_sparse

    return None if reached internal limit without finding a next sparse.

    This algorithm uses "brute force".
    Estimated time complexity >= O(n).
    Increments until possible_sparse is_sparse or reaches limit.
    """
    # limit is arbitrary in Python
    # http://stackoverflow.com/questions/5470693/python-number-limit
    limit = 2 ** 32

    for possible_sparse in range(sparse_number + 1, limit):
        if is_sparse(possible_sparse):
            return possible_sparse
    return None


def is_sparse(number):
    """return True if number binary digit 1s have no adjacent 1s.

    Keyword arguments:
    number -- an integer >= 0

    return True if number is 0b1
    """
    if number == 0:
        return True

    if number == 1:
        # edge case. List explicitly for clarity. Define to be True
        return True

    else:
        bits = bits_list(number)
        # start power_of_2 at 1 so previous_bit index won't be out of list range
        for power_of_2 in range(1, len(bits)):
            current_bit = bits[power_of_2]
            previous_bit = bits[power_of_2 - 1]
            if ((current_bit == 1) and (previous_bit == 1)):
                # number has two consecutive 1s
                return False
        return True


def bits_list(number):
    """return list of bits in number

    Keyword arguments:
    number -- an integer >= 0
    """
    # https://wiki.python.org/moin/BitManipulation
    if number == 0:
        return [0]
    else:
        # binary_literal string e.g. '0b101'
        binary_literal = bin(number)
        bits_string = binary_literal.lstrip('0b')
        # list comprehension
        bits = [int(bit_character) for bit_character in bits_string]
        return bits


def bit_at_twos_power(number, exponent):
    """return bit in number at location 2 ** exponent

    Keyword arguments:
    number -- an integer >= 0
    exponent -- a integer >= 0
    """
    bits = bits_list(number)
    # NOTE: reverse() modifies object, returns None
    bits.reverse()
    if exponent > (len(bits) - 1):
        return 0
    else:
        return bits[exponent]


def twos_power_max(number):
    """return highest power of two in number

    Keyword arguments:
    number -- an integer >= 0
    """
    bits = bits_list(number)
    return len(bits) - 1


def is_zero_bit_and_no_neighbor_ones(number, exponent):
    if (bit_at_twos_power(number, exponent) == 0
            and is_bit_no_neighbor_ones(number, exponent)):
        return True
    else:
        return False


def is_bit_no_neighbor_ones(number, exponent):
    if (is_bit_no_right_one(number, exponent)
            and is_bit_no_left_one(number, exponent)):
        return True
    else:
        return False


def is_bit_no_right_one(number, exponent):
    if (exponent == 0
            or bit_at_twos_power(number, exponent - 1) == 0):
        return True
    else:
        return False


def is_bit_no_left_one(number, exponent):
    if bit_at_twos_power(number, exponent + 1) == 0:
        return True
    else:
        return False


def is_right_end_of_001(number, exponent):
    if (bit_at_twos_power(number, exponent) == 1
            and bit_at_twos_power(number, exponent + 1) == 0
            and bit_at_twos_power(number, exponent + 2) == 0):
        return True
    else:
        return False
fb013c9f1fb83d9df51c54bbae2e997159c4c7e8
059b43c54e69fdca5419d5565c19cc5cb0114a92
/__unported__/sale_order_line_analytic/__openerp__.py
9edf7867b2603f7b614f424a96badd02f2aaaa26
[]
no_license
caiuka/eficent-odoo-addons
b3594b0e4d52594c95bb4cea39fdb47933e77d22
458df2c04944688c2273885b5d09fe3753e0ca7e
refs/heads/master
2020-05-29T11:00:45.273732
2014-11-06T13:32:26
2014-11-06T13:32:26
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,086
py
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 Eficent (<http://www.eficent.com/>)
#    <[email protected]>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    "name": "Purchase Requisition Analytic",
    "version": "1.0",
    "author": "Eficent",
    "website": "www.eficent.com",
    "category": "Generic Modules/Projects & Services",
    "depends": ["analytic", "purchase_requisition"],
    "description": """
Organizations often require to integrate purchase requisitions with projects
or contracts, and to find requisitions by searching by it the project/contract
code, name or project/contract manager.

This module adds the following features to purchase requisitions:

- Adds the analytic account to the purchase requisition lines,

- When the purchase order is created from the purchase requisition, it copies
  the analytic account.

- Introduces the possibility to search purchase requisitions by analytic
  account or by project manager.

- Introduces a new menu entry in Purchasing to list purchase requisition lines.
    """,
    "init_xml": [
    ],
    "update_xml": [
        "purchase_requisition_view.xml",
    ],
    'demo_xml': [
    ],
    'test': [
    ],
    'installable': True,
    'active': False,
    'certificate': '',
}
ff5dff19f34684d16ef5281dee8f6be0988851a6
09652bdf74f8ade0cf56a75c9f716bda9dc6f4ff
/server/src/tests/samples/newType2.py
31086a4f728f54cbba6821b213aacd9ac83b7663
[ "MIT", "LicenseRef-scancode-generic-cla" ]
permissive
zeronone/pyright
b68005063cc3623ae2f572a0fec1d4c7845ec57f
d9babcd56b08cb0be024c9d527df333cff5b2b97
refs/heads/master
2022-11-18T14:04:31.561923
2020-07-15T08:49:53
2020-07-15T08:49:53
278,020,180
0
0
NOASSERTION
2020-07-08T07:26:01
2020-07-08T07:26:00
null
UTF-8
Python
false
false
400
py
# This sample tests the special-case handle of the multi-parameter
# form of the built-in "type" call.

# pyright: strict

X1 = type("X1", (object,), dict())
X2 = type("X2", (object,), dict())


class A(X1): ...


class B(X2, A): ...


X3 = type(34, (object,))

X4 = type("X4", 34)

# This should generate an error because the second arg is not a tuple of class types.
X5 = type("X5", (3,))
a3d8178d3d9cd5166f52ef11be9060f61c192390
47e9f6cef4bfedf81a897d972cecfcf4616ae25f
/experiments/base_experiment.py
0fb506e33939575baf2a0db46f1c3a9691fa894c
[]
no_license
wz139704646/MBRL_on_VAEs
1d2b141f5a17746ffa527f3852dfe10bc73dcd27
b0e8f66b3ade742445a41d3d5667032a931d94d2
refs/heads/main
2023-04-09T04:35:34.306860
2021-04-27T03:59:54
2021-04-27T03:59:54
323,389,851
1
0
null
null
null
null
UTF-8
Python
false
false
1,633
py
import abc


class BaseExperiment(metaclass=abc.ABCMeta):
    """base class for all kinds of experiments"""

    def __init__(self, exp_configs, hook_before_run=None, hook_after_run=None):
        """initialize the experiment

        :param exp_configs: the configurations needed in this experiment
        :param hook_before_run: hook function run before the main part run,
            take the experiment object itself as the only param
        :param hook_after_run: hook function run after the main part run,
            take the experiment object itself as the only param
        """
        self.exp_configs = exp_configs
        self.hook_before_run = hook_before_run
        self.hook_after_run = hook_after_run

    @abc.abstractmethod
    def apply_configs(self):
        """apply the configurations"""
        pass

    @abc.abstractmethod
    def before_run(self, **kwargs):
        """preparations needed be done before run the experiment"""
        pass

    @abc.abstractmethod
    def run(self, **kwargs):
        """run the main part of the experiment"""
        pass

    @abc.abstractmethod
    def after_run(self, **kwargs):
        """cleaning up needed be done after run the experiment"""
        pass

    def exec(self, **kwargs):
        """execute the entire experiment"""
        # apply the experiment configuration
        self.apply_configs()

        self.before_run(**kwargs)

        if self.hook_before_run is not None:
            self.hook_before_run(self)

        self.run(**kwargs)

        if self.hook_after_run is not None:
            self.hook_after_run(self)

        self.after_run(**kwargs)