| column | dtype | values |
|---|---|---|
| blob_id | string | lengths 40 to 40 |
| directory_id | string | lengths 40 to 40 |
| path | string | lengths 3 to 616 |
| content_id | string | lengths 40 to 40 |
| detected_licenses | sequence | lengths 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | lengths 5 to 115 |
| snapshot_id | string | lengths 40 to 40 |
| revision_id | string | lengths 40 to 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M, nullable (⌀) |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable (⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable (⌀) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | lengths 3 to 10.2M |
| authors | sequence | lengths 1 to 1 |
| author_id | string | lengths 1 to 132 |
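Each row below follows this schema; the raw source text sits in the `content` column, followed by the `authors` and `author_id` columns. A minimal sketch of streaming rows like these with the Hugging Face `datasets` library; the dataset path below is a placeholder, not taken from this page:

```python
from datasets import load_dataset

# Placeholder path; substitute the actual dataset repository id.
ds = load_dataset("org/this-code-dataset", split="train", streaming=True)

for row in ds:
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    source_text = row["content"]  # the raw file, e.g. the Python sources shown below
    break
```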
4058a4aba52d9076ba294a27d437eb8344f2cdb7 | 668cc2cd1109cf1c207a57ae7decc5ae5edc9728 | /backend/users/migrations/0002_auto_20201104_1426.py | d51fe7ac08131e041c8abbbf9f79c5410e4a4133 | [] | no_license | crowdbotics-apps/logictech-22290 | 7538661024c163c16881371468f84c181d1ee93f | f17151874e1fd60a1cc81b247a5e0599421ac6e8 | refs/heads/master | 2023-01-09T21:14:45.728461 | 2020-11-04T14:30:13 | 2020-11-04T14:30:13 | 310,025,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 627 | py |
# Generated by Django 2.2.17 on 2020-11-04 14:26
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('course', '0001_initial'),
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='group',
            field=models.ManyToManyField(blank=True, related_name='user_group', to='course.Group'),
        ),
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| ["[email protected]"] | |
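For context, a sketch of the `users.User` model state this migration implies. The two field definitions are copied from the operations above; the base class and everything else about the model are assumptions, not taken from the repository:

```python
from django.contrib.auth.models import AbstractUser
from django.db import models


class User(AbstractUser):
    # AlterField in 0002_auto_20201104_1426
    name = models.CharField(blank=True, max_length=255, null=True)
    # AddField in 0002_auto_20201104_1426; depends on course.0001_initial providing Group
    group = models.ManyToManyField('course.Group', blank=True, related_name='user_group')
```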
d47b3bb24581ca86d9a76530a019eaca62ae8e66 | f3b233e5053e28fa95c549017bd75a30456eb50c | /p38a_input/L2EE/2EE-2J_MD_NVT_rerun/set_4.py | 2b3fa06318de66ab34d51136748b9f7c26eaed64 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 742 | py |
import os
dir = '/mnt/scratch/songlin3/run/p38a/L2EE/MD_NVT_rerun/ti_one-step/2EE_2J/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_4.in'
temp_pbs = filesdir + 'temp_4.pbs'

lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]

for j in lambd:
    os.chdir("%6.5f" %(j))
    workdir = dir + "%6.5f" %(j) + '/'

    #prodin
    prodin = workdir + "%6.5f_prod_4.in" %(j)
    os.system("cp %s %s" %(temp_prodin, prodin))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))

    #PBS
    pbs = workdir + "%6.5f_4.pbs" %(j)
    os.system("cp %s %s" %(temp_pbs, pbs))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))

    #submit pbs
    #os.system("qsub %s" %(pbs))

    os.chdir(dir)
| ["[email protected]"] | |
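The `set_4.py` script above stages one working directory per λ value by copying two templates and replacing the `XXX` placeholder with `sed`. A minimal pure-Python sketch of the same staging step, assuming the same template files and using relative paths for brevity:

```python
import os

lambd = [0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738,
         0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]


def render(template_path, out_path, lam):
    # Replace every XXX placeholder with the lambda value, formatted as the sed call does.
    with open(template_path) as src:
        text = src.read().replace("XXX", "%6.5f" % lam)
    with open(out_path, "w") as dst:
        dst.write(text)


for lam in lambd:
    workdir = "%6.5f" % lam
    os.makedirs(workdir, exist_ok=True)
    render("files/temp_prod_4.in", os.path.join(workdir, "%6.5f_prod_4.in" % lam), lam)
    render("files/temp_4.pbs", os.path.join(workdir, "%6.5f_4.pbs" % lam), lam)
    # Job submission would still go through the scheduler, e.g. os.system("qsub ..."),
    # which the original script leaves commented out.
```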
a0b3baaacb54b0e27beb93b36ee98ef7b92beb83 | 8654435d89790e32f8e4c336e91f23250da0acb0 | /bullet3/examples/pybullet/numpy/humanoid_running.py | d15a68c8b3843917870a8ca017c69d0db13adfdc | [
"Zlib"
] | permissive | takamtd/deepmimic | 226ca68860e5ef206f50d77893dd19af7ac40e46 | b0820fb96ee76b9219bce429fd9b63de103ba40a | refs/heads/main | 2023-05-09T16:48:16.554243 | 2021-06-07T05:04:47 | 2021-06-07T05:04:47 | 373,762,616 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 502,206 | py |
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)
import sys
import numpy as np
import argparse
import pybullet as p
import time
gui = True
cid = p.connect(p.SHARED_MEMORY)
#DIRECT is much faster, but GUI shows the running gait
if (cid < 0):
if (gui):
cid = p.connect(p.GUI)
else:
cid = p.connect(p.DIRECT)
#p.setGravity(1,2,-9.8)
#p.setDefaultContactERP (0.4)
p.setGravity(0, 0, -9.8)
#numSubSteps=4 and fixedTimeStep=1.0/60. is an effective internal fixed step of 1./240
#recommended to not go below 50 solver iterations
p.setPhysicsEngineParameter(fixedTimeStep=1.0 / 60., numSolverIterations=550, numSubSteps=8)
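#added note (hedged): PyBullet divides fixedTimeStep into numSubSteps equal substeps,
#so the call above (fixedTimeStep=1/60, numSubSteps=8) yields an internal step of
#(1/60)/8 = 1/480 s; the 1/240 figure in the comment above corresponds to numSubSteps=4.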
#this mp4 recording requires ffmpeg installed
#mp4log = p.startStateLogging(p.STATE_LOGGING_VIDEO_MP4,"humanoid.mp4")
#p.loadSDF("stadium.sdf")
p.loadURDF("plane_implicit.urdf")
objs = p.loadMJCF("mjcf/humanoid_symmetric_no_ground.xml",
flags=p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS)
human = objs[0]
for j in range(p.getNumJoints(human)):
jointInfo = p.getJointInfo(human, j)
print("joint(", j, "qIndex=", jointInfo[3], "uIndex=", jointInfo[4], ")=", jointInfo)
ordered_joints = []
ordered_joint_indices = []
parser = argparse.ArgumentParser()
parser.add_argument('--profile')
jdict = {}
for j in range(p.getNumJoints(human)):
info = p.getJointInfo(human, j)
link_name = info[12].decode("ascii")
if link_name == "left_foot": left_foot = j
if link_name == "right_foot": right_foot = j
ordered_joint_indices.append(j)
if info[2] != p.JOINT_REVOLUTE: continue
jname = info[1].decode("ascii")
jdict[jname] = j
lower, upper = (info[8], info[9])
ordered_joints.append((j, lower, upper))
p.setJointMotorControl2(human, j, controlMode=p.VELOCITY_CONTROL, force=0)
motor_names = ["abdomen_z", "abdomen_y", "abdomen_x"]
motor_power = [100, 100, 100]
motor_names += ["right_hip_x", "right_hip_z", "right_hip_y", "right_knee"]
motor_power += [100, 100, 300, 200]
motor_names += ["left_hip_x", "left_hip_z", "left_hip_y", "left_knee"]
motor_power += [100, 100, 300, 200]
motor_names += ["right_shoulder1", "right_shoulder2", "right_elbow"]
motor_power += [75, 75, 75]
motor_names += ["left_shoulder1", "left_shoulder2", "left_elbow"]
motor_power += [75, 75, 75]
motors = [jdict[n] for n in motor_names]
class Dummy:
pass
dummy = Dummy()
dummy.initial_z = None
def current_relative_position(jointStates, human, j, lower, upper):
#print("j")
#print(j)
#print (len(jointStates))
#print(j)
temp = jointStates[j]
pos = temp[0]
vel = temp[1]
#print("pos")
#print(pos)
#print("vel")
#print(vel)
pos_mid = 0.5 * (lower + upper)
return (2 * (pos - pos_mid) / (upper - lower), 0.1 * vel)
def collect_observations(human):
#print("ordered_joint_indices")
#print(ordered_joint_indices)
jointStates = p.getJointStates(human, ordered_joint_indices)
j = np.array([
current_relative_position(jointStates, human, *jtuple) for jtuple in ordered_joints
]).flatten()
#print("j")
#print(j)
body_xyz, (qx, qy, qz, qw) = p.getBasePositionAndOrientation(human)
#print("body_xyz")
#print(body_xyz, qx,qy,qz,qw)
z = body_xyz[2]
dummy.distance = body_xyz[0]
if dummy.initial_z == None:
dummy.initial_z = z
(vx, vy, vz), _ = p.getBaseVelocity(human)
more = np.array([z - dummy.initial_z, 0.1 * vx, 0.1 * vy, 0.1 * vz, qx, qy, qz, qw])
rcont = p.getContactPoints(human, -1, right_foot, -1)
#print("rcont")
#print(rcont)
lcont = p.getContactPoints(human, -1, left_foot, -1)
#print("lcont")
#print(lcont)
feet_contact = np.array([len(rcont) > 0, len(lcont) > 0])
return np.clip(np.concatenate([more] + [j] + [feet_contact]), -5, +5)
def relu(x):
return np.maximum(x, 0)
class SmallReactivePolicy:
"Simple multi-layer perceptron policy, no internal state"
def __init__(self): #, observation_space, action_space):
#assert weights_dense1_w.shape == (observation_space.shape[0], 256)
#assert weights_dense2_w.shape == (256, 128)
#assert weights_final_w.shape == (128, action_space.shape[0])
pass
def act(self, ob):
#ob[0] += -1.4 + 0.8
x = ob
x = relu(np.dot(x, weights_dense1_w) + weights_dense1_b)
x = relu(np.dot(x, weights_dense2_w) + weights_dense2_b)
x = np.dot(x, weights_final_w) + weights_final_b
return x
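#added note (hedged): with the 17 motors listed above, collect_observations() returns
#8 base features + 2 values per revolute joint (17 joints) + 2 foot-contact flags = 44
#entries, so the pretrained weights defined further below are expected to have shapes
#(44, 256), (256, 128) and (128, 17), with matching bias vectors.
def check_policy_shapes():
  #optional sanity check; only call it after the weight arrays below have been defined
  assert np.shape(weights_dense1_w) == (44, 256)
  assert np.shape(weights_dense2_w) == (256, 128)
  assert np.shape(weights_final_w) == (128, 17)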
def demo_run():
pi = SmallReactivePolicy()
t1 = time.time()
timinglog = p.startStateLogging(p.STATE_LOGGING_PROFILE_TIMINGS, "humanoidTimings.json")
frame = 0
while 1:
obs = collect_observations(human)
actions = pi.act(obs)
#print(" ".join(["%+0.2f"%x for x in obs]))
#print("Motors")
#print(motors)
#for m in range(len(motors)):
#print("motor_power")
#print(motor_power[m])
#print("actions[m]")
#print(actions[m])
#p.setJointMotorControl2(human, motors[m], controlMode=p.TORQUE_CONTROL, force=motor_power[m]*actions[m]*0.082)
#p.setJointMotorControl2(human1, motors[m], controlMode=p.TORQUE_CONTROL, force=motor_power[m]*actions[m]*0.082)
forces = [0.] * len(motors)
batch = True
for m in range(len(motors)):
forces[m] = motor_power[m] * actions[m] * 0.082
if (not batch):
p.setJointMotorControl2(human, motors[m], controlMode=p.TORQUE_CONTROL, force=forces[m])
if (batch):
p.setJointMotorControlArray(human, motors, controlMode=p.TORQUE_CONTROL, forces=forces)
p.stepSimulation()
humanPos, humanOrn = p.getBasePositionAndOrientation(human)
if (gui):
time.sleep(1. / 60.)
print("frame=", frame)
camInfo = p.getDebugVisualizerCamera()
curTargetPos = camInfo[11]
distance = camInfo[10]
yaw = camInfo[8]
pitch = camInfo[9]
targetPos = [
0.95 * curTargetPos[0] + 0.05 * humanPos[0], 0.95 * curTargetPos[1] + 0.05 * humanPos[1],
curTargetPos[2]
]
p.resetDebugVisualizerCamera(distance, yaw, pitch, targetPos)
frame += 1
#if frame==1000: break
t2 = time.time()
print("############################### distance = %0.2f meters" % dummy.distance)
print("############################### FPS = ", 1000 / (t2 - t1))
#print("Starting benchmark")
#logId = p.startStateLogging(p.STATE_LOGGING_PROFILE_TIMINGS,"pybullet_humanoid_timings.json")
#p.stopStateLogging(logId)
print("ended benchmark")
print(frame)
p.stopStateLogging(timinglog)
# yapf: disable
weights_dense1_w = np.array(
[[
-0.3857, -0.7450, +0.1473, +0.1996, +0.0570, +0.4522, +0.1172, +0.7558,
-0.2530, -0.7948, +0.3120, +0.6216, +0.1044, -0.4347, +0.0921, +0.0187,
+0.2030, +0.2042, -0.2928, +0.2717, +0.5117, +0.1115, -0.0278, -0.0675,
-0.2967, +0.3128, -0.1434, -0.0476, +0.0561, -0.1874, +0.1675, -0.1028,
+0.0712, +0.8653, -0.5565, -0.1207, +0.0982, -0.2875, -0.0755, -0.3057,
-0.0841, +0.5336, +0.2068, -0.0132, -0.2131, -0.5910, +0.1329, +0.2789,
+0.0036, +0.0048, +0.2655, +0.3585, +0.2861, -0.2288, -0.0646, +0.0569,
-0.4701, -0.0116, +0.4616, +0.0947, -0.1013, -0.0256, +0.2854, +0.3382,
+0.0822, +0.1160, -0.4913, +0.3991, +0.0121, -0.3392, +0.2515, +0.7188,
+0.3412, +0.1247, +0.1422, +0.0420, +0.1401, +0.5830, +0.0226, +0.6080,
+0.3019, -0.7696, -0.0580, +0.2104, +0.3786, +0.2968, -0.3726, +0.2792,
+0.3572, -0.4362, +0.5368, +0.0068, +0.4366, -0.4799, +0.2688, +0.1115,
+0.0251, +0.4107, -0.0358, +0.0198, +0.2563, +0.0315, -0.1143, +0.6191,
+0.1694, +0.4175, +0.1873, +0.0678, +0.1324, +0.3038, +0.0610, +0.2491,
-0.2730, +0.2933, +0.1704, +0.1746, +0.1444, -0.2578, +0.3743, +0.3837,
+0.3294, +0.0433, +0.5242, -0.3465, +0.1618, -0.5255, -0.1703, +0.0420,
+0.5505, +0.4046, +0.1596, +0.3973, -0.5158, -0.2922, +0.3183, -0.0244,
+0.3496, +0.4069, -0.1961, +0.2705, -0.1008, -0.4008, -0.1443, -0.2113,
+0.2064, -0.3466, +0.2565, +0.0279, +0.5785, -0.2918, +0.7262, +0.0285,
-0.1779, +0.2811, -0.2066, +0.5471, -0.7668, +0.1909, -0.5684, -0.0002,
+0.2291, -0.0631, +0.0823, +0.5520, -0.3267, +0.0143, +0.4295, +0.2594,
+0.3523, -0.1266, +0.4412, -0.2685, +0.0863, +0.1779, +0.1280, +0.5087,
+0.0809, +0.1118, -0.5754, -0.3300, -0.0032, +0.7060, -0.5172, -0.6553,
-0.0951, -0.0129, +0.1382, -0.0504, -0.2634, +0.2659, -0.1932, -0.3762,
+0.3072, +0.1612, +0.2942, -0.1438, -0.0850, +0.1877, +0.4726, +0.2751,
-0.0787, +0.3128, -0.1351, +0.2552, -0.4833, -0.0322, +0.3641, +0.3715,
-0.1938, +0.5040, -0.0598, +0.0341, +0.6252, -0.2977, +0.2694, +0.0525,
+0.6295, -0.1007, -0.0406, +0.1028, +0.5181, +0.2201, +0.2269, +0.3724,
-0.1985, -0.1614, +0.0093, +0.2851, +0.0191, +0.0620, +0.1989, +0.5905,
+0.5589, +0.1896, +0.3859, +0.1207, -0.0323, +0.3912, +0.2950, +0.3255,
+0.2765, -0.3384, +0.4286, +0.2692, +0.2889, +0.5955, +0.0918, -0.3228,
+0.3828, -0.0196, -0.0484, +0.3256, -0.1361, +0.5124, -0.1782, +0.2984
],
[
-0.3305, -0.5427, +0.1033, -0.8093, -0.6948, +1.0265, +0.2688,
-0.4297, -0.3887, -0.0335, -0.4224, +0.0902, -0.0416, -0.3621,
+0.4824, -0.5138, -0.6160, +0.0404, -1.0360, +0.2734, +0.3415,
-0.7259, -0.3395, -0.2803, +0.6166, +0.3448, +0.3657, +0.0495,
+0.8924, +0.1673, +0.2726, +1.2509, -0.0482, +0.2982, -0.0817,
-0.6190, -0.7550, -0.6310, +0.1912, +0.2475, -0.4001, +0.2715,
-1.3571, -1.2735, +0.4491, +0.0965, +0.3893, -0.1449, +0.5919,
-0.7355, -0.2579, -0.6447, -0.8400, -1.0339, -0.7232, +0.2704,
-0.2094, -0.9082, +0.0719, -0.9922, +0.7732, -0.1924, -0.7696,
-0.3170, -0.3350, -0.3462, +0.3686, +0.7608, -0.3624, +0.0222,
+0.0152, +0.0056, -0.6305, +0.3649, +0.4987, -0.2182, -0.6930,
-0.0010, -0.2901, -0.1365, -0.4998, +0.2176, +0.4334, -0.2511,
+0.5084, -0.4405, +0.8559, -0.4784, +0.0126, -0.1106, +0.8800,
-0.7713, +0.2210, +0.1882, +0.0233, -0.0679, +0.0100, -0.6957,
+0.4229, +0.4257, +0.1985, +0.9668, -1.0394, +0.1009, -1.1962,
-0.6810, -0.8522, -0.9797, -0.4765, +0.6148, -0.1001, +0.1577,
-0.5535, -0.6540, -0.0717, -1.4224, -0.4940, +0.3266, -0.3978,
+0.2954, +0.2281, -0.5369, +0.2422, -0.2924, +0.2163, +0.7444,
+0.4338, -0.5244, +0.2214, +0.5298, -0.1770, -0.3549, +0.0541,
-0.1783, -0.1656, +0.9927, -0.9395, +0.6691, -0.2921, -1.1986,
-0.2086, +0.1251, +0.3633, -0.1052, -0.4214, +0.1419, +0.7749,
-0.0831, -0.8905, +0.5360, +0.0565, -2.0616, -1.4039, -0.7739,
+0.2181, +0.3649, +0.2573, -0.1620, -0.2045, -0.4774, +0.4206,
+0.4216, +0.0844, -0.3038, +0.2918, -1.4121, -1.0846, -0.5310,
+0.2190, +0.3286, -0.1058, -0.8251, -0.0417, -0.3994, -0.0012,
+0.4315, +0.1682, -1.1433, +0.2849, -0.5689, -0.3473, +0.6981,
-0.1699, -0.1203, -1.3225, -0.2436, -0.2331, +0.2366, +0.0377,
-0.4367, -0.0645, -1.3493, -0.3995, -0.4136, +0.4007, -0.4532,
+0.1397, -0.4860, +0.7116, -1.0520, -0.7300, +0.9354, +0.2490,
-0.0118, -0.6467, +1.0208, -0.0185, -0.1010, +0.0259, -0.4621,
-0.5824, -0.5307, -0.3225, +0.1271, -0.0694, -0.0902, +0.4922,
+0.2764, -0.1927, -0.2009, -0.8424, +0.0237, -0.0958, +0.3260,
-1.3182, -0.7006, +0.2267, -0.9277, +0.1478, +0.1729, +0.3861,
-0.6517, -0.3447, +0.4089, +0.1253, -0.3610, +0.7556, -0.5048,
-0.8110, +0.1085, -0.9362, -0.8233, +0.4134, -0.0085, -1.5413,
-0.4102, +0.7793, -1.2224, -0.7392, -0.3367, -0.0849, +0.0131,
+0.8377, +0.4575, +0.8130, -2.0800
],
[
-0.5030, +0.4311, -1.1143, -0.2571, +0.0500, +0.6580, +0.1807,
+0.3972, +1.0069, -1.9235, -0.4153, +1.2305, -0.0986, +1.6550,
-2.0094, -0.5650, +0.1596, +2.2019, +0.1256, +1.7433, -1.8320,
+1.2385, +0.2217, +1.9671, -0.0651, -1.5623, +0.6551, -1.0514,
-0.2327, -0.7264, +1.1247, +0.7084, -3.1100, +2.5875, -0.6701,
+0.5488, +0.2897, +0.3663, +0.0783, -0.2380, -0.9809, -1.4906,
-0.0467, -0.3069, -1.3131, -1.1489, +0.4462, -0.0183, -0.7812,
-1.0135, -0.8092, -0.3989, -0.0717, -2.4097, -0.4086, -1.3601,
+0.4654, -1.2662, +0.9914, -1.6910, -0.4650, -1.1016, +0.0476,
+0.4011, +0.1599, -0.2772, +0.1293, -0.1329, +2.4128, +0.3290,
-0.0782, -1.8737, -2.1851, -0.2125, +1.0481, -2.0785, -0.7290,
-0.7160, -0.0253, -0.7524, +0.2129, -0.2267, -0.8073, +0.1843,
+0.7838, +1.0147, +0.1327, -0.0788, +0.3903, +2.0292, +2.5541,
+1.6453, -0.1778, -2.1681, -0.1496, -0.3376, +0.4545, +0.2194,
-1.5197, -1.2230, +3.2199, +1.0545, +0.1571, +0.8699, -4.2024,
+0.9999, +1.8670, +0.3410, +2.0595, -0.1076, +1.5986, -0.3768,
-0.7450, -1.0840, -0.0150, -3.7617, +1.4254, +0.8361, +0.2078,
-0.8808, -0.2463, -3.4380, +1.4160, -1.9439, -1.0356, +2.8675,
+0.9860, -3.6350, -0.8313, +0.2089, +1.3341, -2.8791, +0.4524,
+0.4555, -0.9851, +0.6086, +0.7343, +1.3275, +1.0979, -2.4916,
+0.0722, -0.7323, +0.9880, +1.5356, +0.5431, -0.3248, -0.9631,
+0.7564, +0.7160, -0.0709, -1.5644, +1.2694, +2.5064, -2.5448,
-1.7748, -1.1803, -1.1250, -0.2383, -1.1826, -1.1312, +1.7279,
-0.3852, +0.0540, -0.9594, -0.7022, -2.7155, +1.2207, -0.4283,
+0.2370, -1.1960, +1.0917, -0.3953, -0.5567, -2.5472, -2.0418,
-0.6104, +3.4862, -0.4001, +0.9669, -0.7622, +0.1008, -1.7957,
+1.0803, -0.3787, -0.5001, -0.8955, +0.8044, +2.5939, +1.5200,
+0.9680, +0.4177, -0.2482, +2.2624, -1.1416, -1.5353, +2.2794,
-1.8803, +0.2573, -0.3707, +2.0442, +2.7544, +1.6080, -0.6924,
-0.8825, +2.8865, -0.7160, -1.4308, -0.0376, +0.3018, +1.8101,
+0.3378, -0.3874, -0.3636, -0.5764, -1.0234, +2.6209, +1.1319,
+1.4093, +0.6829, -1.3812, +1.8284, +0.1921, -0.0356, -2.3225,
-0.8761, +1.8518, -1.0584, -0.8935, -0.6575, +0.3690, -0.9779,
+0.2513, +0.4220, +1.2618, +2.0158, +0.1686, +1.9862, +0.6183,
-1.2881, -1.7737, +0.0968, -0.8762, -0.9781, -1.3425, -2.6438,
+0.8928, +0.3356, +0.1711, +1.9928, +0.5683, +2.1339, -0.5671,
-0.2999, +0.2771, +1.8581, -0.7244
],
[
-0.8237, -2.4723, -0.0093, -1.0811, +0.5365, +0.6595, -0.5366,
+1.1555, -0.8875, -1.3330, +1.4121, -0.0944, -0.0463, -0.9496,
-2.8476, -0.7136, +0.6020, -0.0054, -1.4527, +0.9208, +0.3012,
-0.1480, -0.8642, +0.1982, +0.1088, +1.2782, -0.7724, -1.3168,
-0.9594, -1.1935, -0.3237, -0.6884, -0.1389, +0.7907, -2.4444,
-0.4292, -0.1315, -1.2616, -0.0916, -1.7440, -1.5036, +0.1978,
+1.2539, +0.7862, +0.0931, -1.4788, +0.1313, -0.0527, -1.4072,
+0.4968, -0.2224, +0.3658, +1.6503, -0.2057, -0.6812, -0.2703,
-1.6274, +1.0684, +1.2275, +0.1127, -2.2420, -0.1408, -0.2346,
-0.1119, +0.1498, -0.0589, -1.4065, -0.1043, +1.4940, -0.2420,
-0.2223, -0.0973, +0.5512, -1.6769, +0.3286, +0.5443, -0.1542,
+0.9522, -0.8605, +1.4236, +1.6691, -1.5443, -1.4476, -0.2985,
+0.5213, -0.6793, -1.4328, -0.4417, -0.0097, -0.3373, -0.0559,
-0.3530, +0.5015, -2.2177, -0.8512, -1.4107, -0.2278, +1.0842,
-0.6454, -0.6701, +0.2807, -2.3556, +0.6937, -0.1830, +0.2528,
+1.4582, +0.9585, +0.7474, -0.2290, -0.3547, -0.0505, +0.8672,
-1.3728, +0.3593, -1.5079, +1.2051, +0.1671, +0.1287, -0.1089,
+0.9447, +0.2902, -0.0274, +2.6386, -0.4357, -1.0866, -0.7725,
-0.7474, -0.2920, -0.7512, +0.2729, -0.2291, +0.4386, -0.2122,
-1.0203, +0.5233, -0.3802, -0.1169, +0.1006, -0.5432, -0.2080,
+0.6461, -1.4341, +0.0192, -1.2795, +0.8708, -0.3168, -0.4657,
+0.5388, +0.8268, -0.9466, -0.4054, +1.5877, +0.1454, +0.5291,
-0.8998, +1.3563, -1.8375, -1.0953, -1.6413, +0.2750, +1.0708,
-0.9597, -0.3052, -0.0953, -2.2925, -0.0300, +1.2808, +1.3430,
-0.0976, -0.7264, +1.1371, -0.7336, +0.1982, -0.6328, -0.0308,
+0.8406, -0.1227, -0.1331, -2.1831, -0.5677, +0.4565, -0.1594,
-1.8026, -0.1020, +1.3437, -0.2881, +1.0612, -0.9352, -0.2676,
+0.2737, +0.6647, -0.4582, -0.3532, -0.2741, -0.5150, -0.2847,
+0.3100, +1.0055, +0.6360, +1.2014, -1.0370, +0.3126, +0.2799,
+1.4690, +0.2299, -0.9563, +0.1604, +1.0580, -0.0236, +0.5362,
-1.1913, -1.6834, -0.5451, -0.2479, +0.5986, +0.9416, +0.8835,
-0.8778, -1.5741, +0.2702, +0.9200, -1.2569, -0.0010, +0.7474,
+1.0404, +0.0567, +0.7431, -0.5104, +0.2774, -0.3255, +0.0748,
+0.5445, +2.7912, +0.1370, +0.0845, -0.2513, -0.2607, +0.5337,
-0.9225, +1.1601, +0.3889, -1.9067, +0.0405, +0.8032, +0.3716,
+1.0054, -0.1264, -1.2699, +1.0490, -0.4518, -0.4633, +0.2149,
-0.0493, +0.4784, -0.4567, -1.1741
],
[
-0.0944, +0.0480, +1.3354, +0.4432, +0.4545, +0.3836, +0.7112,
-1.3257, -0.6328, -0.0922, +0.1074, +0.9008, +0.8560, -0.7031,
+0.6321, -0.5160, -0.0619, -2.3403, +0.1938, -1.3679, -0.0181,
-0.1057, -0.2195, -1.2982, +0.9032, +0.4164, -0.7188, -0.5110,
+0.6287, +1.9699, -1.0438, -0.3658, +0.3573, -1.1268, +0.8508,
-0.2637, +0.4056, +0.0488, -0.1575, -0.0724, +1.3003, +0.7745,
-1.0815, -0.1154, +0.3653, +0.4608, -0.6923, +0.1087, +0.1252,
+1.0462, -0.8961, +0.3603, -0.5450, +1.1358, -0.2845, +0.3265,
-0.8838, +1.1137, -0.6003, -0.6661, +0.6010, +0.3586, +0.8275,
+0.7424, +0.8698, +0.4970, +0.5513, +1.1435, -0.3027, -0.6154,
-0.1741, +1.3238, +0.2132, -0.2333, -0.1694, +0.7055, -0.8932,
-0.0664, +0.3277, -0.5222, -0.3499, -0.3942, -0.4391, +0.0751,
-0.7457, -1.1832, -0.4438, -0.3663, -1.0466, +0.1051, -1.1153,
+0.4985, -0.2054, -0.0507, +0.3941, -0.1353, -0.4284, +0.2639,
+0.4999, +0.8301, -0.7917, -0.3978, -0.0342, +0.1796, +0.3845,
-1.4847, -0.4932, -0.1793, -0.9531, +0.5409, -0.7341, +0.7626,
+0.3836, +0.5465, +0.1335, -0.2535, -0.4195, -0.7734, -0.0282,
+0.0965, +0.0656, +0.9656, -0.8877, +0.8172, -0.1439, -1.4527,
-0.0562, +1.5419, +0.0539, -0.5125, +0.5689, +0.1007, -1.2620,
+0.3370, -0.1986, -0.2692, -1.2472, -0.7832, -0.6892, +0.6072,
+0.0229, +0.4541, -0.4522, +0.0858, -0.8327, -0.0613, +0.3183,
-0.3064, -1.0261, +0.5405, +1.0349, -0.2815, -0.6725, +0.4688,
+1.6724, -0.0363, -0.0079, -0.0928, +0.3130, -0.2153, -0.5456,
-0.0559, -0.2529, +0.3921, +0.3518, +0.2280, -0.1720, +1.0129,
+0.7512, +0.6854, -0.4686, -0.1534, +0.8615, +0.8109, +0.7310,
+0.3680, -1.4893, -0.7378, -0.4477, +0.0875, -0.4890, +1.8208,
-0.0533, -0.3616, +0.4427, +0.3344, -1.6178, -0.3798, -1.3912,
-0.4389, -0.6612, +0.0819, -1.6541, +0.8210, +0.3600, -0.7912,
+0.8786, -0.6454, -0.3264, -1.2999, -1.7624, +0.1224, -0.1934,
+0.5783, -1.7141, +0.8077, +1.2373, -0.6583, -0.5147, -0.3372,
+0.4648, +0.9727, +0.6322, -1.1757, +0.0907, -0.7186, -0.0748,
+0.5319, -0.7838, +0.7031, -1.4109, -0.2312, +0.0961, +1.0103,
-0.3424, -0.2508, +0.4101, +0.7785, +0.6908, +0.1522, +0.8434,
-0.3437, -0.1315, -0.5243, +0.1131, -0.0426, -0.4195, +0.5145,
+0.4746, -0.1487, +0.3564, +0.6381, +0.7912, -0.5876, +0.4997,
-0.0617, -0.3899, +0.5708, -1.2090, +0.4058, -0.7794, +0.3661,
+0.4308, -0.3041, -0.0436, -0.7230
],
[
-0.1693, +0.2109, +0.3784, -0.0080, +0.5547, +0.4148, +0.3266,
-1.0521, +0.3823, -0.1225, -0.8964, -0.1654, -1.3043, +0.2666,
-0.0568, -0.1568, -0.0791, -0.4838, -0.8760, +0.4993, +0.6268,
-0.7398, -0.6804, -0.2253, +1.2625, -0.2067, +0.0057, +0.9870,
+0.8815, +1.1994, +0.3070, +1.1701, -0.3489, -0.0767, +0.2292,
-0.4519, -2.2981, -1.1613, -0.1678, -0.6078, +0.2043, -0.5179,
-0.5619, -0.2558, -0.9022, +1.1423, +0.1024, +0.6167, +0.3965,
-0.8278, -0.2011, -0.8241, -1.6596, +0.0758, -0.0408, +0.2526,
+0.1655, -1.0185, +0.4132, -0.4839, +1.2156, -1.1229, -1.2566,
+0.7611, +0.5421, -1.0482, +0.8612, +0.9921, -1.1233, +1.7774,
-0.1655, -0.2706, -1.0794, +0.9931, -0.1433, -0.3584, +0.0190,
-0.3132, +0.0309, +0.1503, -0.3054, +0.9918, +0.3700, -0.7727,
+0.1072, -1.0991, +0.4594, -0.7141, +0.6146, -0.0139, +0.8529,
+0.5119, +0.7132, +0.4453, +0.5105, +0.0267, -0.4190, -0.7954,
+0.8213, +0.6556, +0.5070, +0.6732, -0.2794, +0.9429, -0.3095,
-0.5547, -0.4120, -1.4048, +0.2273, +0.4487, +0.3343, +0.7247,
+0.3450, -0.6724, +0.6049, -0.3514, +0.3779, -0.6072, -1.0495,
+0.5256, +1.0763, -0.2961, +0.1332, -0.5312, +0.1100, +0.3449,
+1.6291, +0.2253, +0.2368, +0.0290, -0.2767, -1.1455, -0.2682,
+0.1284, -1.4564, +0.6327, -0.1539, +1.2590, -0.3506, -0.9105,
-0.1498, +0.1575, +0.6813, -0.2192, -0.2872, +0.1151, +0.8868,
+0.1692, -0.6037, +0.4819, +0.3007, -1.5199, -0.8464, -1.0408,
+0.4265, +0.7844, +0.8532, -0.0920, +0.1953, -0.5603, -0.4884,
+0.5512, -1.2749, -0.2217, +0.3123, -0.9690, -1.2862, +0.7008,
+0.7091, +0.3228, -0.4245, -0.4376, +0.6493, -0.2635, -1.2255,
-0.1248, -0.1569, +0.9362, +1.1395, -0.2800, +0.0824, +1.1488,
+1.2098, -0.4093, -0.9025, +0.0509, +0.0269, +0.1081, +0.3318,
+0.3225, +0.1275, -0.0008, -1.0040, -1.1738, +0.7737, -0.3590,
-0.2654, -0.4784, +1.0984, -1.1707, -0.6749, +0.8187, -1.0614,
-0.1445, -0.4563, +0.0654, +0.2446, -0.5677, +1.0999, -0.1202,
-0.8633, -0.2968, +0.2704, -1.0857, +0.1737, -0.5571, +1.1680,
+0.8176, +0.7732, -0.8523, +0.1559, +1.4784, -0.9508, +0.2607,
-0.4858, -0.4288, +0.3398, -0.2658, +0.4940, -0.1560, -0.4237,
-1.1838, -0.5357, +0.7169, +0.2918, +0.4518, +0.1140, -0.7701,
+0.4624, -1.1948, +0.0489, -0.2978, +0.4910, -0.1722, -1.0723,
-1.0176, +1.2704, +0.5973, -0.0866, -0.1371, +0.0481, -0.3902,
+0.0107, +0.3683, +0.4267, -1.4834
],
[
+0.5232, -0.8558, +0.5185, -0.1749, +0.1769, -0.5573, -0.0458,
-0.4127, +1.3763, +2.5431, -0.5299, +0.3071, -0.5284, -0.4808,
+0.2318, -0.0218, -0.1686, -0.1167, -0.0031, -0.3821, +0.9205,
+0.0218, +0.7872, -0.5842, +0.0058, +0.2999, -0.2260, -0.1462,
+1.3377, +0.5496, -0.3178, -0.8289, +1.1450, +1.0358, -0.7660,
-0.5572, +0.3902, -1.2106, -0.1539, +0.2933, +0.6901, -1.1992,
+0.5752, +0.0957, -0.2819, -0.5681, +0.2146, +0.0067, -0.9133,
+0.9878, -0.1733, -0.3514, -0.8065, +0.9410, +0.2219, +0.3498,
-0.5299, +0.3124, -0.8900, +0.6115, +0.2797, +0.9970, +0.3009,
-0.0738, -0.3954, -0.4603, +0.7079, +0.2928, -0.5789, -1.0670,
+0.7549, -0.7836, +0.4045, +0.3377, -0.4876, +1.7466, +1.9039,
-0.5488, -0.4373, -0.2601, -0.3506, -0.0796, +1.1116, +0.0505,
-0.1337, -0.2355, -1.6805, +0.1413, -0.3198, -1.0743, +0.5475,
-0.8052, +0.8536, +1.1319, -0.0467, -0.6715, +0.2528, -0.3115,
-0.5555, +0.4093, -0.1936, -0.3740, +0.2118, +0.0130, +0.1481,
+0.8102, +0.8727, -0.4478, -0.1795, -1.2396, +0.5329, +0.7156,
-0.1103, +0.9139, +0.2145, +0.7780, -0.1246, -0.9007, +0.5015,
-1.2858, +0.5447, +1.3758, -0.5856, +0.4267, -0.1301, -0.9774,
-0.6882, -0.1422, +0.6529, -0.5675, -0.5422, +0.9215, -0.2504,
+0.3921, +0.9338, +0.0692, +0.4588, +0.0834, -0.4489, +0.0373,
+0.3061, +0.0813, -1.3657, -0.2746, -0.3013, -1.1962, +0.1209,
+0.9623, +0.0926, +0.0567, +0.0956, -0.2540, -0.0818, +0.0936,
+1.1011, -0.6436, -0.8282, -0.5360, -0.2208, +0.5188, -0.3779,
+0.3434, -0.0911, +0.6764, +0.2386, +0.0013, -0.9257, +0.6741,
-0.1466, +0.2445, +0.1111, +0.3185, +1.1240, +0.8707, +0.0378,
-0.9029, -0.9465, -0.6231, -1.4129, +0.3802, -1.5123, +1.3141,
+0.1999, +0.4264, -1.0309, -0.0267, -0.5697, +0.4476, -2.1773,
-0.5022, -0.5318, -0.1031, -0.4144, +0.2167, +1.0164, -1.5997,
-0.1081, -0.3293, -0.2674, -0.7626, -1.0667, -0.8751, +0.0664,
+0.4146, -1.3921, +0.5493, -0.7975, +0.0980, -0.5029, -0.1264,
-0.7898, +0.2919, +0.0012, -0.2293, +1.3743, -0.1972, -1.3811,
-0.0774, -1.0635, -0.3949, +1.1958, +0.3768, -0.2495, +0.8773,
-0.3418, -0.6583, +1.2778, -1.6794, +0.1734, -0.0181, +0.2225,
+0.6471, +0.2625, -0.6214, -0.3865, +1.0985, -0.8866, +0.4487,
+0.1762, +1.0691, +0.7759, +0.5970, +0.2840, -0.7712, +0.1730,
+0.0925, -0.1487, +0.0576, +0.0686, -0.1030, +0.6999, -0.3605,
-0.6520, -0.1534, +0.5036, -0.0735
],
[
-0.2289, -0.5922, -0.3208, -0.1333, -0.0326, -0.0578, +0.0332,
+0.0471, -0.1084, -0.4932, -0.0932, -0.0906, -0.0260, -0.2126,
-0.4305, -0.0188, +0.1235, -0.2268, +0.0852, -0.2105, +0.0018,
-0.0488, +0.1105, +0.0611, -0.3910, -0.0054, +0.1663, +0.0056,
+0.0980, -0.0026, +0.1730, -0.0223, -0.1192, +0.0819, -0.0443,
+0.1055, +0.2771, +0.1749, -0.4834, +0.0447, -0.4144, +0.1763,
+0.2071, +0.1313, -0.1478, -0.3675, -0.4603, -0.0393, -0.1476,
-0.4405, +0.2786, +0.0533, -0.0090, -0.1548, -0.3037, -0.0474,
-0.2303, +0.1435, -0.4198, -0.2340, -0.3181, +0.0761, -0.1041,
-0.0830, +0.0821, +0.0260, -0.3217, +0.0803, +0.1699, -0.4164,
-0.1374, -0.1893, +0.0264, -0.2450, +0.1259, -0.6180, -0.1789,
+0.1294, -0.2350, -0.1580, +0.2364, +0.0720, -0.1429, -0.0719,
-0.3570, -0.0641, -0.2126, +0.1802, -0.1192, +0.0961, -0.0488,
-0.3722, -0.1681, -0.1850, -0.1174, -0.0995, -0.0689, -0.1762,
+0.2204, -0.4273, -0.3115, +0.0453, -0.1812, +0.0243, -0.1017,
-0.0409, -0.0154, -0.0384, +0.0726, +0.5732, -0.1037, -0.2238,
-0.0214, -0.0157, -0.0765, -0.1075, -0.0497, -0.0572, +0.3680,
-0.2548, -0.1833, -0.1734, -0.0024, -0.0409, -0.2650, -0.3656,
-0.2709, -0.4396, +0.0812, +0.0568, -0.3316, -0.1572, -0.2154,
-0.1327, -0.1297, -0.3840, -0.4213, -0.3873, +0.3184, -0.1843,
-0.5567, -0.2090, -0.2135, +0.0507, -0.0770, -0.4947, -0.3724,
+0.0858, +0.0379, -0.2661, +0.0211, -0.1906, +0.2820, +0.1454,
+0.0023, -0.2778, -0.2379, -0.0416, -0.2564, +0.1671, -0.2432,
-0.1838, -0.1781, -0.3161, -0.3932, +0.1783, +0.1303, +0.1332,
+0.0891, -0.1433, +0.0342, +0.0357, -0.1301, -0.0396, +0.0971,
+0.0341, +0.0115, -0.3819, +0.2581, -0.4620, +0.3078, -0.0279,
-0.2815, +0.0667, -0.1623, -0.1194, -0.0260, -0.2250, +0.1046,
-0.3312, -0.2797, +0.0327, -0.0621, -0.2257, -0.0914, +0.1443,
+0.0216, +0.2057, -0.1393, +0.4458, +0.2710, +0.3774, -0.1885,
+0.0797, -0.0635, -0.3876, +0.1444, -0.0729, -0.2443, -0.1203,
+0.4256, -0.0877, +0.0331, -0.3111, -0.1889, -0.0357, +0.1845,
-0.1711, +0.0514, -0.1889, +0.0969, -0.0028, -0.1255, -0.1160,
-0.0326, -0.1921, -0.4856, +0.4974, -0.2119, -0.1067, +0.2061,
-0.0208, -0.3160, -0.1110, -0.2838, -0.2922, +0.1144, +0.0072,
-0.0288, +0.1297, -0.0054, +0.2679, -0.3201, -0.2886, +0.0570,
+0.1230, +0.0826, -0.1899, -0.0796, -0.1337, +0.0620, -0.4111,
-0.4262, +0.1163, -0.0294, -0.0575
],
[
-0.0622, +0.1620, +0.1952, -0.1080, +0.4558, +0.2408, -0.3618,
-0.4013, +0.7836, +1.3814, -0.2123, -0.3030, +0.1139, +0.1433,
+0.2230, -0.0583, -0.1621, +0.4591, -0.4137, +0.4395, -0.3145,
+0.1856, -0.4073, +0.2994, +0.1968, +0.5244, +0.3641, -0.0631,
+0.6546, +0.1779, +0.1935, -0.1469, +0.9319, -0.0075, +0.0064,
+0.1900, +0.5569, +0.0826, +0.0453, -0.0783, +0.4028, +0.0221,
-0.0692, +0.4296, -0.2226, +0.3554, -0.0285, -0.4677, -0.1184,
+0.3233, +0.1011, +0.1190, +0.5385, +0.1720, -0.5215, -0.2803,
+0.2099, -0.0718, -0.1586, +0.1376, +0.0903, +0.1490, +0.4136,
+0.2823, +0.5320, +0.1268, -0.3858, +0.6468, -0.0144, +0.4940,
-0.1289, +0.1769, +0.1861, +0.0321, -0.0865, +0.4818, +0.3785,
+0.0222, -0.0553, -0.3994, -0.2557, +0.2507, -0.0072, +0.0473,
-0.4798, +0.0217, +0.5227, +0.2150, +0.0384, -0.2661, +0.2132,
-0.1044, +0.5976, -0.2044, +0.1120, -0.0102, -0.2942, -0.3944,
+0.4103, -0.1596, -0.1556, +0.0462, -1.0610, +0.5723, +0.4564,
-0.2365, +0.3736, +0.1133, +0.1969, -0.2941, +0.5913, -0.0584,
+0.3066, +0.0832, -0.3040, +0.4964, -0.1276, +0.0732, +0.0802,
-0.7690, +0.2472, -0.1118, -0.4396, +0.4151, -0.1141, -0.2248,
+0.2854, +0.3040, +0.3152, +0.6751, -0.2495, -0.1024, +1.0877,
+0.2063, +0.1743, -0.2615, +0.6156, +0.0471, -0.5015, +0.6820,
-0.1908, +0.3957, -0.0110, -0.2385, -0.5311, +0.4914, +0.0466,
+0.2728, +0.1332, +0.0846, +0.2048, -0.2136, -0.2074, +0.2046,
+0.0061, +0.2663, +0.0967, +0.0476, +0.0986, +0.3985, +0.5587,
-0.5540, +0.0154, +0.4506, -0.0422, +0.0927, +0.2275, +0.3403,
+0.5497, +0.0195, -0.2267, -0.0105, -0.0707, +0.4026, +0.0864,
+0.3935, +0.3245, +0.6428, -0.3807, -0.0543, +0.5879, +0.0241,
+0.6732, -0.2620, +0.1640, -0.0437, -0.1938, +0.5656, -0.3021,
+0.8425, -0.0940, +0.8219, -0.0001, +0.2326, +0.1968, -0.0967,
+0.1712, -0.1926, +0.0581, -0.4711, -0.1671, +0.2421, +0.3986,
+0.1873, -0.6380, +0.1934, +0.1938, -0.4676, +0.3012, +0.0563,
-0.1293, -0.3734, -0.2708, -0.2509, -0.3866, +0.3017, -0.5500,
+0.6813, -0.0220, +0.4279, -0.0009, +0.7408, +0.1708, -0.1965,
+0.4782, -0.0380, -0.5077, +0.0028, +0.5409, +0.4656, -0.0188,
+0.2003, -0.1777, -0.1324, +0.5119, +0.5334, -0.0911, -0.0135,
-0.3527, +0.3077, -0.1689, -0.6573, -0.2195, -0.3638, +0.0081,
+0.1452, +0.3054, -0.6117, -0.5621, +0.3031, +0.0552, +0.3948,
+0.5129, -0.2373, +0.0839, +0.5478
],
[
-0.2677, -0.3924, -0.1111, -0.0630, +0.0402, +0.8159, -0.7826,
-0.3290, +0.4201, -0.3960, -1.1485, +0.5117, +0.2460, -1.0035,
+0.1106, -0.6896, +0.9486, -0.3650, -1.0935, -0.3699, +0.3832,
-0.5691, -0.6172, +0.7244, -0.1093, +0.4114, +0.5099, -0.6623,
+0.6719, +0.3053, +0.8786, -0.3956, -0.6784, -0.2966, -0.1117,
-0.3018, +0.2616, -0.4997, +0.2619, +0.5151, +0.4021, +1.0451,
-0.1027, -1.5431, -0.0887, +0.0385, +0.2563, +0.8701, +0.0441,
-0.2206, -1.3057, -0.4979, +0.9920, +0.4251, -0.5946, -0.6684,
-0.0290, -0.8045, -0.1703, +0.3256, -0.2287, -0.1461, +0.0372,
-0.0408, +1.7013, -0.7573, -0.4167, +0.8182, -0.1155, -0.2996,
-1.2980, +0.4730, -0.5008, +0.6145, -0.2813, -0.0028, -0.1076,
+0.0624, +0.5987, -0.4668, +0.3587, -0.1747, +0.1252, +1.1977,
-0.2709, +0.2823, -0.1866, +0.3410, -0.5528, +0.2268, +0.5243,
-0.4282, -0.5827, -0.4805, -0.5156, -0.0079, -0.1744, -0.7665,
-0.0406, -0.5617, -0.0789, +0.2851, -0.8815, -0.0942, -0.0398,
-0.5599, +0.8206, -0.4598, +0.1249, +0.0113, +0.3342, +0.4045,
+1.0031, +0.0616, -0.7941, -0.5623, -0.5799, -0.3538, +1.4136,
-0.0132, +0.0433, +0.0970, +0.4327, -0.6650, -0.3680, +0.0627,
+0.6853, +0.0673, +0.4382, -0.0087, -0.1974, -0.4244, -0.7625,
+0.6924, -0.1693, -1.1118, -0.2853, -0.2515, -0.1511, -0.0270,
-0.1421, -0.4663, +0.3691, -0.4666, -0.7505, +0.6488, -0.4772,
-0.0213, +1.2589, -0.2793, +1.1358, -0.5494, +0.0434, +0.5151,
+0.0317, -0.4004, -0.2366, +0.6840, +0.1423, +0.2131, -0.8648,
+0.2816, +0.0387, -0.4625, -0.1181, -0.2421, -0.8358, +0.0494,
+0.7998, -0.1886, -0.8349, +0.5409, +0.0161, +0.8034, +1.4518,
+0.3666, +0.7202, +0.4448, -0.5884, -0.3133, +0.7557, +0.0059,
+0.8145, -0.2922, +0.2949, +0.7668, +0.9744, -0.1347, -0.2402,
+0.9782, +0.3939, -0.1655, -0.1001, -0.3705, -0.0666, -0.1012,
+0.8532, -0.2305, -0.4295, -0.2271, +0.0515, +0.2608, -0.6333,
+0.8459, -0.1468, +0.9393, +0.3433, +0.3040, +0.0816, -1.2577,
-0.0702, -1.1087, -0.0060, -0.7152, -1.0524, +0.6505, -0.7323,
+0.3990, -0.5519, +0.6579, +0.1630, +0.9545, -0.2553, -0.6284,
+0.3982, -0.2823, +0.5572, +0.5401, -0.2477, +0.2319, -0.5750,
+0.7342, +0.0408, +0.5492, -0.2967, +1.4490, +0.3361, -0.2268,
-0.7317, +0.6182, -0.1477, -0.4826, +0.0556, -0.3748, +0.6668,
-0.0177, -0.5986, -0.7161, -0.6142, -0.2449, -1.2808, +0.8982,
-0.7086, -0.7748, -0.5544, +0.5172
],
[
+0.3264, -0.3646, -0.2821, +0.2518, -0.2395, +0.4445, +0.1520,
-0.6761, +0.0477, -0.0257, -0.1895, +0.3927, -0.2809, -0.9354,
+0.2936, -0.3648, -0.3078, +0.1713, -1.5407, -0.1606, +0.0767,
-0.1158, -0.8929, -0.2041, -0.1796, -0.1950, +0.0282, +0.1191,
-0.4838, +0.1555, +0.8111, +0.6948, -0.2704, +0.2824, +0.3988,
+0.2703, +0.0469, -0.2392, +0.0474, -0.1288, +0.4766, +0.4855,
-0.4078, -0.5256, +0.1170, +0.0866, +0.4342, -0.0211, -0.0795,
-0.0084, -0.2736, +0.2599, -0.0845, -0.2772, +0.5235, -0.2618,
-0.2152, -0.0467, +0.1013, -0.5947, +0.5272, +0.0357, +0.4374,
+0.1216, -0.3292, -0.9619, -0.2307, +0.4286, -0.3531, -0.2730,
-0.1754, +0.2060, +0.4299, +0.0241, -0.0986, -0.0976, -0.5591,
-1.1694, -0.3214, +0.5812, -0.3629, -0.3911, +0.6738, -0.7340,
+0.3937, +0.0766, +0.2025, -1.3811, +0.3293, +0.1468, +0.2015,
-0.0419, -0.4160, +0.2073, +0.0666, -0.2161, -0.6732, -0.2642,
+0.1835, +0.3544, +0.3416, +0.0349, -0.4627, -0.1447, -0.9931,
-0.0727, -0.2949, -0.2648, +0.2203, -0.6087, +0.3623, +0.3079,
-1.2249, +0.7935, +0.0456, -0.3232, -0.7579, -0.3691, -0.3644,
-0.3231, -0.5396, -0.4050, -0.0730, -0.4963, -0.0114, +0.2249,
-0.3597, +0.7511, +0.2707, +0.5135, -0.1835, -0.5691, +0.2467,
-0.6838, +0.1932, +0.5926, -0.9298, -0.1595, +0.0481, +0.4326,
-0.0335, -0.3318, +0.2815, -0.0922, -0.2933, -0.0080, +0.5774,
-0.0885, -0.4674, -0.2266, +0.0717, -0.6698, -0.4830, +0.1746,
+0.1206, +0.0057, +0.4676, +0.8745, +0.1639, -0.4449, +0.1793,
-0.3137, +0.6698, +0.9404, -0.1441, +0.0394, -0.6488, -0.3911,
-0.5945, +0.3869, -0.5025, -1.2525, -1.1017, +0.6362, +0.4449,
+0.2157, -0.1160, -1.1086, +0.7340, -0.0693, -0.5617, +0.4067,
+0.0116, +0.4172, -0.3053, +0.0775, -0.0651, +0.2388, -0.1879,
-0.4723, -0.1241, -0.4346, +0.0472, -0.0284, -0.1501, -0.2792,
-0.3968, -0.7242, -0.4812, -0.5336, +0.2079, -0.3344, -0.3619,
-0.5222, -0.3702, -0.0723, -0.0517, -0.4299, -0.0198, -0.1965,
-0.6650, -0.1824, -0.2982, +0.1540, -0.1657, -0.1400, +0.1720,
-0.2381, -0.7029, -0.0878, +0.0376, +0.0026, +0.3682, +0.5348,
-0.4287, -0.1807, -0.1908, -0.4586, -0.0709, +0.2970, -0.0811,
+0.0257, -0.2661, -0.6801, +0.5421, -0.3208, +0.3125, +0.1977,
-0.1810, -0.3915, -0.5920, +0.3147, +0.7049, +0.9834, +0.3962,
-0.8117, -0.2904, -0.7984, +0.4009, -0.5159, -0.1035, +0.1518,
+0.0063, -0.3525, +0.0744, -0.1268
],
[
+0.0625, +0.2454, -0.1680, -1.0803, -0.9500, +0.2996, +0.3217,
+0.0085, -0.0059, -0.3208, +0.5185, +0.1062, +0.3422, -0.3999,
+0.5857, -0.0080, -0.8149, -0.0158, -0.3960, +0.4506, +0.3388,
-0.3658, -0.2523, +0.6703, +0.5079, +0.1302, -0.4467, -0.5027,
-0.0643, +0.0363, +0.2143, +0.0472, +0.2093, +0.4312, -0.1742,
+0.4757, +0.0191, +0.4348, +0.1218, +0.3021, +0.3486, +0.2348,
-0.1680, +0.7838, +0.2344, -0.4433, -0.3828, -0.0978, -0.1935,
+0.4506, +0.2706, -0.8820, -0.1456, -0.0464, +1.2349, +0.3701,
-0.0550, -0.0462, +0.0159, -0.1349, +0.5415, -0.2748, +0.7589,
-0.7571, -0.2683, -0.2130, -0.2549, -0.0779, +0.1291, -0.0558,
-0.4440, +0.2060, -0.3370, +0.0437, +0.0959, -0.3270, -0.3595,
+0.4739, -0.6070, +0.3405, -0.6890, -0.3997, +0.9429, -0.2491,
-0.4467, -0.1436, +0.1208, +0.0807, +0.8200, -0.5003, +0.1874,
-0.4556, -0.1118, +0.1554, -0.1637, -0.6165, +0.0985, -0.6718,
+0.1694, +0.0411, +0.4699, -0.6709, -1.1974, -0.4875, -0.1075,
-0.3117, +0.6738, -0.0284, +0.4542, -0.9254, +0.3416, +0.5329,
-1.0175, +0.4751, -0.0045, -0.3945, -0.4542, +0.1306, +0.1225,
+0.2302, -0.5529, -1.3256, -0.0214, -0.2178, +0.8287, -0.0749,
-0.2770, -0.0777, -0.3252, +0.1128, -0.3628, +0.7585, -0.0378,
-0.2960, +0.6577, +0.3493, -0.0804, -0.1532, -0.5900, -0.0800,
+0.6437, -0.6141, +0.6726, -0.7391, +0.2646, +0.0756, +0.6011,
-0.0260, -0.8565, -0.1149, +0.2192, +0.4871, +0.0104, +0.2024,
-0.5196, -0.7092, +0.4307, -0.3232, +0.1580, +0.0137, -0.0342,
-0.2374, +1.0388, +0.7055, +0.4224, +0.0823, +0.8568, +0.4611,
+0.1539, +0.6464, +0.2207, -0.1220, -0.6761, +0.3361, +0.5564,
+0.6674, -0.3938, -0.2722, +0.3716, -0.3942, -0.2741, -0.0643,
+0.0216, +0.0490, -0.4106, +0.8186, +0.1605, -0.0186, +0.0375,
-0.6412, -0.0754, -0.9746, +0.6032, +0.2447, -0.3215, +0.8977,
-0.0854, +0.5343, -0.4140, -0.2407, -0.1262, -0.4800, +0.4799,
-0.4230, -0.2194, +0.2363, -0.4803, -0.2196, +0.0986, -0.7099,
-0.3558, -1.0284, -0.9217, -0.3823, +0.5392, -0.0932, -0.1111,
-0.0598, -0.7438, +0.2199, +0.6413, -0.2135, +0.3571, +0.3792,
-0.4850, -0.2320, -0.6774, -0.0164, +0.4418, +0.2769, +0.3637,
+0.0232, +0.8487, -0.9506, -0.1698, -0.5427, -0.2042, -0.5941,
+0.2801, -0.4724, -1.5630, -0.5589, +0.7907, +1.0127, -0.6128,
+0.3578, -0.5873, -0.3068, -0.0814, -0.0631, -0.0210, +0.9920,
+0.3312, -0.4430, +0.6246, +0.6557
],
[
+0.1172, -0.2783, -0.7095, -0.1262, +0.1530, -0.4792, +0.4528,
+0.4891, -0.0607, -0.0569, -0.6220, +0.0542, -0.6498, -0.2800,
+0.6819, +0.3944, +0.5009, -0.9332, -0.2285, -0.7853, -0.6265,
+0.1337, +1.1938, -0.1144, -0.3300, -0.0133, +0.5468, -0.2664,
-0.0629, +0.0819, -0.2319, -0.4802, +0.2619, -0.7073, +0.1089,
-0.0294, +0.1425, -0.2403, -0.0213, +0.8602, +0.0572, +0.1639,
+0.3086, -0.6500, +0.1558, +0.1301, +0.2429, -0.2941, -0.1814,
-0.3338, -0.6974, +0.3184, -0.5866, -0.1422, +0.2782, +0.0576,
+0.2659, -0.2389, -0.5860, +1.3488, -0.1118, -0.4311, -0.1720,
-0.2200, +0.1286, -0.1911, -1.0148, -0.0483, -0.3248, +0.3650,
-0.0782, -0.1176, +0.3009, -0.1670, -1.2346, -0.4646, +0.1518,
-0.8071, -0.3581, +0.4864, -0.3260, -0.3035, -0.2051, -0.6271,
+0.4720, -0.5316, -0.8055, +0.6693, +0.5715, -0.7599, -0.2398,
+0.1251, +1.0183, +0.2211, -0.9324, +1.5669, -0.3032, -0.0371,
-0.0379, -0.2408, +0.5500, -0.4923, -0.1783, -0.5724, +0.1722,
+0.2496, -0.0764, +0.5859, -0.1657, +0.2223, -0.2168, -0.0718,
+0.0140, -0.5691, +0.1387, +0.7260, -0.1042, +0.0874, +0.5520,
-0.6215, +0.5677, -0.1417, +0.1505, -0.1036, -0.6195, -0.5452,
-0.0128, +0.2492, -1.2844, +0.2443, +0.1171, +0.6839, +0.8262,
+0.6484, -0.0565, +1.0374, +0.3303, -0.5101, -0.2423, -0.4095,
+0.1344, +0.6334, +0.2040, +0.5704, +0.0024, -0.0590, +0.2815,
+0.4398, -0.0049, +0.0402, +0.1377, +0.1521, -0.2131, +0.0969,
+0.0767, +0.3693, -0.0257, +0.2920, -0.0683, -0.1724, +0.5336,
-0.2084, +0.2234, -0.5008, +0.4164, +0.7633, -0.0421, -0.1122,
+0.4551, +0.3941, +0.0674, +0.9635, -0.0118, +0.3610, -0.2044,
-0.0694, -0.6825, +0.3749, +0.1638, +0.5919, +1.0967, +0.1926,
+0.2287, -0.0137, -0.2464, +0.1192, +0.4823, -0.7307, -0.1790,
+0.7419, -0.4121, -0.7841, -0.5308, -0.1830, -0.7204, +0.0274,
+0.3211, +0.4353, -0.5871, -0.4515, +0.7361, +0.3886, -0.2943,
+0.1441, -0.0550, +0.4522, -0.0730, +0.1857, -0.0623, -0.0710,
+0.8215, +0.6196, +1.3767, +0.2375, +0.3282, -0.7874, +0.3915,
+0.3988, -0.5452, -0.0948, -0.8480, -0.6538, +0.6593, +0.1014,
+0.0919, -0.6544, -0.1404, +0.7558, +0.0326, -0.2532, +0.2364,
-0.3909, +0.4244, +0.0179, +0.1291, +0.7988, -0.1690, -1.0113,
+0.1032, +0.2351, -0.5359, +0.2737, +0.4163, +0.1660, -0.3177,
-0.8316, +0.4132, -0.6683, -0.0579, -0.5545, +0.1057, -0.0281,
-0.0024, -0.4894, +0.2741, +0.3701
],
[
-0.0647, +0.6531, -0.1032, -0.4992, +0.5037, -0.5282, -0.2488,
-0.1359, -0.2810, -0.1751, -0.7607, +0.1065, -0.9882, +1.0841,
+0.3367, -0.4327, -0.3151, -0.4300, +0.8063, -0.7392, -0.4708,
+0.2053, -0.0891, -0.1977, -0.3533, +0.6672, +1.0239, +0.2179,
-0.0560, -0.1074, -0.6713, +0.2259, -0.4024, -0.7552, +0.0620,
-0.2045, -0.1162, -0.1587, -0.0859, +0.1280, -0.0673, +0.1809,
+0.7352, -0.0873, -0.3025, -0.0806, +0.4108, -0.1462, +0.8900,
+0.0117, -0.9559, +0.2582, +0.3579, -0.1051, +0.1183, +0.1419,
+0.0427, -0.4753, -0.1908, +0.1446, -0.7479, -0.6421, -1.1942,
-0.3313, +0.6718, -0.2275, -0.0107, -0.6022, -0.5933, +1.5015,
-0.3277, -0.8475, -0.3529, -0.1143, -1.2357, -0.0210, -0.8484,
-0.4511, -0.4182, +0.0266, +0.8972, -1.0084, +0.2561, +0.2466,
+0.9909, -0.2399, -0.3424, +1.0800, -0.0557, -1.0021, -0.2742,
-0.7683, -0.3469, -0.8045, +0.3725, +0.8353, +0.6543, +0.3471,
+0.1389, -0.2780, +0.8554, +0.0150, +0.3343, +0.1361, +0.5095,
+0.4998, +0.3005, +0.0996, +0.0335, -0.3677, -1.0182, -0.2247,
-0.6468, -1.1545, +0.7807, -0.9215, -0.3781, +0.4275, -0.6639,
-0.4142, +0.8174, +0.0859, +0.1202, -0.1684, -0.2050, +0.2585,
+0.3646, -0.8418, -1.2981, +0.2840, +0.1240, +0.4204, +0.5400,
+0.3127, -0.2068, +0.2112, +0.4278, +0.0198, -0.5277, -0.7125,
-0.1634, -0.6394, +0.6822, +0.1249, -0.5505, -0.1739, -0.1031,
-0.2829, +0.2146, +0.3009, +0.5936, -0.0538, -0.5024, -0.3015,
+0.0261, -0.1504, +0.4943, +0.5050, +0.4151, -0.1256, -0.6758,
-0.6859, +0.8435, -0.8839, +0.0378, -0.6141, +0.3176, -0.2099,
+0.4002, +0.8110, +0.0505, +0.3271, -0.6036, +0.3374, +0.0504,
-0.3805, -0.6931, -0.1313, +0.0039, -0.3990, -0.6584, -0.5678,
-0.4761, -0.3674, -0.5052, +1.3493, +0.9539, +0.3604, -0.5794,
+0.6608, -0.4635, -1.5371, -0.6674, +0.2887, -0.7866, -0.6152,
-0.0875, -0.2175, -0.3096, -0.1594, -0.2646, -0.2216, -0.4797,
+0.9517, -0.3274, +0.0285, +0.6995, -0.2708, -0.5643, +1.2240,
+0.0139, +0.7940, +0.1366, +1.1820, -0.2451, -0.0172, -0.1148,
+0.4907, -0.2293, -0.5941, +0.1354, +0.6113, -0.0891, +0.2567,
+0.2524, -0.5225, -0.0711, +0.3287, +0.7458, -0.5293, -0.2398,
-0.2208, +1.0366, +0.6547, +0.5174, -0.0728, +0.1773, -0.6946,
+0.0987, +0.0746, +0.4341, +0.8016, +0.6499, +0.2621, -0.4212,
-0.2207, +0.8608, -0.0083, +0.2800, +0.2154, +0.0722, +0.5301,
+0.3713, -0.0081, +0.2663, +0.1345
],
[
+0.3130, -0.0585, -0.1601, -0.1024, -0.3747, -0.4581, -0.2575,
+0.1997, +0.5295, +0.0490, -0.2800, -1.0577, -0.0588, -0.0160,
-0.1494, +0.0341, +0.0715, -0.0382, +0.1771, +0.1815, -0.0765,
+0.2010, -0.1023, +0.5555, -0.4700, +0.0106, +0.1853, +0.4241,
+0.5859, -0.2705, +0.3113, -0.3753, -0.3432, -0.2268, -0.2882,
+0.0709, -0.0510, -0.1118, +0.1877, +0.3823, +0.1815, +0.3125,
+0.2013, -0.1324, -0.8493, -1.1203, +0.0683, +0.1163, -0.1035,
+0.1080, -0.5283, -0.4333, +0.1647, -0.5032, +0.0462, -0.2156,
+0.4899, -0.3746, -0.2878, -0.1702, +0.2101, -0.2306, -0.2466,
+0.0105, +0.2818, -0.2101, -0.4292, -0.0190, -0.2841, -0.0979,
+0.0887, +0.6823, -0.5044, +0.1998, -0.4433, +0.1215, +0.1917,
-0.2307, -0.1198, -0.0065, -0.3293, -0.4149, -0.1700, -0.2872,
+0.3405, -0.0418, +0.0654, +0.1995, +0.8670, -0.2927, +0.1775,
+0.0614, +0.3018, -0.2374, +0.0618, -0.3852, +0.0395, +0.2544,
+0.0949, -0.2742, -0.3114, -0.0646, +0.0232, -0.0231, -1.5945,
-0.2847, -0.1796, +0.3372, +0.2968, -0.1711, +0.0107, -0.3552,
+0.1396, +0.2219, -0.4459, -0.5304, +0.0420, +0.2320, -0.2626,
-0.2587, +0.1755, +0.1605, +0.3022, +0.1686, +0.2614, -0.1657,
+0.0606, +0.3014, -0.1068, -0.2048, -0.6107, -0.1257, -0.1822,
+0.3166, +0.2270, -0.0117, -0.7933, +0.1176, -0.0666, -0.1363,
-0.0265, +0.1240, -0.6695, +0.0981, +0.1127, +0.3905, +0.2705,
-0.4131, -0.4401, +0.2276, +0.3034, -0.0891, -0.4392, +0.0877,
-0.0618, +0.3465, +0.0433, -0.0895, -0.2266, -0.1432, -0.2860,
-0.3363, -0.2260, -0.0956, -0.2604, -0.2610, +0.1076, -0.1004,
+0.0107, +0.0907, -0.1411, -0.5052, +0.0195, +0.5423, +0.2808,
-0.6256, +0.3389, +0.0530, +0.0222, -0.0686, +0.5507, +0.0523,
-0.4677, -0.2825, +0.3375, -0.5367, +0.4671, +0.5011, -0.0104,
+0.0006, +0.4560, -0.1539, -0.1961, -0.0134, -0.3079, -0.0018,
-0.1379, -0.3216, +0.1171, +0.4003, -0.8743, -0.4253, -0.1950,
+0.2639, +0.2655, +0.3551, +0.1033, +0.1134, -1.0368, +0.0362,
+0.0625, +0.0248, -0.1044, -0.5546, -0.3031, +0.2400, -0.0652,
-0.0214, +0.0618, -0.0641, -0.2360, -0.0096, +0.0669, +0.1070,
+0.4031, +0.1504, -0.0330, +0.4363, +0.0873, +0.2462, -0.0825,
+0.2503, +0.0775, -0.0813, -0.4761, -0.5802, +0.0291, -0.2824,
-0.7243, -0.0158, +0.0532, -0.5854, -0.1676, +0.0203, -0.7103,
+0.3987, +0.3179, -0.0535, +0.1871, -0.4414, -0.3198, -0.1166,
-0.2955, +0.2740, +0.2244, +0.3489
],
[
-1.5291, +0.7509, -0.0074, -0.7377, -1.1521, -0.0855, -0.0966,
-1.1100, -0.0266, +0.4928, -1.3361, -0.7886, +0.1070, +0.4873,
-0.7260, +1.1879, -0.2253, +0.2718, -0.1084, -0.0427, -1.2677,
-0.6730, -0.3085, +0.5077, -0.2227, -1.7757, +0.3039, +0.5841,
+0.1047, +0.0855, +1.0019, -0.6676, +0.0868, -0.5888, -0.4921,
-1.9100, +0.2842, +0.8124, +0.3632, -0.6278, +0.2443, +0.3960,
+0.6543, -0.5543, +0.1428, -1.4196, +0.3199, +0.3195, +0.9092,
+0.2958, +0.7929, +0.3910, -1.2143, +0.9585, +0.0838, +0.2479,
+0.6162, -1.7836, -0.5629, +0.7295, +0.0387, +0.0682, +0.1486,
-0.4095, +0.5116, +0.5360, +0.3109, -0.7139, +1.7474, +0.3068,
-0.1707, -0.2102, -0.4153, +0.1328, -0.4242, +0.8287, +0.4251,
-0.1067, -0.0917, -0.4040, -0.0577, -1.4141, -0.4519, +1.0870,
+0.3584, -0.2708, -0.5739, +0.1603, +0.4197, -0.1262, -1.2906,
+1.2730, -0.4890, +0.1423, -0.3007, -0.1097, +0.0113, +1.2365,
+0.0550, -0.3075, +0.0122, +0.4935, -0.5145, -0.2649, +0.1376,
+0.4116, +0.2533, +0.1799, -0.3758, -1.1299, +0.0371, -1.0792,
+0.5395, -0.2699, +0.1940, +0.4513, +0.4543, -0.0455, -1.3227,
+0.6893, -1.0421, -0.0902, +0.0099, -0.5710, +0.0049, +0.1784,
-0.2424, +1.0698, -0.4470, -1.6596, +1.3418, +0.7313, -0.0859,
-0.0128, -0.0812, +0.0355, -0.6078, +0.3808, -1.0758, +0.1621,
+0.3418, +0.3347, -1.1316, +0.2234, -0.6019, +0.5632, -0.4485,
+0.4843, -0.5448, +0.4000, +0.8122, -0.1500, -0.7165, +0.1042,
-0.3117, +1.0720, +0.7967, -0.7584, +0.8979, -0.1655, -1.1808,
-0.3785, -0.2499, +0.1837, +0.2004, -0.9096, -0.2102, -0.2760,
-1.5241, -0.1974, +0.1111, +0.3633, -0.1225, -0.2235, -0.4475,
-0.5283, -0.2199, +1.0813, -0.3643, -0.8134, +0.2379, -0.5054,
-0.3266, +0.6273, -0.0715, -0.1116, +0.4867, -0.2660, -0.0761,
-0.0847, +0.4224, -0.7916, +0.3691, +0.0447, -0.1773, -1.0054,
-0.5019, +0.5543, -1.3049, +0.3457, -1.0282, +0.0240, -0.6582,
-1.2401, -0.4293, -0.3723, +0.3960, -0.0613, -0.0244, -1.3904,
+1.2358, +0.7765, -0.4009, -0.0309, -0.7966, -0.2476, -0.6584,
+0.6055, -0.2617, -1.0294, -1.8422, -1.2904, +0.2408, +0.0069,
-0.5984, -0.1944, -0.6952, -0.0202, -0.2899, +0.4219, -0.4417,
-0.5361, +0.6666, -0.9856, -0.7815, -1.3829, -0.3073, -0.2199,
-1.1877, +0.8324, -1.7394, -0.7193, -0.2104, -0.7743, -0.8404,
+0.6159, +0.6948, -0.4127, -0.3222, -0.9054, -0.9808, -0.1008,
-0.1919, -0.1492, -0.2286, -0.4747
],
[
-0.2811, +0.1011, -0.0610, +0.1050, +0.0698, +0.3638, -0.6212,
-0.7514, +0.6360, +0.3330, +0.1242, +0.0861, +0.1506, -0.1573,
-0.1668, -0.1629, +0.0568, -0.0068, -0.1079, -0.2086, -0.0798,
+0.3590, -0.0907, +0.2449, +0.0255, -0.8407, +0.1177, -0.4512,
-0.0412, -0.3018, -0.4712, -0.4733, -0.5295, -0.0837, -0.2969,
+0.5147, -0.6457, +0.2246, +0.4988, +0.2238, +0.1222, +0.3123,
-0.2500, -0.5234, -0.1785, -0.0682, +0.5842, -0.8358, +0.5900,
-0.4114, -0.7249, -0.2819, +0.0768, +0.1637, -0.3317, -0.0877,
+0.4097, +0.5530, +0.3368, +0.2629, -0.5228, -0.3268, +0.0520,
-0.5589, +0.3444, -0.2139, -0.7170, -0.4311, +0.5924, -0.1573,
-0.2035, -1.1198, -0.8246, +0.0195, -0.5881, -0.0686, -0.0860,
-0.2957, -0.2666, +0.0974, -0.5724, -0.8344, +0.3288, -0.1264,
+0.0128, -0.0728, +0.2257, +0.2318, +0.0079, -0.4364, -0.2328,
+0.2227, -0.4731, +0.2063, -0.4602, +0.3852, +0.1137, +0.4837,
-0.0091, +0.4234, +0.6562, -0.0626, -0.6597, -0.0398, -0.0536,
+0.3562, -0.5488, -0.6842, +0.0974, +0.0361, +0.4993, +0.1175,
+0.3602, +0.1439, -0.6109, -0.4640, +0.1863, -0.7654, -0.2980,
+0.2690, -0.2946, -0.1190, -0.4721, -0.2476, +0.1443, -0.0955,
+0.1548, -0.1863, -0.0491, -0.2371, +0.0805, +0.3213, -0.0869,
-0.4615, -0.4175, -0.4486, +0.3961, -0.1400, -0.5382, +0.0238,
+0.1956, +0.0548, -0.3000, +0.0894, -0.1157, -0.2935, -0.0645,
-0.4242, +0.0501, -0.4838, -0.1142, +0.4003, -0.5963, +0.1655,
+0.4490, +0.1399, -0.4131, +0.7218, +0.0651, -0.1533, +0.3304,
-0.1523, -0.3286, +0.5715, -0.1760, -0.5536, +0.9708, -0.0695,
-0.1759, +0.2954, -0.0463, +0.4200, -0.6787, +0.1121, -0.0979,
+0.7546, +0.0308, -0.4884, -0.0656, -0.5207, -0.4658, -0.1859,
+0.5848, +0.1012, -0.6946, +0.2289, -0.3811, -0.2526, +0.3721,
-0.1001, +0.0271, -0.3716, +0.6291, -0.0850, +0.6132, +0.0041,
+0.3856, -0.0917, +0.5693, -0.1875, -0.1611, +0.2646, -0.1155,
-0.0618, +0.2702, -0.1550, +0.3912, -0.2428, +0.5281, -0.5046,
-0.1638, -0.0070, +0.2807, +0.2511, +0.9921, +0.3561, -0.5066,
-0.2131, +0.0024, +0.7692, -0.0259, -0.1815, -1.1148, -0.2950,
-0.0485, -0.5774, +0.1976, +0.2171, -0.1562, -0.0544, -0.5389,
-0.5517, -0.2927, -0.4904, +0.4556, +0.2309, -0.8640, -0.7473,
-0.2229, +0.7439, +0.2569, +0.1446, +0.2150, -0.0239, +0.3427,
+0.1103, -0.2397, -0.3290, -0.2605, +0.6977, -0.0860, -0.7998,
-0.2629, -0.0009, +0.5513, -0.1890
],
[
-0.6315, -0.9363, +0.5395, +0.5106, +0.3953, +0.2615, -0.4221,
-0.4210, +0.4084, -0.3708, -0.8895, -0.7031, -0.2397, -0.0047,
-0.0392, -0.9907, -0.0518, -0.2610, -0.1791, +0.3649, -0.4118,
+0.5682, -0.4480, +0.6911, -0.8780, +0.2294, +0.4313, -0.6176,
+0.1673, +0.3952, -0.5156, -0.2230, -0.1931, +0.4439, +0.1103,
+0.3786, -0.2360, +0.2698, +1.0768, -0.2860, +0.3186, -0.3953,
-0.0972, +0.0864, -1.4468, -1.0603, +0.0658, -1.2611, +1.3845,
+1.1612, +0.0940, -0.0551, +0.7263, +0.6747, +0.0263, -0.3770,
-0.5943, +0.1021, +0.4164, +0.5683, -0.3671, -0.2409, +0.3109,
+1.0640, -1.0311, +0.5001, -0.1592, -0.1081, -0.2889, +0.6021,
-0.3917, -0.3445, -0.2361, +0.4034, -0.4031, -0.5691, -0.3715,
-0.4639, -0.7814, -0.9862, -0.4163, +0.1789, +0.3550, -0.0066,
-0.1438, -0.1465, -0.6079, +0.7529, -1.2862, +0.5823, +0.0574,
-0.5088, -0.5231, +0.0124, -0.1859, +0.4937, -0.7876, -0.6860,
-0.4469, -0.3876, +0.3195, -0.3171, -0.9781, -0.6090, -0.1717,
+0.2865, -0.6920, -0.1219, +0.1502, +0.5604, -0.3207, +0.0759,
+0.1306, -0.3244, +0.1978, +0.3598, +0.2209, +0.5365, +0.5977,
+0.6504, -0.7021, +0.2033, -0.1353, -0.2584, -0.5924, -0.3167,
-0.0117, +0.6623, +0.0350, +0.5381, +0.1891, +0.6693, +0.7502,
-0.4372, +0.5079, -0.0811, +1.2352, -0.4580, -0.5438, +0.4481,
-0.4331, +0.7399, +0.2847, -0.1288, -0.1061, +0.3820, +0.0009,
-0.7364, -0.0127, -0.5112, -0.2881, -0.4204, -0.3917, +0.5281,
+0.0792, +0.1718, -0.8185, +0.5843, +0.0028, +0.8819, +0.4506,
+0.5220, +0.8724, +0.9187, +0.0321, +0.5469, -0.2751, +1.0228,
-1.2236, +0.1936, -0.9670, -0.1699, +0.1720, +0.0485, +0.2123,
+0.8600, -0.2548, -0.4831, -0.0137, +0.6477, +0.9001, -0.3948,
-0.2098, +0.6062, -0.8639, -0.2834, -0.2457, -0.1975, +0.2417,
-1.0677, +0.0405, +0.1193, +0.6034, -0.2025, +0.1993, -0.4663,
+0.4749, -0.4462, +0.9196, -0.1786, +0.2205, +0.4837, +0.0870,
+0.7167, +0.2900, +0.1003, +0.2930, +0.4127, -0.1221, -0.1679,
+0.0162, +0.2090, +0.0474, +0.6412, +0.7725, +0.0576, -0.0105,
-0.1292, +0.2671, +0.6288, +0.0149, +0.1800, -0.2455, -0.0688,
+0.3809, -1.0889, -0.2251, -0.1113, +0.4618, +0.2450, -0.6848,
-0.8293, +0.3566, -0.0628, +0.0873, -0.4762, -0.8713, -0.9663,
-0.4618, +0.8200, +0.4479, -0.1278, +0.4284, +1.0482, -0.1372,
+0.3498, +0.0962, +0.1740, +0.7217, +0.2614, +1.2404, +0.2340,
+0.1609, -0.3998, -0.2072, +1.2226
],
[
+0.3659, -0.3277, +0.2886, -0.6295, +0.2202, +0.1956, +0.2046,
-1.0997, -0.3790, -0.2504, -0.2179, +0.4310, -0.0257, -0.5886,
-0.2192, +0.5606, +0.5691, +0.3139, -0.3161, +0.1774, +0.4663,
-0.4175, +0.1008, +0.1057, -0.1205, +0.2663, -0.8731, +0.5174,
-0.0934, -0.2885, -0.4381, -0.5083, -0.2518, -0.1219, -0.6879,
-0.0831, +0.3837, -1.2419, -0.5405, -0.2546, +0.7046, +0.3395,
+0.5555, -0.3908, +0.1154, -0.0987, +0.2476, +0.3723, +0.0672,
-0.5928, +0.0105, +0.1161, -0.2884, -0.0896, -0.4600, +0.0441,
-0.6273, -0.1487, +0.2347, +0.1172, +0.2784, -1.1121, +0.0912,
-0.0353, +0.0231, -0.3515, +0.2365, -0.4356, -0.0845, +0.2015,
-0.1389, +0.2026, -0.6346, +0.2443, +0.4571, -0.1172, -0.3613,
-0.4059, -0.7775, -0.7386, -0.1742, -0.4192, +0.1514, -0.1638,
+0.0724, +0.0156, -0.3456, +0.1126, -0.1153, -0.2124, +0.7085,
+0.4719, +0.3253, +0.3380, +0.9501, +0.4256, +0.1533, -0.6508,
+0.7624, +0.0468, -0.1102, -0.3088, +0.0204, -0.4028, +0.4368,
-0.4793, -0.0175, +0.0985, -0.9569, -0.0340, -0.1244, +0.3770,
-0.2597, +0.2212, +0.2798, +0.4668, -0.4196, +0.0917, -0.2541,
-0.2862, +0.5804, -0.3658, -0.5237, -0.7544, +0.5569, -0.4648,
-0.5293, +0.7809, +0.7766, -0.5682, +0.1964, -0.8185, -0.1124,
-0.2674, +0.0967, -0.1906, +0.2630, +0.1404, -0.1758, +0.1631,
+0.1065, -0.3872, +0.0061, -0.9510, -0.1729, -0.4610, +0.2472,
+0.1792, +0.0699, +0.3596, -0.1982, +0.2670, +0.2600, -0.4795,
-0.2844, -0.2111, -0.0606, -0.8002, -0.4567, -0.1884, -0.0098,
-0.2961, +0.2176, -0.0458, -0.0488, -0.6393, -0.5504, -0.2651,
+0.0190, -0.2114, -0.0548, -0.6148, +0.9049, -0.1097, -0.9733,
+0.0583, -0.4493, -0.0165, -0.0770, +0.0710, -0.4033, -0.4281,
-0.4991, -0.3947, -0.4954, -0.9631, +0.0378, -0.4770, +0.1315,
-0.3187, -0.7582, +0.3062, -0.5525, +0.7319, -0.2604, +0.0441,
-0.1059, +0.2834, +0.3660, -0.8329, +0.3057, -0.4196, +0.4101,
-0.1431, -0.0602, +0.0972, -0.4246, +0.0750, -0.2165, -0.0929,
-0.5216, -0.0272, +0.2421, -0.1263, +0.3659, -0.6390, +0.4958,
-0.0563, +0.1865, +0.4631, -0.7523, -0.1715, +0.5228, +0.0712,
+0.0542, +0.0209, -0.1205, +0.0727, +0.2380, +0.2908, -0.3024,
-0.6805, +0.0372, -0.2433, -0.0941, -0.2936, -0.4086, -0.2987,
+0.0643, -0.2925, -0.1958, -0.1803, +0.5245, +0.3150, +0.0475,
-0.7125, -0.0838, -0.0100, +0.1168, +0.0011, -0.1571, +0.1697,
+0.4557, -0.0282, -0.8599, +0.3087
],
[
+0.5293, +0.5316, -0.0075, +0.2349, -0.1925, -0.3699, +0.7133,
-0.4346, -0.3532, -0.5599, +0.6772, +1.0476, +0.1895, -0.1390,
+0.4312, +0.2892, +0.3951, +0.0404, +0.4593, -0.3496, +0.0116,
-0.6154, -0.0422, +0.3318, -0.3554, -0.1510, -0.1144, +0.3037,
+0.2126, -0.2470, -0.3374, -0.5273, -0.4472, -0.1286, +0.2244,
-0.3495, +0.3391, -0.1282, +0.0407, -0.0645, +0.2085, +0.2341,
+0.0162, -0.0732, -0.8081, +0.4448, +0.4809, +0.1450, -0.0442,
-0.0492, -0.5167, +0.5533, +0.1572, -0.1957, +0.1203, +0.5138,
-0.0965, +0.8614, -0.3152, +0.0709, +0.4340, -0.5613, +0.1492,
+0.1472, +0.1821, +0.1713, +0.1056, -0.6941, +0.4367, +0.2689,
+0.1627, +0.0061, -0.4394, +0.0959, +0.0825, -0.5954, -0.0809,
-0.5548, +0.4909, -0.2199, -0.1998, -0.2514, +0.0983, -0.0957,
-0.0226, +0.2085, -0.2957, -0.0954, +0.1584, -0.3026, +0.1529,
+0.2377, +0.4984, -0.1790, +0.3467, -0.4972, -0.1265, +0.1861,
+0.5680, -0.4604, +0.2749, +0.0564, +0.1688, -0.1455, +0.1582,
-0.7147, -0.1668, +0.7087, -0.6417, +0.2222, +0.2567, -0.3951,
+0.0252, -0.5056, -0.5749, -0.0059, +0.0058, -0.1503, -0.0303,
-0.1436, -0.1979, +0.0134, -0.1827, -0.5967, +0.1955, -0.3377,
-0.4552, +0.2332, +0.4630, -0.4029, +0.1100, -0.3156, +0.3175,
+0.1121, +0.6988, -0.1980, +0.2492, -0.1373, -0.0736, +0.0154,
+0.1906, +0.3765, -0.5100, -0.2742, -0.2842, -0.2733, +0.4817,
+0.6092, +0.1535, -0.0521, +0.6241, +0.6618, +0.0321, -0.5597,
-0.6581, +0.2474, +0.4140, -0.1778, -0.7715, -0.3870, +0.3461,
+0.2471, -0.0063, +0.0640, +0.0977, -0.5769, +1.1693, +0.3004,
+0.0837, +0.3198, +0.8379, -0.5325, -0.3676, +0.0443, -0.5164,
-0.5141, -0.3037, +0.4722, +0.0488, +0.8697, -0.1157, -0.2020,
-0.1556, -0.3323, +0.0794, -0.4998, -0.3284, +0.1285, +0.2727,
+0.6930, -0.1084, +0.4371, -0.1270, +1.1286, -0.5242, +0.2965,
+0.6526, +0.3987, +0.1444, -0.4337, -0.1283, -0.0356, +0.0632,
-0.0329, +0.0984, +0.3490, -0.0315, -0.4110, -0.3036, -0.3578,
-0.0063, +0.4834, +0.2061, +0.2501, +0.0401, -0.6852, -0.3525,
-0.4835, +0.1745, +0.3534, +0.0782, -0.1772, +0.5503, +0.2279,
+0.0390, -0.4885, +0.3623, +0.0601, -0.3582, +0.1909, +0.1955,
-0.9959, +0.1046, -0.7885, -0.1006, +0.4565, -0.2528, -0.5783,
-0.0917, -0.0401, +1.0514, +0.2831, +0.3431, +0.3726, -0.2791,
+0.5676, -0.3964, +0.2246, +0.1321, -0.0372, +0.4953, +0.1855,
-0.2201, +0.4205, -0.2678, +0.3705
],
[
-0.4383, +0.3767, -0.1398, -1.2589, -0.0351, +0.8848, -0.3821,
+0.7381, -0.1629, -0.2208, -0.0231, -0.1089, +0.2688, -0.1913,
-0.1701, -0.8031, -0.7192, +0.2047, +0.3901, +0.6492, -0.6799,
+0.2456, +0.0256, +0.1402, -0.8989, -0.3148, -0.3743, +0.1524,
-0.5692, +0.0725, +0.2199, +0.2573, +0.2630, -0.0155, -0.8638,
-0.3227, -0.0941, +0.0170, +0.4481, -0.0033, -0.3460, -0.4853,
-0.2198, +0.3081, -0.5793, +0.2964, -0.4996, +0.3581, -0.4945,
-1.0241, +0.3134, +0.0406, -0.3108, -0.0937, -0.6398, -0.4360,
-0.3965, -0.1980, +0.2188, +0.1330, -0.1319, -0.2892, +0.1846,
-0.2287, -0.3640, +1.3056, -0.7990, +0.0514, -0.1256, -0.0869,
-0.5219, -0.4011, -0.0828, -1.3749, +0.4000, +0.0481, +0.0219,
-0.1074, +0.2081, -0.0452, +0.4307, -0.8420, -0.9887, +0.6651,
-0.3451, -0.4989, -0.2157, +0.1838, -0.2326, -0.5271, -1.0106,
+0.0422, +0.1659, -0.6790, -0.4370, +0.0993, -0.4219, +0.2926,
-1.4655, +0.3299, -0.4630, -0.4127, +0.0650, -0.0907, -0.5557,
+0.5449, +0.2549, -0.8462, -0.4960, -0.2910, -0.1676, +0.3139,
-0.3498, -0.1617, +0.1417, -0.0890, +0.4634, -0.3460, -0.1623,
+0.3784, +0.2638, -0.0517, +0.0446, +1.2269, -0.2161, +0.0567,
+0.1503, -0.5337, -0.1629, +0.5830, +0.0807, -0.1728, -0.0075,
+0.5278, -0.7841, +0.4637, -0.2269, +0.1374, -0.3859, -0.9908,
-0.1801, +0.0038, +0.2798, +0.3467, -0.3097, -0.3130, +0.1028,
+0.0310, +0.0803, -0.3599, -0.1481, -0.4712, -0.1677, -0.8620,
+0.1360, -0.3232, -0.6546, -0.3307, -0.1443, +0.4429, +0.0644,
-1.0791, -0.3051, -0.1541, -0.4603, +0.1506, -0.0511, +0.1952,
-0.0245, -0.0150, +0.2477, -0.0090, -0.1937, -0.4797, +1.0447,
+0.0411, -0.2671, -0.0387, -0.8022, +0.2201, -0.3823, -0.3047,
+0.1043, -0.4240, -0.0245, +0.3587, -0.2156, -0.1411, -0.3159,
-0.0002, +0.0291, +0.0051, +0.5761, -0.4807, +0.6089, -0.2353,
+0.0092, -0.6888, +0.2516, +0.8081, +0.1554, +0.7692, +0.3867,
-0.2633, -0.2482, -0.3817, +0.2199, +0.5334, +0.3730, +0.1608,
+1.2073, -0.7426, +0.1119, -0.6921, -0.4162, +0.0102, -0.1226,
+0.2965, -0.6047, -0.4760, +0.2470, -0.1461, -0.2106, -0.1756,
-0.4781, -0.8994, -0.2296, -0.0045, -0.1405, +0.1110, +0.1516,
+0.4299, +0.1353, +0.3075, -0.1090, -0.3561, -0.4872, +0.1200,
+0.1690, +0.0664, +0.0576, -0.6695, -0.2393, -0.1848, -0.5203,
+0.0431, +0.0627, +0.1742, +0.2262, +0.3209, +0.3629, -0.2562,
+0.1898, -0.1370, +1.1583, +0.2679
],
[
+0.3879, -0.2526, +0.5881, +0.3189, +0.2070, +0.6931, -0.3306,
+0.0660, +0.1093, -0.4855, -0.0718, -0.3554, +0.3417, +0.2194,
-0.0519, -0.8023, -0.6628, +0.0588, -0.4819, +0.3613, -0.3590,
+0.1909, +0.1917, +0.2438, +1.0631, -0.6014, +0.0911, -0.5642,
-0.4269, +0.0198, +0.5636, +0.2587, +0.1228, +0.1936, +0.0986,
-0.2277, +0.0956, -0.0331, -0.2739, +0.9812, -0.7628, -0.6546,
-0.0186, +0.4016, -0.5244, +0.6846, +0.2533, -0.1366, -0.1932,
+0.3941, -0.2001, -0.6538, -0.1762, +0.1094, +0.0883, -0.1340,
-0.6598, +0.1996, +0.3834, +0.5194, -0.2248, -0.2921, -0.0777,
+0.2306, +0.0241, +0.1826, +0.0310, +0.4689, +0.1437, +0.0393,
-0.1703, -0.4118, +0.2480, -0.2397, +0.3582, -0.2542, -0.6081,
-0.0572, +1.3444, +0.0118, +0.2765, -0.6030, -0.2835, -0.0242,
-0.1264, -0.2328, +0.3599, +0.2655, +0.3077, -0.0469, +0.4283,
-0.4493, -0.0542, -0.0127, -0.2526, +0.5933, -0.4638, +0.3775,
-0.7117, -0.6042, -0.2770, -0.1601, +0.0737, +0.0611, +0.5926,
+0.6737, +0.0839, -0.3346, -0.1312, -0.0318, -0.1617, -0.0336,
-0.9100, -0.2009, +0.2274, +0.0688, -0.7166, +0.1150, -0.8140,
+0.1184, -0.5295, +0.0943, +0.4650, +0.8789, -0.0656, +0.1492,
-0.4394, +0.0036, -0.3073, +0.2290, -0.5697, +0.4223, +0.2011,
+0.2196, -0.1706, +0.4915, -0.8992, +0.0736, +0.3837, -0.4251,
-0.2159, +0.3671, -0.1270, +0.0207, +0.2042, -0.1387, -0.0351,
-0.1776, -0.1136, +0.0430, -0.0738, +0.0864, -0.5124, -0.5529,
+0.6100, +0.4673, -0.0771, -0.2835, +0.3972, +0.0485, +1.1983,
-0.1951, -0.6381, -0.0974, -0.4182, -0.3271, -0.2248, +0.3219,
+0.3027, -0.0334, +0.0823, -0.1266, -0.1004, -0.3568, +0.4390,
+0.6042, -0.4685, +0.9686, -0.1652, +0.4926, -0.2821, +0.3147,
+0.7419, -0.7130, +0.8363, +0.1439, -0.2505, -0.8304, -0.0077,
+0.0359, -0.3370, +0.0313, +0.3718, -1.3662, +0.3167, -0.4004,
-0.8605, -0.1397, -0.0067, +0.0608, +0.0011, +0.5468, +0.7826,
-0.7418, -0.5643, -0.3295, -0.3556, +0.9552, +0.0878, -0.7521,
-0.0417, -0.0411, +0.5019, +0.4218, +0.1044, +0.2018, +0.7178,
+0.0425, -0.5157, -0.4186, +0.1644, +0.0227, +0.1145, +0.0584,
-0.6852, -1.0077, +0.4781, +0.0082, -0.2462, +0.2854, +0.7114,
-0.4493, +0.6272, +0.4718, -0.7225, +0.4412, +0.2769, +0.0257,
+0.4356, -0.1177, -0.2400, -0.5487, +0.0604, +0.4203, -1.2504,
-0.5002, -0.1294, -0.1064, +0.4515, -0.0592, +0.4579, -0.5153,
+0.1722, +0.0011, -0.2838, -0.0426
],
[
-0.5000, +0.0939, +0.2115, +0.1138, -0.2612, -0.2688, +0.1126,
-0.1978, +0.0107, -0.0291, -0.6198, -0.3804, +0.3689, -0.5019,
-0.2919, +0.2962, -0.0346, +0.2433, +0.0477, -0.2796, +0.2503,
+0.2574, +0.1043, -0.1736, +0.0486, -0.2836, -0.1299, -0.4129,
+0.3905, -0.0475, +0.1315, +0.0154, +0.0612, +0.0488, +0.1056,
-0.3194, +0.0503, +0.0915, +0.1376, -0.0937, +0.2009, +0.1034,
+0.2468, +0.0418, -0.6219, -0.0482, -0.2431, -0.2583, +0.3053,
+0.6434, +0.5343, -0.1449, -0.3486, -0.3343, +0.4435, +0.3061,
+0.5672, -0.0846, +0.3410, -0.2485, +0.2576, +0.0540, -0.4661,
-0.3009, -0.1271, +0.1216, -0.2006, -0.2457, -0.4119, -0.5092,
+0.1415, +0.0494, -0.0872, -0.0839, +0.0892, +0.3253, +0.5367,
+0.1022, +0.1056, +0.0895, -0.1332, -0.3175, -0.4450, +0.1273,
-0.3015, +0.3787, -0.0738, +0.1450, -0.0459, -0.1222, +0.0501,
-0.4743, -0.0633, -0.1730, -0.0526, +0.1144, +0.1252, +0.0175,
+0.2888, +0.2878, -0.0642, -0.4326, -0.0421, +0.5604, -0.1325,
+0.1343, +0.4688, +0.3642, -0.3168, -0.3097, +0.1854, -0.1056,
-0.0041, -0.1558, -0.4009, -0.0136, -0.0572, -1.4377, -0.1642,
-0.0893, -0.1262, +0.5482, -0.1530, -0.1314, -0.1865, +0.1544,
+0.2510, -0.2994, +0.2177, +0.1859, +0.3517, -0.3309, -0.1467,
-0.2601, -0.0053, +0.2824, -0.0076, -0.1656, +0.5284, -0.2198,
-0.5152, -0.4911, -0.0592, -0.3420, +0.0482, +0.0769, -0.2824,
+0.1832, +0.3256, +0.2028, -0.1730, +0.0780, +0.1358, +0.1322,
-0.2479, +0.0363, -0.1282, +0.3515, +0.2663, -0.2877, +0.2531,
+0.0901, +0.4157, +0.2200, -0.0936, -0.4784, +0.1988, -0.1421,
+0.0299, +0.3992, -0.5075, +0.1705, -0.4994, -0.4354, -0.2757,
-0.3260, +0.1986, -0.0314, -0.2276, -0.0841, +0.1922, +0.0777,
-0.3740, -0.8162, -0.3210, +0.2908, -0.0066, +0.3830, -0.0275,
-0.0054, -0.5686, -0.3304, +0.2550, -0.1482, -0.1277, +0.1528,
+0.0152, +0.3266, -0.1639, -0.2498, +0.1536, -0.1135, -0.1109,
-0.0162, -0.1373, -0.3185, -0.1413, -0.4320, +0.0198, +0.0807,
-0.3152, -0.1504, +0.3467, -0.0254, +0.1652, -0.0769, -0.2225,
+0.2389, +0.1408, +0.5036, -0.1342, +0.3223, +0.5522, -0.5153,
+0.1950, -0.5411, +0.1289, -1.3593, -0.1572, +0.1246, +0.2857,
-0.5160, -0.0212, +0.1755, +0.2012, -0.1534, +0.0277, +0.0755,
-0.4493, +0.1839, +0.2857, -0.2879, -0.0920, -0.0917, +0.2131,
+0.5900, -0.4697, +0.2854, -0.7325, -0.0251, -0.5607, -0.5852,
+0.0302, -0.5642, -0.7150, +0.0296
],
[
-0.5366, -0.0531, +0.1124, -0.2017, -0.1809, -0.4269, +0.6671,
-1.3134, +0.0068, -0.3639, +0.6549, +0.0154, -0.0122, +0.7394,
-0.1614, +0.4954, -0.7794, +0.8172, -0.7611, -0.0067, +0.1521,
+0.1350, +0.3450, -0.1466, -0.0309, +0.7165, +0.4891, -0.1217,
-0.4430, +0.1883, -0.1012, -0.0586, +0.9688, -0.5793, -0.7512,
-0.7491, +0.3468, +0.2876, +0.3349, +0.1740, +0.4537, +0.4894,
+0.1092, +0.0501, +0.0156, +0.5487, +1.0158, -0.3966, -0.2654,
-0.0724, +0.5067, -1.0364, +0.3480, -1.2098, +0.1811, +0.1189,
-0.1088, -0.4454, +0.2028, -1.8978, +0.5481, +0.6970, -1.6196,
-1.6491, -0.3147, +1.4683, -0.9395, +0.1823, -0.3368, -0.0161,
+0.3138, +0.5338, -1.0571, -0.6651, +0.8359, +0.6429, -1.0306,
+0.1145, -0.1924, -0.8384, -0.5674, +0.2337, +0.2191, -0.0808,
+0.4547, +0.2254, +0.7611, +0.2956, +0.4412, +0.2042, -0.3485,
-0.8579, -0.0360, +0.4043, +0.2869, +0.2893, -0.6088, -0.0838,
-0.0675, +0.6638, -1.3170, -0.6425, +0.1490, +1.0714, +1.4623,
+0.1846, +0.8732, +0.1235, +0.0416, -0.6125, -0.4323, -0.4897,
+0.1530, -0.8170, +0.1582, +1.4972, +0.5751, -0.9646, -0.3476,
-0.8966, +0.0913, -0.2005, -0.0103, -0.3659, +0.1734, +0.7632,
-1.1119, -0.0537, +0.0060, -1.3084, -0.4407, -0.0468, -0.4416,
+1.1417, +0.4995, +0.2502, -0.3735, -0.2458, +1.3448, -0.8585,
+0.0501, -0.0543, +0.2666, -0.8573, +1.4857, +0.7025, -0.0231,
-1.3243, -0.4544, -0.2274, +0.4979, +0.3006, +0.6793, -0.1725,
-0.0746, +0.3645, +0.1898, -0.5180, +0.5533, -0.5042, +0.2358,
+0.4967, -0.2988, +0.1609, -0.0783, -1.1282, -0.2522, -0.6260,
-0.2272, +0.0970, -0.3885, -0.1422, -0.4530, -0.9385, +0.0407,
-0.7700, -0.2531, +0.5047, -0.3739, +0.6096, +0.0805, -0.3859,
-0.0817, +0.8843, -0.3196, -0.8929, -0.2564, +0.2006, +0.2478,
+0.3182, +0.7476, -0.7697, +0.9693, +0.2046, +0.3687, +0.1164,
+0.4698, -1.7755, -1.1123, +0.2230, -0.7335, -0.0206, -0.2223,
+0.3990, +0.5648, -0.9936, -0.8622, -0.1690, -1.1837, +0.6950,
-0.6163, +0.7241, -0.2630, +0.4631, +0.3327, +0.4036, -0.7987,
+0.4207, +0.4984, +0.2982, -1.2386, +0.4749, +0.4591, -0.7422,
+1.0875, +0.5354, -0.6738, +0.0386, +0.7613, +0.0585, +0.1570,
-0.7002, -0.5231, +0.4646, +0.3237, +0.6031, +0.2195, +0.8915,
-0.4667, +0.3789, +0.2301, +0.2010, -0.5623, +0.4832, +0.1143,
+0.7928, -0.3460, -0.0472, -0.2626, +0.6499, -0.8235, -0.2990,
-1.1168, -0.1905, -0.7103, +0.5388
],
[
+0.3133, -0.6316, +0.2023, +0.6097, -0.5400, +0.1520, +0.0771,
+0.1700, +0.5944, -0.1067, +0.0666, -0.0254, -0.4327, +0.2285,
+0.4536, -0.5574, +0.0504, +0.7510, +0.0809, +0.3838, -0.8439,
-0.1147, -0.1345, +0.0099, +0.1836, +0.3595, -0.2420, -0.0407,
-0.1447, +0.2181, -0.7152, +0.1702, +0.7404, +0.2821, +0.0276,
-0.9614, -0.4581, -0.5366, -0.2125, +0.0668, -0.0150, -0.2005,
-0.3165, +0.3662, -0.1285, -0.3924, -0.6074, -1.2115, -0.0836,
+0.3740, +0.0347, -0.5316, -0.4051, +0.4834, +0.4039, +0.5063,
-0.6241, -0.8050, +0.2015, -0.4484, -0.2013, -0.6292, -1.1865,
-0.8314, -0.0588, -0.3668, +0.2977, +0.3386, -0.1837, +0.0628,
+0.3704, +0.5527, +0.3007, +0.2247, +0.0288, +0.4336, -0.4317,
+0.1014, -0.0536, -0.1351, -0.0964, -0.5354, -0.2087, +0.1205,
-0.0931, +1.2910, -0.4756, +0.5848, -0.3123, +0.4850, -0.7045,
+0.2975, +0.4772, -0.5417, -0.6282, +0.2288, +0.3505, -0.4983,
-0.3921, -0.8337, +0.6614, -0.8303, -0.0417, -0.0454, +0.3398,
+0.5071, -0.2283, +0.8284, +0.8516, +0.1268, -0.1436, -0.7265,
-0.0281, -0.4421, -0.6513, +0.3358, -0.0248, +0.3102, -0.0737,
+0.1734, -0.0482, -0.3296, +0.1719, -0.0633, -0.1005, -0.2895,
+0.3284, +0.5012, +0.4542, +0.4629, -0.2717, -1.0846, +0.4166,
+0.5448, +0.0183, -0.2270, +0.3915, +0.3580, -0.2457, -0.2945,
+0.1370, -0.0692, -0.0832, +0.7713, -0.5414, +0.7029, -0.0502,
-0.2691, -0.2321, -0.0533, -0.1938, -0.1868, -0.0738, +0.6259,
-0.9198, +0.7292, -0.8877, -0.2662, -0.1584, -0.4681, -0.1216,
+0.0493, -0.5111, +0.2386, +0.3455, -0.4765, -0.9506, -0.5355,
-0.4193, -0.1604, -0.2862, +0.6543, +0.0157, +0.2146, -0.1735,
-0.3411, +0.2515, +0.0945, +0.2771, -0.8631, +0.8575, -0.1875,
+0.1667, -0.1624, -0.2783, -0.1305, +0.4377, -0.0527, +0.1919,
+0.2666, -0.5505, +0.2840, -0.3974, +0.1018, +0.2978, -0.3643,
+0.1362, +0.0297, +0.3011, +0.1747, -0.0179, -0.4801, +0.8440,
-0.6682, +0.4672, +0.6367, -0.1213, +0.3026, +0.4612, -0.1369,
-0.3340, +0.0787, -0.2030, -0.4573, +0.3517, +0.4259, -0.0845,
+0.1546, -0.2297, -0.2969, -0.0824, +0.1241, -0.4702, +0.0363,
+0.0921, +0.3557, -0.8600, -0.3158, +0.3729, +0.0638, -1.1007,
-0.2081, -0.2055, +0.1172, +0.0916, -0.0058, +0.1816, +0.0123,
+0.1429, -0.5018, -0.3023, -0.4876, -0.0388, -0.1106, -0.3108,
-0.8963, -0.6240, +0.6068, -0.1200, -0.3523, -0.5510, +0.2055,
+1.1523, +0.5612, -0.4251, -0.4462
],
[
-0.2078, +0.2159, -0.6899, +1.1732, -0.5647, -0.0560, -0.0740,
+0.1268, -0.0523, +0.3479, +1.4624, -0.4836, +0.7700, -0.5258,
+0.7166, -0.1354, -0.3892, +0.3247, +0.3710, -0.4606, -0.5177,
-0.0647, -0.1316, +0.0905, +0.1431, +0.7916, -0.0960, +0.7971,
-0.0478, -0.0183, -0.1254, +0.3246, +0.3927, +0.7055, -0.1469,
-0.0563, -0.5129, -1.2095, -0.4517, -0.3763, -0.2522, -0.6434,
-0.0896, +0.0748, +0.1881, +0.2351, -0.2307, -0.3198, +0.2472,
-0.6252, +1.0733, -0.9754, +0.9886, +1.6223, -0.0218, -0.0685,
-0.6506, -0.6508, +0.6577, +0.1807, -0.3100, -0.0847, +0.5665,
+0.1520, -0.4894, +0.1449, +1.0282, +0.3099, +0.3025, +0.0711,
+0.5635, +0.1188, +0.4385, +0.3915, -0.1982, +0.3720, -0.0862,
-0.3462, -0.3721, -0.1543, +0.1193, +0.5861, -0.0313, -0.1667,
+1.1074, +0.7065, -0.5241, +0.5442, +0.2790, -0.1370, +0.1128,
-0.5967, +0.0350, +0.0144, -0.6178, +0.2484, +0.2531, -0.6824,
+0.0965, +0.2046, +0.4401, -0.1291, +0.6294, -0.2014, -0.2637,
+0.1229, -0.9164, +0.0964, +0.9267, +0.1048, +0.8069, -0.8321,
-0.5052, -0.0693, +0.4893, -0.0462, +0.0229, -1.0707, +0.1171,
+0.5665, -0.8612, -0.0277, -0.2675, -0.2062, -0.3403, -0.2790,
+0.1097, -0.1200, +0.0495, +0.4543, -1.1329, +0.0464, -0.7925,
-0.7417, +0.7223, -0.0427, -0.0673, -0.0399, -0.9868, +0.1529,
+0.5369, +0.3171, -0.3458, +0.5491, -0.0901, +0.5131, +0.0404,
-0.2901, -1.0743, -0.4129, -0.7517, +0.2465, -0.4845, +0.1737,
-0.6957, +0.4938, -0.3270, +0.2558, +0.7118, -0.6806, -0.4913,
+0.3811, +0.5939, -0.0987, +0.4408, -0.6064, +0.4788, -0.3907,
+0.7597, +0.5543, +0.4437, +0.0443, -0.1223, -0.0524, -0.1103,
+0.7907, +0.3542, +0.8101, +0.4851, +0.4546, +0.0818, -0.4959,
+0.7695, -0.2585, -1.6009, +0.1457, +0.3954, +0.8016, -0.2011,
-0.8105, +0.7025, -0.7162, +0.4712, +0.4286, +0.6986, -0.6297,
+0.2641, +0.9476, +0.2917, +0.0902, -0.4511, -0.4805, +0.4719,
-0.5469, +0.5991, +0.3369, -0.9000, +0.7184, -0.3536, -0.1188,
-0.1574, -0.2844, +0.2864, +0.6465, +0.4639, +0.7739, +0.0324,
-0.0467, -0.0402, -0.0269, +0.4416, -0.0577, -0.5605, +0.2913,
-0.3087, -0.2643, -0.9488, +0.0121, -0.5815, -0.6524, -0.1886,
+0.9885, -0.3186, -0.0408, +0.7183, -0.0352, -0.2070, -1.0671,
+0.1828, +0.0276, -0.2088, -0.0692, +0.1481, +0.4491, -0.2706,
-0.5036, +0.3425, +0.3358, -0.3577, -0.1041, -0.5428, +0.5906,
-0.8141, +0.4981, +0.4530, -0.0505
],
[
+0.0883, +0.5745, +0.1241, +0.0386, -1.3220, +0.1391, -0.7444,
+0.1266, -0.4752, -0.0622, -0.0086, +0.4961, -0.9183, +0.2870,
-0.0505, -0.0834, -0.4600, -1.2942, +0.4397, -0.9405, -0.2526,
+0.0333, -0.5344, +0.0062, -0.3627, -0.0152, +0.1844, +0.4362,
-0.3528, -0.6949, -0.7837, +0.4124, -0.4609, +0.3771, -0.0652,
+0.1978, -1.2737, +0.0702, -0.9126, +0.4987, -0.3259, +0.3806,
-1.2165, +0.1094, -0.1793, +0.2912, +0.5028, -0.3525, +0.3053,
-0.1895, -0.3082, -0.4153, -1.0049, -0.2444, -0.0007, -0.6810,
-0.4850, -0.5223, -0.2507, +0.1435, -0.3542, +0.2929, -0.5005,
-0.3632, +0.2116, +0.6687, +0.0826, -0.0506, -0.2402, -0.4810,
-0.3074, -0.1164, +0.2267, -0.1956, -0.0909, -0.0020, +0.0059,
+0.7786, +0.1430, -0.0885, +0.0162, +0.1277, +0.4625, +0.3426,
+0.3616, -0.5267, +0.2048, +0.4234, -0.0988, -0.4840, +0.8986,
+0.3059, +0.1044, -0.0221, +0.1213, +0.2799, +0.2168, -0.3360,
+0.0820, -0.2899, +0.5913, +0.0276, +0.6040, -0.7100, +0.1102,
-0.7610, -0.9273, +0.2782, -0.4118, -0.5615, +0.3015, -0.3658,
+0.0120, -0.4225, -0.9050, -0.5561, -0.1952, -0.2364, -0.2842,
-0.4078, +0.4831, -0.3532, +0.2448, -0.1562, -0.2250, +0.1278,
-0.3951, -0.3186, -0.3552, +0.2271, -0.6228, +0.1711, +0.1144,
+0.5876, -0.0525, +0.1323, -0.1743, +0.4941, +0.2175, +0.1452,
-0.6939, -0.3579, -0.2105, +0.0473, +0.1207, -0.2609, +0.7277,
-0.6756, -0.9064, +0.3227, -0.2519, +0.3097, +0.2882, +0.0695,
-0.1104, -0.0890, +0.2658, -0.1416, +0.0912, +0.3849, +0.0381,
+0.1393, +0.2570, -0.1948, -0.0091, -0.3471, +0.4512, -0.9123,
+0.3797, -0.2508, +0.3012, +0.5242, +0.1647, -0.0376, -0.1543,
+0.1072, +0.3013, +0.3925, -0.5150, +0.3293, -0.4495, -0.5439,
+0.1226, -0.0503, +0.3166, +0.0229, -0.1003, +0.0704, -0.7325,
-0.4725, -0.1982, -0.0686, +0.2250, -0.5051, -0.1548, +0.7555,
-0.1745, -0.4507, +0.0182, +0.0478, -0.1299, -0.0160, +0.2467,
-0.1570, -0.0555, +0.4546, -0.0208, +0.6012, -0.5317, -0.3977,
+0.1079, +0.2253, -0.4173, -0.0367, +0.0345, -0.5867, +0.3191,
-0.0608, -0.2514, +0.3111, +0.5135, +0.8052, -0.4308, -0.1522,
-0.8264, +0.1884, +0.0839, +0.2944, -0.2407, +0.6267, -0.1679,
+0.7118, -0.3055, -0.0245, +0.4895, +0.0072, +0.8225, -0.1348,
+0.0345, -0.3597, +0.1860, +0.0213, +0.8171, +0.4510, -0.1827,
-0.2858, -1.2174, -0.3189, +0.0268, +0.6681, +0.4480, +0.3994,
-0.1562, -0.9024, -0.2326, +0.1346
],
[
+0.3084, +0.3326, +0.1054, +0.5372, -0.4462, +0.3715, -0.4384,
+0.4350, +0.4069, -0.1899, -0.2812, +0.3302, -0.5307, -0.0522,
+0.4385, -0.5310, -0.8525, -0.3197, +0.0000, -0.1851, +0.2713,
-0.1222, -0.0797, +0.1402, +0.1150, -0.3401, -0.5592, -0.0257,
-0.3595, -0.2370, +0.6668, +0.3255, -0.3060, +1.0412, -0.3289,
-0.0084, -0.7253, +0.5238, -0.2928, -0.0062, -0.4224, +0.4303,
-0.2587, +0.3176, -0.5448, +0.4752, +0.7116, -0.1239, +0.4182,
-0.5606, -0.0119, -0.4950, -0.1067, +0.5279, -0.0342, -0.8565,
-0.4620, -0.1194, -0.0360, +0.5641, +0.1143, +0.5987, -0.3046,
+0.3763, +0.3809, +0.0488, +0.2096, +0.3724, -0.1403, -0.1926,
-0.5922, +0.1727, +0.4608, -0.5037, +0.0887, +0.0214, +0.2735,
+0.5127, +0.1420, +0.8844, -0.3324, +0.2358, +0.7508, +0.1167,
+0.0459, -0.4334, -0.0198, +0.5772, +0.4406, -0.1656, +0.2223,
+0.2519, -0.3324, +0.4001, -0.3142, -0.1051, +0.3291, -0.0687,
+0.0978, +0.0169, +0.2624, -0.0020, -0.0262, -0.6117, +0.2776,
-0.2481, -0.7874, +0.0241, +0.4585, -0.3711, +0.3154, -0.4785,
+0.7140, +0.3236, -0.8819, +0.0408, -0.3777, -0.4275, -0.1939,
-0.5276, +0.1249, -0.6084, +0.0583, -0.0348, +0.7692, -0.1150,
-0.0375, -0.0562, -0.1039, +0.0838, -0.5024, +0.5375, +0.0474,
+0.3641, +0.6234, +0.2684, +0.4376, -0.0756, +0.1711, -0.1052,
-0.6423, -0.0710, +0.0612, -0.3986, +0.2754, -0.2364, +0.6701,
-0.1358, -0.1000, +0.7136, -0.1151, -0.5315, -0.2194, +0.6152,
+0.4980, +0.2460, -0.3362, -0.2858, -0.3295, -0.2964, +0.1795,
+0.0117, +0.3812, +0.3558, +0.3506, -0.2463, +0.3887, -0.3427,
+0.3082, +0.1121, -0.1937, +0.3812, -0.1235, +0.3321, +0.1251,
-0.0890, +0.4140, +0.5447, +0.3223, +0.1721, +0.4238, -0.0615,
+0.6814, +0.2036, -0.3634, +0.6447, -0.3903, +0.4421, -0.6876,
+0.4155, -0.1082, +0.3378, +0.2087, -0.6629, -0.5236, +0.9171,
-0.5905, -0.2467, -0.1317, +0.1906, -0.0160, +0.0688, +0.0536,
-0.3821, -0.0679, +0.2156, +0.3244, +0.3768, -0.2809, -0.0848,
-0.0698, +0.0940, -0.8873, +0.4357, +0.0218, +0.4299, +0.3018,
+0.1461, -0.4344, +0.2483, +0.4554, -0.0151, +0.4039, +0.3796,
-0.3374, -0.1993, -0.1437, -0.2406, +0.1328, +0.6773, -0.4468,
+0.4858, -0.7805, -0.0731, +0.4331, +0.4372, +0.3843, -0.1971,
+0.5076, +0.3756, +0.9754, +0.1496, +0.5236, +0.2684, -0.1820,
-0.5176, -0.3597, +0.1541, +0.1695, +0.6111, -0.1462, -0.4114,
+0.0234, +0.0209, +0.5436, +0.5810
],
[
-0.2500, +0.3077, +0.1558, +0.3010, +0.6922, +0.4368, +1.2611,
+0.4390, -0.2915, -0.0442, -0.0027, +0.3695, -0.1254, -0.5003,
-0.8550, -0.0819, +0.6677, +0.7455, -0.0776, -1.2166, +0.0246,
-0.8068, +0.1458, +0.1168, +0.3775, -0.0366, -0.4019, -1.0362,
+0.0472, -0.9426, -0.0675, -0.6501, +0.2171, -0.2521, +0.2897,
+0.0093, +0.7755, -0.1403, -0.0516, +0.2505, +0.2692, -0.1350,
+0.2832, -0.1298, -0.3193, +0.3324, +0.1854, -0.0759, -0.7190,
-0.2470, +0.1966, -0.4939, -0.1859, -0.2846, -0.9652, +0.5937,
-0.3156, +0.1543, +0.3539, -0.7877, -0.4272, +0.3107, +0.0315,
+0.5174, -0.0162, -0.0552, +0.1052, -1.0040, -0.2154, +0.8140,
+0.8001, -0.1447, +0.2047, +0.5197, -0.0189, +0.2863, -0.3463,
-0.2873, +0.1687, -0.3446, +0.2374, -0.1005, +0.0379, +0.3568,
+0.1127, +0.6150, +0.2781, +0.1287, -0.3984, -0.0689, +0.1423,
-0.4591, -0.1348, -0.8668, +1.0449, +0.1205, -0.7809, -0.4016,
+0.0480, -1.3975, -0.3335, +0.1350, -0.1440, +0.4579, +0.2722,
+0.2101, +0.2482, -0.0158, +0.0860, -0.4599, -0.5761, -0.1191,
+0.0220, -0.6415, +0.1869, +0.1274, +0.2244, -0.6932, +0.3727,
+0.1536, +0.3592, +0.1398, +0.0148, +0.4021, -0.1487, +0.1267,
-0.0019, +0.5425, +0.3965, +0.2555, +0.1843, +0.4653, -0.8533,
-0.0198, +0.0784, -0.6288, +0.1757, -0.0479, -0.3504, -0.2092,
+1.4140, -0.0574, -0.3833, -0.6488, -0.8540, -0.2835, -0.9608,
+0.5502, +0.6005, -1.5338, -0.1230, -0.9029, -1.3107, -0.1371,
-1.2359, +0.1221, +0.0265, +0.1265, -1.0465, +0.0593, -0.6191,
-0.2046, +0.3733, -0.0338, -0.6591, +0.1019, -0.0270, -0.2339,
-0.0395, -0.6103, -0.2356, +0.0017, +0.4525, +0.5458, +0.0776,
-0.2577, -0.0378, +0.2578, -0.4619, +0.3851, +0.0819, +0.0461,
-0.4399, -0.1668, -0.3137, -0.0088, -0.5462, -0.5962, +0.6968,
+0.2002, +0.3099, -0.3296, -0.0797, -0.1906, +0.3054, -0.4745,
+0.6503, +0.0542, +0.2144, -0.5482, -0.7552, -0.4890, -0.2945,
-0.2794, -1.2685, -0.0533, -0.8263, -0.7473, +0.5711, -0.1414,
+0.0348, -0.2394, +0.7934, +0.3019, +0.0283, -0.2052, -0.2160,
-1.2565, +0.1837, -0.5952, +0.2728, -0.4365, -0.0053, +0.3223,
+0.0588, -0.4739, +0.2493, +0.5917, -0.3267, -1.0887, +0.4630,
+0.2037, +0.4404, -0.3157, -0.3922, -0.2117, +0.0694, -0.3869,
+0.1015, +0.0267, +0.0393, -0.2944, -0.5654, +0.0621, -0.1336,
+0.1256, +0.1415, -0.5144, -0.8333, -0.5362, -0.2697, +0.1414,
-0.4653, +1.0042, +0.0541, -0.2995
],
[
-0.3617, +0.6119, +0.1803, -0.7268, +0.1873, +0.1849, +0.3978,
-0.2046, -0.4021, -0.0068, -0.4464, +0.1547, -0.1373, -0.0781,
-0.7765, -0.3411, +0.4319, -0.0365, -0.2461, +0.0698, -0.1267,
-0.8131, +0.1605, +1.1406, -0.0785, -0.1109, +0.0651, -0.1175,
+0.2334, +0.0754, -0.1100, -0.2610, +0.1067, -0.4293, +0.6595,
+0.0901, +0.4231, +0.0030, +0.4254, -0.3673, -0.1324, -0.0250,
+0.4087, -0.2628, -0.5321, -0.7347, +0.7819, -0.0667, -0.3807,
+0.2666, -0.1121, +0.3202, -0.3344, +0.5674, -0.0790, +0.3794,
-0.2026, +0.5088, +0.4095, -0.0543, -0.1254, +0.0291, +0.8022,
+0.9585, +0.2550, -0.9030, -0.1392, -0.2231, +0.2384, +0.3346,
+0.4932, +0.5478, -0.4844, +0.5016, +0.0050, -0.3516, -0.1467,
-0.3614, -0.3191, +0.0918, -0.6497, -0.8179, -0.1203, +0.2838,
-0.8380, -0.2386, +0.2764, +0.2012, +0.2537, -0.2596, -0.1523,
-0.2144, -0.4428, -0.2279, +0.3967, -0.0666, +0.4540, +0.1012,
-0.1424, -0.2584, -0.4035, +0.3236, +0.0341, -0.1242, +0.3707,
-0.0029, -0.0806, -0.4114, +0.6835, -0.2080, -0.3752, +0.4889,
-0.7170, +0.4745, -0.4807, -0.6897, -0.8512, +0.1095, -0.5387,
+0.2555, -0.3730, -0.2401, +0.3897, +0.1526, +0.4394, -0.0330,
-0.0074, -0.2117, +0.0445, +0.5541, -0.4439, +0.6947, +0.5867,
+0.0232, -0.3013, -0.4952, -0.0544, +0.5246, -0.4008, -0.8797,
+0.7381, -0.7058, +0.2255, -0.0161, -0.7893, -0.1669, -0.3674,
+0.2584, +0.0972, -0.6752, -0.0116, -0.4167, -0.1269, -0.5116,
-0.3789, +0.1586, +0.3960, +0.2958, -0.6671, -0.2630, -0.6302,
+0.3825, +0.0924, -0.4380, -0.4249, -0.5170, +0.3245, +0.4881,
-0.7935, -0.5931, +0.2993, -0.5733, -0.7339, +0.1648, +0.3424,
+0.4922, +0.3099, +0.1607, -0.1968, -0.0296, -0.1659, -0.2224,
-0.8748, +0.6691, +0.2408, -0.4505, -0.2609, +0.5525, +0.5123,
+0.1637, +0.9391, -0.2949, +0.5058, -0.0878, +0.7430, -0.5601,
+0.1722, -0.1415, +0.2439, -0.2886, -1.1566, +0.2533, -0.4539,
-0.2170, -0.0570, +0.3258, +0.0173, -0.3232, +0.1866, -0.8982,
-0.4922, -0.4564, -0.3910, +0.1259, -0.2820, -0.3271, -0.0819,
-0.4024, -0.3912, -0.1096, -0.0386, -0.1337, +0.4817, +0.5591,
+0.3636, +0.1442, +0.2077, +0.2385, +0.0564, -0.8489, +0.4890,
-0.5965, +0.4944, +0.7034, +0.2359, +0.9821, -0.2933, +0.4627,
-1.0236, +0.1961, -0.0180, +0.0697, -0.1304, -0.0903, -0.2365,
-0.4505, -0.5489, -0.4525, +0.3125, -0.7338, -0.6284, +0.9569,
-0.1139, +0.1486, -0.3127, -0.4513
],
[
+0.1374, -0.5428, -0.8100, -1.0265, -0.2423, +0.1350, -0.1682,
+0.0583, -0.2582, +0.1550, +0.0280, -0.2915, +0.3467, +0.3599,
+0.0055, +0.1305, -0.1833, -0.1642, +0.1030, -0.2892, +0.0507,
-0.3570, +0.2098, -0.1168, +0.0693, +0.1814, +0.0658, -0.0190,
-0.4673, +0.0659, -0.4218, +0.0543, -0.4458, -0.2527, +0.4815,
-0.0955, +0.0052, -0.0228, -0.3353, -0.3469, -0.6349, -0.1985,
+0.1107, -0.1253, +0.2029, -0.3126, -0.1510, -0.1179, -0.1855,
-0.1230, -0.3620, -0.0249, +0.1285, -0.2405, +0.3999, -0.1774,
-0.0395, +0.5645, -0.2328, +0.0150, -0.3253, -0.0695, -0.6269,
-0.3334, +0.0779, -0.0437, +0.1131, -0.1575, +0.3053, -0.1468,
+0.0352, -0.3982, +0.1718, -0.2638, -0.0177, +0.2462, -0.1478,
-0.2315, +0.0713, -0.1853, -0.0738, +0.0536, +0.2119, +0.2691,
+0.0546, -0.5131, +0.4333, -0.4216, -0.3035, -0.1188, -0.4189,
-0.2094, -0.1047, +0.1408, +0.0974, -0.2719, -0.1249, +0.1946,
-0.0849, -0.1044, -0.0510, +0.1056, +0.0175, +0.0413, +0.1502,
-0.0845, -0.4606, +0.0749, +0.5350, +0.0060, +0.0318, -0.3788,
+0.1055, -0.3184, -0.2421, -0.1276, -0.1251, +0.2900, +0.2509,
-0.3171, -0.6557, -0.2008, -0.5711, -0.1254, +0.0869, +0.3087,
-0.1056, -0.1393, -0.2747, -0.0783, -0.4774, -0.4621, +0.1936,
+0.0284, +0.5479, -0.5869, -0.2269, -0.0711, -0.2774, +0.0409,
+0.1552, +0.1656, +0.2633, +0.1758, +0.0518, -0.3006, -0.0855,
-0.0324, +0.1547, +0.3259, -0.1615, -0.0038, +0.0764, -0.2870,
+0.1677, -0.1411, -0.6105, -0.2463, -0.2647, +0.2542, +0.1947,
+0.0514, -0.3308, -0.3148, +0.1937, +0.0342, -0.2227, +0.0185,
-0.0816, -0.0406, -0.2300, -0.1025, -0.3797, -0.3689, +0.0250,
-0.1930, +0.1759, -0.0086, -0.2773, -0.1413, -0.2596, -0.1380,
-0.0023, -0.0578, -0.1519, +0.2800, -0.0472, +0.1733, +0.0858,
-0.4109, -0.1821, -0.2069, -0.0812, -0.0292, -0.1067, -0.1145,
-0.7581, +0.0988, -0.1573, -0.0543, -0.0192, +0.1357, +0.1829,
-0.3401, +0.1644, +0.0492, -0.5190, +0.0267, -0.3055, +0.1544,
-0.0713, +0.1133, -0.4940, -0.2757, -0.2218, -0.4367, -0.0063,
+0.0133, -0.1323, -0.0167, -0.1582, -0.0865, -0.3382, +0.5587,
+0.0266, +0.0899, -0.4496, -0.4377, -0.3201, +0.0885, -0.1241,
-0.2890, +0.1006, +0.0440, +0.0513, -0.5927, -0.2212, -0.1547,
-0.3500, +0.3094, -0.4577, +0.0900, +0.2294, +0.2151, +0.1440,
-0.2790, -0.1258, -0.3533, -0.1199, +0.5167, -0.0921, -0.0455,
-0.1775, +0.2137, -0.2639, +0.1466
],
[
-0.5276, -1.3438, +0.3831, -0.9253, +0.8576, +0.0124, -0.2658,
-0.2487, -0.1611, +0.3085, -0.5858, +0.5871, -0.9179, +0.5947,
+0.7145, +0.6081, -0.7345, -0.4929, -0.1306, +0.6165, +1.1511,
-1.1945, -0.7667, -0.3825, -0.0375, +0.1871, +0.7079, -0.0321,
-0.0640, -0.4706, +1.0184, +0.6239, +0.2742, -0.5054, +0.2348,
+0.4013, +0.5829, +1.2272, -0.0240, +0.1746, -0.1047, +0.8335,
-0.0198, -0.0762, -0.5627, -0.1122, +0.3310, +0.1170, +0.2169,
-0.0871, +0.0091, -0.3329, +0.5467, -0.5080, +0.5069, +0.5804,
-0.8041, +0.9737, -0.3245, +0.1729, -1.0782, -0.3868, +0.2157,
-1.2449, -0.7842, +0.0555, -1.1963, -0.5074, +0.0428, +0.6962,
-0.1599, +0.4788, -0.2649, +0.8472, +0.4923, +0.3957, +0.2921,
+0.8468, +1.2065, +1.0259, +0.3230, -0.3249, +0.1635, -0.0607,
-0.4183, +0.3428, +1.2379, +0.6218, -0.0018, -0.6459, -0.0654,
+0.6206, +1.1501, +0.5924, +0.0803, -0.4396, +0.1987, +0.7395,
+0.0809, +0.3726, +0.7010, +0.4642, +0.0059, -0.8903, +0.4905,
+0.7753, +0.2928, +0.1696, +1.1438, -0.6232, -0.8458, -0.5917,
+0.8345, -1.1192, +0.4379, -0.0981, -0.6179, +1.0763, -0.1587,
-0.0143, -0.3366, +0.3584, +0.6000, -1.7890, +1.5939, +0.5541,
-0.8919, -0.3719, +0.1024, +0.0394, -0.2871, +0.1840, +0.4360,
+0.3877, +0.7105, -0.5494, -0.2323, -0.8096, -1.4210, -0.0810,
-0.4973, +0.8591, +0.3779, +0.4127, -0.1358, -1.2560, +0.0892,
+0.0455, -0.2307, +1.3972, +1.4152, -1.0996, -0.0079, +0.1609,
+0.1529, +1.2245, -0.7034, -0.0371, +1.0239, -0.1093, -1.2783,
-0.1734, -0.1558, -1.2371, +1.1509, +0.2892, +0.3994, +1.4772,
+0.2275, -0.1904, +0.1494, -0.6271, -0.2104, -0.6753, +1.2702,
-0.6296, -0.3667, +0.1936, -0.5729, +0.0596, -0.9871, -1.0458,
+0.1219, +0.0623, -1.0077, +0.3999, +0.2206, -0.6258, -0.7348,
-1.5555, -0.3696, +0.3870, +0.0152, +0.1681, -1.2091, -0.3437,
-0.8404, +0.9248, -0.1502, +0.4254, -0.5557, -0.3428, +0.3771,
-0.3366, +0.1037, +0.3911, -1.4632, +0.1639, -0.8553, +0.3079,
-0.0169, +2.2567, -1.9198, +0.3675, -0.4832, +0.4359, +0.6070,
-0.7928, -0.0742, -0.5579, -0.1285, +0.1660, -1.0886, +1.1281,
-0.2143, +0.9417, +0.3212, -0.0355, +1.6218, -0.5859, -0.1306,
-0.6681, +1.0604, -0.5643, -0.5695, -0.8043, -0.8766, -0.4868,
+0.9869, +0.9962, +0.3336, -1.1848, -0.3094, -0.3334, +0.2218,
-0.4546, -0.5356, -0.8120, -0.6476, -0.6687, -0.5322, +0.1139,
+1.3689, -0.0917, -0.0398, +0.9746
],
[
+0.4746, +0.6490, -0.0563, +0.6347, -0.1950, -0.1855, -0.0127,
-0.1160, +1.1050, +0.0292, -0.1313, +0.7417, +0.0635, +0.5027,
-0.4095, +0.5781, +0.3729, +0.1721, +0.3567, +1.1599, -0.5712,
-0.6483, -0.4589, -0.5270, +0.1732, -1.2591, +0.4863, -0.9328,
+0.0039, +0.2550, +1.4603, -0.7980, -0.3856, +0.0379, -0.2163,
+0.6541, -0.4396, +0.5536, -0.0282, -0.4969, +0.2063, -0.8435,
+0.3136, -0.6127, -0.1036, +1.1152, +0.5326, -0.6026, +0.2343,
-0.3199, -0.3462, -0.2891, +0.3153, -0.4382, +0.2658, +0.5144,
-0.1004, +0.5722, +0.6198, -0.1634, +0.6600, -0.3581, -0.5464,
-0.2069, +0.2790, -0.0548, -0.2250, +0.5619, -0.3516, +0.5768,
+0.0918, -0.0851, -0.1171, -0.4577, +0.2275, -0.0844, +0.9257,
-0.0400, -0.1302, -0.2833, -0.2913, +0.3435, -0.6581, -0.5410,
-0.1167, +0.1244, +0.9532, +0.3339, -0.1696, -1.0204, -0.8628,
+0.4471, +0.6016, -0.0928, -0.5537, +0.9510, +0.2470, -0.0080,
-0.1268, +0.7720, -0.0184, -0.3384, +0.2185, -0.8863, -0.0140,
+0.0291, -0.3500, +0.1724, +0.7346, -0.5315, +0.5823, +0.2847,
+0.1213, +0.5961, -0.5800, -0.4652, -0.7573, +0.1346, +0.0915,
-0.5366, +0.1836, +0.8365, -0.5010, +0.0161, -0.4858, +0.2502,
+0.8965, -0.2293, -0.1035, -1.1650, +1.1383, +0.0969, -1.4010,
-0.0933, +0.4967, +0.4204, -0.6597, +0.1957, +0.5147, +0.3877,
-1.0364, +1.7310, -0.7283, -0.5552, -0.9958, +0.1301, -0.2359,
-0.6026, +0.8743, -0.0038, -0.8864, -0.6011, -0.9583, -1.2237,
-0.5029, -0.0788, +0.6273, -0.1550, +0.0569, +1.0592, -1.0994,
-1.3958, +0.7132, -0.3483, +0.0321, +0.2592, +0.6079, +0.1530,
+0.8406, +0.3430, -0.0344, -0.1821, +1.5350, -0.2418, -0.5004,
-0.1613, -1.2243, +0.3113, +0.5512, -0.1078, +0.2478, -1.0035,
-0.4421, +0.2409, +0.2840, +0.0553, +0.5921, +0.5255, +0.7057,
-0.1598, +0.9546, +0.0526, +0.3708, +0.2987, -0.2607, -0.3898,
+0.6809, -0.3427, -0.6318, -0.2139, -1.0395, +0.5142, +0.3191,
-0.5015, +0.3427, -0.8764, +0.2258, -0.6226, -0.3563, -0.0095,
+0.1180, -0.0894, -0.0828, +0.4943, -0.9282, -0.6606, -0.4134,
+0.1086, -0.0555, -1.1844, +0.6912, +0.4039, -0.1119, +0.0709,
-0.0453, -0.7530, +0.0188, -0.2012, +1.1507, -0.4265, -0.1570,
-0.5780, +0.5714, +0.7797, +0.1656, -0.4177, -0.2286, +0.3263,
+0.3342, -1.0660, +0.9417, +0.0318, +0.4741, +0.0620, -0.3524,
-0.3416, -0.5109, -0.0235, +0.4831, +0.0348, +0.3290, +0.8699,
-0.1673, -0.3973, +0.4861, -0.4140
],
[
+0.9079, +0.7905, -0.8778, -0.6168, -0.6697, -0.0622, +0.7345,
-0.0286, +0.8464, -0.5082, -0.0811, +0.6639, +1.0680, +0.1035,
-0.2131, +1.1516, -0.2689, -0.6938, +0.8596, +0.0857, -0.8148,
+0.3525, -0.6048, -0.9561, -0.4924, -0.0850, +0.1967, -0.7057,
-1.1881, +0.8330, +0.2124, -0.7318, -0.1144, +0.2397, +0.3227,
+0.9644, -0.4401, -1.3300, +0.3038, +0.2791, +0.3383, +0.5044,
+0.6904, -0.6599, -0.6532, -0.6436, +0.6976, +1.3793, -0.3978,
+0.7100, -0.2195, -0.3812, +0.3690, -0.1254, -1.0735, +1.2677,
-0.2901, +0.0035, +1.6962, -0.6103, +0.6258, -0.9371, -0.7765,
+0.0440, +1.6579, +0.1763, +0.5664, +0.5588, +1.0481, +0.6091,
-0.0223, -0.9908, -0.4076, -0.2800, -0.0961, +0.1303, +0.0567,
+0.2445, +0.0284, +0.5881, +0.5713, +0.5311, -1.2043, +0.3325,
+0.7527, +0.2728, +0.7195, +1.2142, -1.3905, -1.0492, -0.4155,
-0.5728, +1.0382, -0.3996, -0.1396, +0.5156, -0.2043, -0.6742,
-0.5733, +0.2610, +0.4607, -1.3460, +0.1924, +0.3357, +0.5231,
+1.0669, -0.5209, -1.6138, -0.0863, +0.7761, -0.9102, +0.1233,
-0.9402, +1.2671, +0.4664, -1.2182, +0.2402, -0.7552, +1.8687,
+0.0714, +0.9773, +0.8177, -0.3350, -0.0919, -0.5721, -0.8396,
+1.1060, -0.1265, +0.0583, +0.4129, +0.5108, +0.2976, -0.6591,
-1.6417, +0.7939, +0.5756, -0.7724, -0.5524, +0.5851, -0.5469,
+0.1929, +1.2866, -1.1102, -0.6085, -0.2798, +0.3116, -0.3718,
-1.3797, +1.5620, -0.1876, +0.6959, -1.3734, +0.3470, -0.6543,
-0.1204, +0.8051, +1.1058, -0.2433, +1.0471, +1.3998, -0.6581,
-1.3997, -0.1491, -1.2863, -0.5144, -0.0742, -0.9420, -0.5590,
-0.8803, -0.3925, +0.3924, +1.0412, -0.6053, +1.0904, -0.7781,
+0.2669, -1.2796, -0.3787, +1.4656, +1.0314, -0.2491, -0.8925,
-0.0463, +0.6171, +0.8608, -0.3372, +0.5386, -0.9977, +0.4165,
+0.2014, +0.8115, +0.0647, -0.2797, +0.2055, +0.3337, +0.8143,
-0.1816, -0.2726, -0.7037, -0.0720, +0.0258, -0.5450, -0.6288,
+0.8187, -0.3132, -0.3859, -0.8743, -0.2493, -1.5649, +1.1263,
+0.2945, +0.3289, -0.4554, -0.1755, -0.4170, -0.8453, -0.1841,
+0.7881, +0.6946, +0.2777, +0.1883, +0.2721, -1.1722, -0.4219,
-1.4592, +0.6907, -0.6756, -0.2404, +1.1636, -0.0088, -0.4367,
+0.1144, +0.6787, +0.4052, +0.4296, -0.1470, -1.3242, +0.0790,
-0.3254, -0.5657, +0.5947, -1.4787, -0.0701, +0.9545, +0.6431,
-0.6002, -1.3457, +0.8740, +1.3852, +0.1384, -0.8537, +0.2547,
+0.7683, +0.2669, +0.5810, +0.2507
],
[
+0.7794, -0.3995, +1.0293, +0.2487, -0.1426, -0.0694, -0.4230,
+0.5372, -0.8684, -0.2010, +0.6707, -0.0952, -0.3988, +0.3726,
-0.4713, +0.8838, +0.1794, +0.0997, +0.4971, -0.0439, +0.5980,
-1.1781, +0.7389, +0.2720, +0.6640, +0.9361, +0.5272, -0.9933,
+0.5313, +0.6200, -0.5337, -0.3758, +0.7092, +0.3955, -0.5755,
+0.4248, +0.1049, -0.1665, -1.3629, -0.8256, +0.1741, -0.4460,
-0.0687, +0.2652, +0.0616, -0.3573, -0.3689, +0.2120, -0.3601,
+0.0945, -0.3972, +0.6231, -0.9564, +0.0331, -0.0692, +0.8024,
+0.8491, -0.4224, +0.2185, -1.0869, +1.0811, -0.4962, +0.2517,
+0.6918, -0.9458, -0.8809, -0.0807, +0.8267, -0.8325, -0.6024,
-0.6169, -0.4712, -0.5702, +0.5511, +0.6871, -0.2050, +0.3387,
+0.5156, +0.0601, -0.5919, -0.0874, -0.2232, -0.5864, +0.5779,
-0.9416, -0.1933, -0.1205, -0.3783, -0.1681, +0.7948, +0.5512,
-0.8296, -1.2560, -0.5598, -0.7089, +0.5243, -1.2546, +0.0505,
+0.0337, -0.0770, +0.5568, +0.8405, -0.3165, +0.1296, +0.0145,
+0.1948, +0.3414, -1.0293, -0.3926, +0.3601, +0.5924, -0.0225,
+0.2293, +0.5509, -0.4721, +1.0890, +0.0656, -0.8053, -0.4318,
-0.2686, -0.3589, +0.3525, -1.6364, +0.1975, -1.2318, -0.0322,
-0.0529, -0.1288, -0.5292, +0.8793, -0.3240, +0.4082, +0.8765,
-0.1184, -0.2652, +0.4349, -0.1954, -0.9002, -0.1542, +0.1900,
-0.3657, +0.6107, -1.3319, +0.3832, -0.7073, -1.0964, +0.0979,
-0.0593, +0.2025, -0.7053, +0.8264, -0.5835, +1.0835, -0.2843,
-0.0467, -0.2718, -0.8686, -0.9170, +0.2143, -0.2596, -0.3218,
+0.2698, -1.1382, -0.3152, +1.2738, +0.4709, -0.4124, -0.0938,
-0.1260, +0.1779, -0.1146, +0.5035, +0.1219, -0.5944, -0.7660,
-0.0158, -0.6721, -0.4598, +0.3024, -0.6472, +0.2719, -0.3481,
+0.1938, +0.5082, -0.1786, +0.6289, -0.1406, -0.4831, +0.0925,
+1.3774, +0.2169, +0.6540, +0.5974, -0.2561, -0.3918, +0.2433,
+0.4119, -0.4269, +0.6912, +0.1498, -0.2791, +0.2986, -0.7919,
+0.4985, +0.1458, +1.0800, -0.0015, -0.1555, -1.0081, +0.1507,
-0.8582, -0.0885, +0.1456, +0.2427, -0.3740, +0.2295, -0.6934,
-0.6045, -0.3527, +0.7148, +0.3243, -0.5983, -1.0345, +0.2636,
-0.5150, +0.2554, +0.7108, -0.3012, -0.5825, +0.9396, -0.3506,
+0.6580, +0.3133, -1.0601, +0.0265, +1.2558, -0.8227, -0.6875,
+0.5125, +0.3614, +0.3186, +0.3714, +0.1321, +0.2640, -0.3857,
+0.2002, -0.0581, -0.1428, -0.3079, -0.1789, -0.2079, -0.4076,
-0.3201, +0.6586, +0.1977, +0.3799
],
[
+1.0152, -0.4272, +1.5968, +0.6085, +0.0382, -0.0320, -0.1601,
+0.2294, -0.5811, +0.3067, +0.3063, +0.5515, -0.1223, -0.7526,
+0.2072, -0.4560, +0.1842, +0.0665, +0.0936, -0.3308, +0.1486,
+0.1245, -0.2952, -0.3373, +0.5287, +0.2801, -0.3682, -0.2865,
+0.1819, +0.0894, +0.2098, -0.1739, +0.9808, -0.0154, +0.8304,
-0.1405, +0.2339, -0.4588, -0.1539, +0.2594, -0.4761, +0.4449,
-0.6506, -0.9120, +0.9332, +0.3959, +0.5257, -0.2585, -0.0116,
-0.0331, -0.5957, -0.3119, -0.1755, -0.3625, -0.0883, +0.1531,
+0.5778, +0.2146, +0.9642, +1.1435, +0.5163, -0.1991, -0.8677,
+0.8288, -0.2488, +0.6972, +0.2705, -0.1435, +0.0735, +0.8515,
-0.5041, +0.1183, +0.8504, +0.2412, +0.5877, -0.1952, -0.8772,
+0.5466, -0.3558, -0.9457, +1.2632, -0.7621, -0.0547, -0.1342,
+0.0964, -0.5433, -0.4726, +0.0467, -0.2131, -0.2369, -0.2428,
-0.9630, -0.4432, -0.0143, -0.2546, -0.0641, +0.5841, -0.7195,
+0.0329, -0.3998, +0.1326, -0.1076, -0.0649, +0.1647, -0.0183,
+0.4472, -0.2438, -0.7648, -0.4508, -0.4282, +1.1088, +0.4030,
+0.0970, -0.3259, -1.3072, +0.0813, -1.1233, +0.6596, +1.3132,
-0.0910, -1.4535, +0.1034, -0.6101, -0.5647, -0.1968, -0.1917,
-0.3228, +0.4269, -0.0175, +0.1723, +0.7586, +0.2725, +0.2984,
+0.0471, +0.3097, +0.5126, -0.0390, -0.2827, +0.1326, -0.5409,
+0.2052, +0.1947, -1.2951, -0.1683, +0.0238, +0.0519, +0.2857,
+0.3279, -0.0465, -0.2380, +0.6940, +0.0863, +0.7705, +0.0415,
-0.1736, -0.0368, +0.0479, -0.4177, +0.6509, -0.7612, -0.4984,
-0.2082, -0.0499, -0.4223, +0.1914, -0.9028, -1.0545, +0.5095,
-0.1387, -0.3916, -1.0241, -0.2819, +0.6037, +0.5648, -0.0705,
+0.0947, +0.6769, +0.0673, +0.2948, -1.3944, -0.6531, -0.4202,
-0.1783, +0.0268, +0.0839, +0.1930, +1.2632, -0.1694, +0.1956,
+0.0704, +0.1328, +1.1654, -0.0036, -0.2800, -0.1723, +0.1906,
+0.6107, -0.7420, -0.1529, -0.0184, +0.3008, -0.3166, -0.4631,
+0.8368, +0.0610, +0.5651, +0.3451, +0.1413, +0.4791, +0.7318,
-0.4487, +0.5503, -0.1893, -0.9050, -0.0371, +1.0320, -0.2752,
-0.2402, +0.1983, +0.1691, -0.1743, +0.0262, -0.3012, -0.2686,
-0.5917, +0.4833, +0.2275, -1.0397, -0.7766, +0.1059, +0.9856,
-0.7496, +0.0454, -0.1811, -0.5061, -0.2086, +0.4762, -0.0301,
+0.3655, -0.8493, -0.3589, +0.2206, -0.1718, +0.4509, -0.6658,
-0.1114, +0.4450, +0.1507, -0.7038, +0.1654, +0.1350, -0.1161,
-0.6521, +0.4614, -0.2002, +0.7790
],
[
+0.2031, +0.3577, +0.0810, +0.3648, -0.4617, +0.3751, -0.1235,
-0.1538, +0.2726, +0.1242, -0.1705, +0.1270, +0.2021, -0.2672,
+0.0658, -0.1336, +0.1313, +0.3237, +0.4461, +0.1122, +0.1621,
-0.1401, +0.2924, +0.5848, +0.0638, -0.3302, +0.1234, +0.2454,
-0.1130, +0.1128, +0.1852, +0.0273, -0.1360, +0.1931, +0.0309,
-0.0501, +0.0395, +0.2461, +0.0056, +0.8994, +0.2007, +0.0656,
+0.1160, -0.5716, -0.3491, -0.3657, +0.3145, -0.1593, -0.1912,
-0.0888, +0.4174, +0.1221, +0.3682, +0.0124, +0.4657, +0.2084,
+0.1841, +0.1167, -0.0739, -0.2235, -0.1317, -0.1059, +0.0005,
-0.3553, +0.0478, -0.2670, -0.3048, -0.0239, -0.3234, +0.2460,
-0.1938, +0.0174, -0.1066, +0.0227, -0.0453, -0.2925, +0.2515,
-0.0553, +0.6641, +0.1934, -0.0582, -0.0747, +0.1598, -0.0864,
+0.6230, -0.2330, +0.1452, +0.0687, +0.3855, -0.0319, +0.0463,
+0.3423, -0.1462, -0.5494, -0.2447, +0.2213, +0.2288, -0.0392,
+0.1759, +0.1252, +0.2653, -0.2289, +0.2565, -0.2434, -0.3535,
-0.1131, -0.3191, +0.0205, -0.0284, +0.0183, +0.0784, -0.2673,
+0.1299, -0.0790, -0.1518, -0.0730, +0.1813, +0.3683, -0.1695,
+0.1237, +0.3745, +0.3690, -0.6881, -0.1880, +0.0629, -0.0592,
-0.0248, -0.0414, +0.4730, +0.1303, -0.0482, +0.0112, -0.1361,
+0.1190, -0.0087, +0.1654, +0.0218, +0.3632, +0.4050, +0.0237,
-0.1225, -0.0510, +0.2677, +0.1058, -0.4330, -0.2446, +0.0680,
+0.0084, -0.1428, -0.0318, -0.3880, -0.0480, +0.1241, -0.2249,
-0.0057, +0.3902, +0.0009, +0.4903, -0.0056, +0.0552, +0.0670,
+0.2352, -0.3515, +0.2444, +0.1034, +0.1594, +0.1691, +0.1958,
-0.0890, +0.4285, -0.4465, +0.2052, +0.1434, +0.0566, -0.1894,
-0.1330, +0.3394, -0.0723, -0.2799, +0.5040, +0.3874, +0.2093,
+0.1213, +0.4494, +0.0671, +0.2415, -0.0311, +0.1968, +0.1401,
+0.2103, -0.2451, +0.0012, +0.3245, -0.1060, +0.5344, -0.0518,
+0.0953, +0.1246, -0.0098, +0.0767, +0.1818, -0.2596, +0.0499,
-0.1394, +0.2061, +0.0765, -0.3988, -0.1717, -0.1285, +0.3542,
-0.0598, +0.0791, +0.2980, -0.1230, -0.0267, -0.1042, +0.0878,
+0.0063, -0.0769, -0.6503, +0.3040, +0.5716, +0.0099, +0.4971,
-0.1512, -0.1700, +0.0815, +0.3126, +0.1712, +0.2337, -0.0923,
+0.3189, +0.0818, +0.1253, +0.3160, -0.0565, +0.1453, +0.1487,
+0.1412, -0.0413, +0.1806, +0.3415, -0.2231, +0.1603, -0.0205,
+0.0133, -0.1991, -0.0934, -0.2162, +0.1570, +0.5228, +0.0982,
+0.1629, -0.0007, +0.0671, +0.6305
],
[
-0.2587, +1.4717, -1.2581, +1.1689, -0.8211, +0.7375, -0.5318,
+0.7533, +0.9086, +0.5498, +0.4367, +0.4467, +0.7623, -0.5282,
+0.1765, +0.3681, +0.5341, -0.1767, +0.1712, +0.0251, +1.2736,
+0.3875, +0.4250, +1.0980, -0.9987, -0.7445, +1.4084, -0.1555,
-0.0065, +0.7734, -0.0383, -0.7819, -0.5530, +1.3777, +0.1985,
-0.1046, +0.7596, -0.5529, -0.7634, +1.6740, +0.6101, +0.9633,
-0.5998, -0.5364, -0.5572, +0.4762, +0.6979, -0.3609, +0.1236,
+1.5710, +0.1581, +0.2865, -0.6993, +0.0855, +0.3278, +0.3518,
-0.0146, -0.6892, -1.5353, -0.0312, +0.1722, -1.0445, -0.1581,
-1.0708, +0.3866, +0.7016, -0.6134, -0.6641, +1.1412, +1.3183,
-1.6420, -0.2347, -1.0429, +0.7793, +1.1163, +0.3310, +1.1847,
+0.1195, -0.4370, +2.6725, +1.7559, -0.0730, +0.1806, +0.8852,
+0.4111, -0.0167, -1.1111, -0.2998, +0.6224, +0.8074, -0.6231,
-1.3434, -0.1981, -0.3998, +0.5584, +2.2487, +0.8766, +0.3173,
+0.1901, +0.4814, -0.2815, +0.2213, +0.1415, -0.1233, -0.8500,
+0.5884, +0.7851, +0.3084, -1.3487, +0.5780, +0.2632, +0.0860,
+0.6677, +0.4589, +0.5602, +1.3973, +0.1368, -0.5945, +0.3506,
+0.4958, +0.1421, -0.0624, -1.9660, -0.1511, +0.0767, +0.1952,
-0.7139, -0.1675, -0.2837, +0.3031, -0.8060, +0.4746, +0.9971,
+0.9237, -0.6620, -0.4858, -0.0629, +1.3183, +0.0561, -0.6683,
-0.0898, +0.8873, -0.4117, +1.0612, +0.3377, +0.6326, -0.0532,
-1.0591, +1.4041, +0.7255, +0.2089, -0.3014, +0.4921, +0.0062,
+0.3732, -0.8475, +0.6848, +0.0985, +0.3710, -0.2956, -0.4557,
-0.1119, -0.3930, -1.0178, -0.8864, -1.1830, +0.0800, -1.2098,
+0.0087, +1.1755, +0.1675, +0.9011, +0.2116, -0.6979, +0.3733,
-0.4422, +0.4411, -0.2016, +0.0778, +0.5067, +0.7934, -0.6171,
-0.3097, +1.4559, -0.1367, -0.2373, +1.1098, -0.6761, +0.6733,
+0.3182, -1.1651, -0.8602, -0.0873, -0.1958, +1.1926, -0.2270,
+0.7515, -0.0983, -0.8228, +0.4345, +1.0731, +0.5583, +0.7641,
-0.9911, -0.9689, +0.9484, -1.5987, -0.7247, +1.2311, -0.0236,
+0.3943, -0.1350, +0.3385, -1.1370, +0.0704, -1.0800, +2.6736,
-0.3049, +0.3406, +0.3617, +0.2617, -0.4761, -0.2080, -0.7092,
-1.1429, -1.1093, +0.6757, -0.7916, -1.6909, +0.1712, -1.2448,
-0.0885, -1.1596, -0.9892, +1.1010, -0.1438, -1.1564, -1.1641,
+0.8402, +0.4540, +0.3659, +0.5764, -0.6388, +1.3488, -0.1266,
-0.1939, +0.7784, -0.8709, -1.7219, +0.5963, +0.2587, +0.7790,
+0.8252, +0.2516, -0.6644, +1.2204
],
[
+0.3592, -0.5473, -0.3097, -0.1018, -0.2525, +0.2532, +0.1852,
-0.0088, +0.0655, -0.1855, +0.5385, -0.3589, +0.1838, +0.0969,
-0.2628, +0.5441, -0.5624, +0.5361, -0.0491, -0.7181, +0.1281,
+0.8338, -0.5089, +0.1088, +0.2301, +0.7786, +0.2525, -0.1566,
+0.1209, -0.0165, -0.5423, -0.0324, -0.4267, +0.1346, +0.5207,
+0.3439, +0.2450, -0.3079, +0.3215, -0.2746, -0.0576, +0.3982,
+0.2115, +0.5541, +0.0766, -0.4200, +0.3545, -0.6151, +0.3697,
-0.4220, -0.9963, -0.0358, +0.4634, -0.5239, +0.4562, +0.6207,
-0.3370, +0.2265, +0.3755, +0.3904, -0.2075, -0.2598, -0.0633,
-0.4194, +0.5470, +0.5681, +0.1983, -0.0904, +0.5055, -0.6471,
-0.0167, -0.0303, +0.2334, -0.0423, +0.2169, +0.6253, -0.6199,
+0.2927, +0.5569, +0.5446, -0.5548, -0.4428, -0.1779, -0.6061,
+0.3867, -0.0211, +0.4600, +0.3529, +0.1077, -0.4499, +0.0824,
+0.7483, -0.0086, -0.4067, +0.9282, -0.1023, -0.1676, +0.1965,
+0.3254, -0.1279, -0.1506, +0.1424, +0.1603, +0.8137, -0.3727,
-0.4988, +0.1526, -0.2360, +0.3461, -0.0394, +0.4962, +0.0610,
+0.1735, -0.0205, +0.7807, +0.1340, +0.5147, +0.0883, +0.3427,
-0.2549, +0.0896, -0.1998, +0.3312, +0.2516, -0.0669, +0.4445,
+0.1057, +0.2584, +0.0754, +0.9614, -0.2724, +0.0970, +0.3488,
-0.2546, -0.1988, -0.2053, +0.2207, -0.6095, +0.1199, -0.1256,
-0.7693, +0.0747, +0.0203, +0.1841, +0.3277, -0.0653, +0.5064,
-0.0817, +0.0032, +0.0842, -0.0084, +0.1438, +0.4430, +0.0838,
+0.4066, -0.5317, -0.4929, -0.7820, +0.6759, -0.0047, +0.0779,
+0.1832, +0.7314, -0.1949, -0.2592, -0.1778, +0.2129, -0.1953,
+0.2811, -0.3692, -0.8135, -0.1123, -0.3494, +0.3097, -0.0034,
-0.0640, +0.3743, -0.1830, +0.3457, -0.2519, +0.1464, +0.2380,
+0.9953, +0.1149, +0.2285, +0.3024, +0.1707, +0.4957, -0.0838,
+0.2673, -0.1883, +0.5624, -0.3179, +0.1476, -0.3644, -0.1482,
-0.3382, +0.5344, +0.3742, -0.2313, +0.0031, +0.3530, +0.6537,
-0.6092, -0.2391, -0.1602, -0.0471, -0.4469, -0.2055, +0.0156,
+0.4789, -0.1673, -0.4586, +0.0224, +0.6343, -0.5151, +0.5684,
-0.2783, -0.4272, +0.0596, +0.3467, -0.5315, -0.3208, +0.7246,
-0.2494, -0.5467, +0.5609, +0.6399, -0.0605, -0.4482, -0.3522,
-0.2714, +0.0103, +0.1283, -0.3618, +0.4830, +0.2682, -0.3372,
-0.9003, +0.3099, +0.5265, -0.1855, +0.3999, +0.2228, +0.2785,
+0.2410, +0.3890, +0.1071, +0.4037, +0.0730, -0.1131, -0.5232,
-0.0883, +0.4060, +0.5556, +0.8502
],
[
-0.1343, -0.2508, +0.4466, +0.1928, -1.3764, +0.5608, +0.5246,
+0.1057, -0.5098, -0.2481, +0.3235, +0.5815, +0.6031, -1.3475,
+0.3300, -0.4184, -0.6097, +0.1800, -0.2128, +0.8702, -0.3855,
+0.8850, -1.2823, +1.7465, +0.1154, +0.3326, +1.5720, +0.3796,
+0.6865, +0.3970, -0.4212, -0.6553, +1.1901, +0.1061, +0.4295,
-0.2777, +0.7180, -0.3106, -0.1774, -0.1792, -0.7945, +0.5230,
+0.3777, -1.5737, +0.5463, -1.1414, -0.3024, -0.2935, +1.6658,
+0.1355, -0.3055, +0.1280, +1.0071, +0.6983, -0.1000, +0.8603,
-0.4452, -0.0920, -0.0800, +0.4033, -1.3896, +0.3404, +0.3678,
-0.8422, +0.9925, +0.6851, -0.1356, -0.8242, +1.6621, -0.2808,
-0.1091, +0.4584, +0.2921, -0.7004, +0.8090, +0.1422, -0.8455,
-1.1053, +0.0825, +1.1285, -1.2245, -1.0105, -0.3434, -1.6341,
-0.5537, -0.8407, -0.2705, -0.1580, +1.3811, +0.7322, -0.2809,
+0.6630, +0.0622, -0.9919, -0.2476, -0.5338, -1.0355, -0.4306,
+1.2108, -0.7038, -0.8522, -0.4159, -0.4341, -0.9691, -0.4149,
+0.2669, +0.1698, +0.6613, -0.5911, -0.0239, +0.2033, -0.0434,
-0.1534, +0.0713, -0.2211, -0.6775, -0.5210, -0.7939, -0.9760,
-0.3736, +0.4148, -0.0374, -0.5101, +0.2733, +0.1276, +1.1739,
+1.0531, +0.1120, -0.2605, -0.3248, -1.3695, +0.5495, -0.3836,
-1.2904, +0.0924, +0.1732, +0.7190, -1.8944, +0.5771, -0.7028,
-1.3076, +0.3683, +1.0602, +0.9195, +0.5505, +2.1586, -0.1119,
+0.2749, +0.0851, +0.1625, -0.5860, +0.7156, +1.0821, +0.6278,
+0.6332, +1.0299, +0.7335, -0.6378, -1.1269, -1.3456, -0.3317,
+0.2087, +0.2395, -1.2800, +1.3199, +0.8713, -0.1688, +0.7461,
+2.3621, -1.8532, -1.1558, -0.8474, -0.8858, +1.1107, -0.6988,
+0.5648, +0.5592, +0.0090, +0.9274, +0.0033, -0.2176, -0.2460,
-0.4024, -0.5599, +0.1274, +0.0351, +1.7716, -1.3724, +0.2129,
-0.5534, -0.2584, -0.2184, +0.1280, +0.2762, -0.8356, -0.1989,
+0.6781, +0.7678, -0.0635, -0.8494, -0.5063, +1.4836, +0.9417,
-0.8640, -0.9373, -0.0647, -0.4305, +0.3043, -1.3472, -0.6325,
+1.0122, -0.9094, -0.3081, +0.2821, +1.9533, -0.4383, -0.0838,
-0.4351, -0.2311, +0.6239, +1.3051, +0.9572, -1.2479, +0.5177,
+0.9639, +0.3607, +1.8729, +1.3819, +0.5825, -0.5135, -1.0385,
-0.9116, -0.0064, +1.0256, -2.7996, -0.2395, +0.7472, -1.3705,
+0.2986, -0.6054, +0.6847, +0.0719, -0.8614, +0.2998, +0.1512,
-0.6667, +0.2468, +0.6496, -1.2415, +0.8087, +0.6789, -0.2141,
+1.5373, +0.1668, +0.4588, +0.7919
],
[
+0.4301, -0.0051, -0.0875, +0.3149, +0.1445, -0.2641, -0.1754,
+0.5335, -0.0157, -0.2217, -0.0377, +0.1612, -0.0294, +0.3743,
-0.2604, +0.1637, -0.1160, +0.0079, -0.0766, +0.0414, -0.0153,
+0.2402, +0.3822, -0.3192, -0.6855, -0.4405, -0.4147, +1.1371,
+0.3184, -0.1308, +0.0473, +0.4313, -0.0454, +0.1781, +0.4002,
-0.8131, -0.3910, +0.2740, -0.0340, +0.7894, -0.0778, +0.1756,
-0.0962, -0.1095, +0.3778, -0.1233, +0.3710, -0.0951, +0.1384,
-0.1220, -0.0544, -0.9435, +0.2298, -0.2771, -0.3243, +0.0912,
-0.1642, -0.0708, +0.0869, -0.1744, +0.4446, -0.0291, +0.7510,
+0.8012, -0.3125, +0.5270, +0.2996, +0.4207, +0.6451, +0.4491,
+0.4604, -0.3475, -0.6101, +0.1729, -0.4379, -0.1075, +0.1705,
-0.3692, +0.8727, -0.0143, -0.3038, -0.0700, +0.3822, +0.6457,
-0.0217, +0.5809, +0.2763, -0.2581, +0.2419, -0.4404, +0.1733,
+0.0276, +0.6988, +0.8704, +0.1141, -0.3121, +0.3798, +0.2060,
-0.1407, +0.1235, -0.3681, -0.2431, +0.6762, +0.5232, -0.5515,
+0.5500, -0.3355, -0.4603, +0.4862, -0.1196, +0.0621, +0.5295,
+0.6918, +0.2692, +0.1853, -0.0964, +1.0014, +0.3324, +0.3988,
+0.5131, +0.2976, -0.2324, +0.5504, -0.0915, +0.2536, +0.0679,
+0.2896, +0.3822, +0.2914, -0.0889, -0.2608, -0.2823, +0.2455,
-0.1054, -0.2823, -0.0489, +0.7225, -0.5020, -0.1037, +0.4323,
-0.0741, -0.3735, -0.2149, +0.1601, -0.7872, -0.9418, -0.5975,
+0.5669, -0.1317, -0.1543, -0.1119, +0.7635, -0.2892, +0.2267,
-0.2927, +0.6342, -0.7640, +0.1373, +0.2916, -0.2643, +0.2569,
-0.3059, -0.0311, -0.3020, -0.6296, +0.0174, +0.0450, +1.0147,
+0.4829, +0.5843, -0.3074, +0.4011, -0.0653, +0.1264, +0.4231,
-0.5560, +0.1051, +0.1399, +0.2078, +0.3617, +0.3190, +0.1993,
-0.2986, -0.3466, -0.4155, -0.0597, +0.3793, +0.4466, +0.3959,
-0.3023, +0.0040, -0.2326, -0.4952, +0.0890, -0.0792, +0.0179,
-0.4617, -0.3461, -0.0826, -0.1708, +0.2706, +0.5588, +0.4019,
+0.3551, -0.4609, +0.3649, -0.1232, -0.1674, -0.1038, +0.1348,
-0.0660, +0.0727, +0.7005, +0.0865, -0.1129, +0.3601, -0.0603,
-0.1414, -0.0705, -0.0303, +0.6423, +0.1803, +0.4151, +0.0681,
+0.5287, +0.5347, -0.0853, +0.9422, +1.1476, -0.0179, +0.2478,
+0.0933, +0.2639, +0.2103, +0.4472, -0.1852, -0.4093, -0.3714,
-0.3393, -0.0699, +0.7782, -0.4494, +0.5415, +0.7094, +0.1251,
+0.5135, -0.4087, -0.2527, +0.4463, -0.0505, -0.2852, -0.8556,
+0.3942, +0.0987, -0.0077, +0.5979
],
[
+0.2845, +1.0812, -0.7494, +0.7928, +0.1685, +0.1597, +0.1420,
+0.0782, +0.0160, -0.1429, -0.3610, -0.6243, -0.1857, -0.3799,
-0.1864, -0.7237, +0.1977, -0.0277, +0.2529, -0.6009, +0.1861,
+1.2845, +0.0113, -0.1965, -0.4988, +0.9524, -1.1362, +0.0979,
+0.0978, +0.5577, -0.5694, +0.1245, -0.0096, +0.2110, +0.9419,
-0.9761, -0.1216, +0.1184, +0.6906, +0.0689, -0.2717, -0.1839,
+0.4812, +0.8658, -0.3780, -0.9149, -0.1993, +0.4785, -0.4723,
-1.1146, -0.4531, -0.8288, +0.8131, +0.3287, -0.3474, +0.0937,
-0.7731, -0.3929, -0.2429, +0.2462, +0.4287, -0.2803, -0.8852,
+0.5022, +0.1806, +0.0863, -0.1661, -0.9738, +0.7656, +0.4123,
+0.0462, -0.6986, -0.6324, +0.2546, -0.1041, -0.3495, +0.3546,
-0.2854, -0.0838, +0.1096, +0.1486, +0.4535, +0.5963, +0.5115,
-1.1775, -0.0183, +0.3368, -1.0411, -0.1538, -0.1992, -0.0637,
-0.4965, +1.3624, +0.0774, -0.1885, -0.1917, -0.1281, -1.5518,
+0.0393, +0.3687, -0.2982, +0.0824, +0.6070, +0.4407, +0.4273,
+0.3254, -0.0359, +0.0932, +0.0982, -0.1779, -0.4939, +0.4630,
-0.0588, -0.0963, -0.0445, +0.0383, -0.9067, -0.4780, -0.8662,
+0.6478, +0.1210, -0.0671, -0.0488, -0.4615, +0.5291, +0.0794,
-0.3347, +0.7754, -0.1222, +0.4951, +0.1517, +0.1694, -0.6412,
-0.0738, -0.2780, -0.2456, +0.2203, +0.0484, +0.0835, +0.6804,
-0.4793, -0.8170, -0.0797, -0.3415, -0.3049, +0.1274, -0.6019,
+1.2989, -0.2131, -0.0808, -0.4516, +0.4898, -0.2154, -0.0225,
+0.0655, +0.3478, +0.0185, -0.3479, +0.4347, +0.0999, +0.0793,
-0.9525, -0.0407, -0.0925, +0.6913, +1.4525, -0.4044, +0.0837,
-0.2553, +0.4982, -0.4324, -0.1525, -0.1488, -0.6428, +0.1974,
-0.5170, +0.2185, -1.3480, -0.0615, -1.3151, +0.3056, -0.1194,
+0.2072, -0.6148, -0.5407, -0.5680, +0.2053, -0.2356, -0.1255,
+0.6072, +0.2077, +0.1942, -0.3421, -0.0024, -0.0669, -0.1494,
+0.3493, -1.0341, +0.0665, +0.3289, +0.2825, +0.0613, -0.0985,
+0.1094, +0.3992, +0.0906, -0.2558, -0.0444, -0.2922, -0.2714,
+0.0269, +0.2303, -0.8004, -0.2422, -0.1758, +0.1609, -0.1121,
+0.0069, -0.2658, -0.0016, +0.4041, -0.4815, -0.1375, -0.3767,
+0.0661, +0.6412, -0.0630, +0.7240, +0.0774, +0.4729, +0.4779,
-0.6123, -0.4901, +0.0965, +0.2687, +0.6720, -0.7838, -0.8790,
-0.5969, +0.1599, -0.0727, +0.3581, +0.2928, -0.4210, +0.1209,
+0.5134, +0.8239, -0.2588, +0.5264, +1.4808, +0.2310, -1.1862,
+1.1727, -0.0640, +0.1582, +0.3120
],
[
+0.0172, -1.1238, -0.5016, -1.1918, +0.3910, +0.1795, +0.0503,
-0.4863, +0.1703, +0.1462, -0.1386, +0.3944, -0.6998, +0.2394,
+0.3020, +0.0680, +0.0091, -0.3455, -0.7739, -0.5416, -0.1543,
+0.0340, -0.5541, -0.4612, +0.0488, -0.1452, -0.3268, -0.0772,
-0.1545, -0.3110, -0.0585, +0.0679, +0.0346, +0.0893, +0.0122,
-0.6993, +0.0531, +0.1817, +0.0834, -0.1794, -0.0571, -0.4912,
+0.0552, -0.3705, -0.1491, -0.5484, +0.3888, +0.0526, -0.1392,
-0.3859, -0.3149, +0.1368, +0.0454, -0.2472, +0.0746, -0.2081,
-0.3802, -0.2701, -0.7529, +0.5921, -0.1139, +0.5151, +0.4211,
+0.2626, -0.1105, -0.5061, -0.4920, +0.2586, -0.0108, -0.1262,
-0.0240, +0.3779, -0.7462, +0.3654, -0.3487, -0.2882, +0.2652,
+0.4255, +0.1863, +0.5477, -0.5843, -0.3706, +0.1130, -0.3929,
-0.1854, -0.3030, +0.2220, -0.0894, -0.7535, -0.7834, -0.0204,
-0.5167, -0.2607, +0.3052, +0.0319, -0.2919, -0.1374, -0.1339,
+0.2415, +0.1194, +0.1944, +0.0716, +0.6676, -0.8269, -0.0466,
+0.1289, +0.1831, +0.3119, +0.3629, -0.5299, -0.0872, +0.3453,
+0.2217, -0.4897, -0.2563, -0.1342, -0.0896, +0.2592, -0.4204,
-0.0450, -0.6944, +0.0078, +0.0787, +0.0199, +0.4863, +0.1886,
+0.3215, -0.2611, -0.1486, +0.2414, -0.1718, +0.4281, -0.6486,
-0.4176, -0.3029, +0.4823, +0.8132, +0.1277, +0.0055, +0.3228,
-0.1253, -0.3521, -0.5403, -0.1949, -0.2421, -0.2463, +0.1556,
-0.3001, +0.1514, +0.0274, -0.7368, +0.0907, +0.1133, -0.1469,
+0.1039, +0.1180, +0.2917, +0.2558, +0.0316, -0.3933, -0.4351,
+0.4973, -0.2582, -0.2405, +0.3755, -0.1156, -0.2276, +0.2097,
-1.0808, -0.8002, +0.0366, -0.0562, -0.4894, -0.0145, -0.6245,
+0.4986, +0.5006, +0.2602, -0.0508, -0.3204, +0.1924, -0.0899,
+0.3825, +0.5341, +0.7027, +0.1131, +0.1199, +0.0252, +0.0072,
+0.6830, +0.1546, +0.2466, +0.1135, +0.0015, -0.0177, -0.4254,
-0.0954, +0.0434, -0.0305, -0.1784, +0.2699, +0.2957, -0.2150,
-0.0487, -0.2437, -0.2200, +0.0523, -0.2779, -0.6089, +0.1060,
-0.2136, -0.2251, -0.3129, -0.0839, -0.2522, +0.1036, -0.2146,
-0.0584, +0.5832, -0.3026, -0.5945, +0.2099, +0.1080, +0.4115,
-0.0818, +0.2817, +0.0098, -0.3738, -0.0005, -0.2097, -0.7590,
-0.0630, +0.2055, +0.2783, -1.5297, -0.3210, -0.3086, -0.1355,
+0.0386, -0.3155, +0.1009, +0.1150, -0.3786, -0.4136, +0.4385,
-0.2626, +0.0942, +0.0446, +0.1205, -0.1513, -0.2796, +0.1553,
-0.0969, +0.0511, -0.1249, -0.2802
],
[
-0.1034, +0.2357, -0.4203, +0.1591, +0.3585, +0.1246, -0.0421,
+0.1255, -0.2100, +0.0055, -0.0512, +0.6803, -0.6657, +0.0623,
-0.0359, -0.2987, -0.6500, +0.0916, +0.1569, +0.5246, -0.2630,
+0.2165, -0.5772, +0.1229, +0.0293, -0.6651, -0.4447, +0.1485,
+0.2914, +0.3376, -0.3570, +0.1639, +0.1711, +0.2401, -0.3681,
-0.6154, -0.2828, -0.9889, +0.4287, +0.4902, -1.0063, -0.0716,
-0.2519, +0.2374, +0.0735, -0.0310, -0.0960, -0.3189, -0.0257,
-0.3756, -0.1699, -0.6484, -0.2126, -0.0991, +0.2516, +0.2817,
-0.2249, -0.5916, +0.0468, +0.1980, +0.2723, -0.2368, -0.1617,
-0.4406, -0.0696, -0.2598, -0.2409, -0.4176, -0.8485, +0.2568,
-0.1216, +0.2768, -0.3487, +0.0511, -0.1213, -0.5023, -0.3206,
-0.2718, -0.0845, -0.5767, +0.2253, +0.4208, -0.3365, -0.1327,
-0.3217, -0.6869, +0.0320, -0.2220, -0.1256, +0.0224, +0.5940,
+0.0939, +0.1967, -0.2259, -0.0278, -0.5220, -0.6105, +0.0110,
+0.0768, -0.2414, -0.0436, +0.1692, +0.1113, -0.9959, -0.0885,
-0.0330, +0.0898, -0.2107, -0.3087, -0.0716, -0.7209, -0.5470,
+0.1603, +0.0911, -0.3636, +0.2226, +0.5098, +0.0130, -1.0301,
-0.2137, -0.5319, +0.1819, +0.0335, +0.4514, -0.2537, -0.1274,
-0.2860, -0.2903, +0.1763, -0.7967, +0.1072, +0.0330, -0.0080,
-0.0305, +0.1275, -0.0920, +0.1267, -0.9726, -0.5605, -1.0906,
+0.0436, -0.1351, +0.2924, -0.0994, +0.1213, -0.0452, -0.1222,
+0.0212, -0.3013, +0.2962, +0.2039, +0.7498, +0.2355, -0.4564,
+0.0400, +0.4825, -0.2935, +0.4372, +0.6510, -0.2114, +0.4234,
+0.0511, -0.0598, -0.1777, -0.5762, -1.0183, +0.1683, -0.9535,
+0.0247, +0.0302, -0.1133, +0.2441, -0.2464, +0.3237, +0.0242,
+0.5588, -0.4974, +0.0399, -0.0217, -0.0068, +0.0193, +0.0620,
+0.6197, -0.9937, -0.0499, -0.1407, -0.1001, -1.1461, -0.3159,
-0.6738, +0.1658, +0.1644, -0.5083, -0.0085, +0.1327, +0.1629,
+0.4424, +0.0172, -0.3822, -0.0851, -0.2080, +0.3361, -0.1089,
+0.1334, +0.1965, +0.2532, -0.7738, +0.1240, +0.1797, +0.1834,
-0.7705, -0.0152, -0.7457, +0.6776, -0.2594, +0.0591, -0.0137,
+0.6278, +0.0465, -0.3670, +0.0655, -0.8144, -0.5686, +0.0183,
+0.0477, -0.0919, +0.0162, -1.1542, +0.4707, +0.2479, -0.2166,
-0.1802, +0.3401, -0.6682, -0.3063, -0.1307, +0.1714, -2.4957,
-0.1072, -0.3282, -0.7105, -0.8619, +0.3392, -0.3000, -0.1363,
-0.0181, -0.1055, +0.2948, +0.3553, +0.2409, +0.4209, +0.3848,
-0.7700, -0.5748, +0.0001, +0.3585
]])
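# Bias vector for the first dense layer (dense1): 256 entries, matching the
# 256-entry rows of the weight matrix that ends above.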
weights_dense1_b = np.array([
-0.0562, -0.1282, -0.3753, -0.1119, -0.0136, -0.2313, -0.0345, +0.0894,
-0.0010, -0.0601, -0.0762, -0.1205, -0.1190, +0.0179, -0.0906, -0.0572,
+0.0797, -0.2050, -0.0318, -0.2306, -0.0182, -0.0847, +0.0607, -0.1987,
-0.1262, -0.1896, -0.0226, -0.0034, -0.0809, +0.0404, +0.0756, -0.1168,
-0.0733, -0.0739, +0.0161, -0.0528, +0.0067, +0.0332, -0.2382, -0.0745,
-0.0184, -0.0287, +0.1160, -0.0248, -0.1523, -0.0589, -0.3165, -0.0986,
-0.0604, -0.0633, +0.0549, -0.0209, -0.0474, -0.2358, -0.0890, -0.1201,
-0.0595, +0.1562, -0.1967, -0.0995, -0.1231, +0.0500, +0.0962, -0.1140,
+0.0393, -0.1390, -0.1273, -0.0866, +0.0868, -0.0849, -0.1148, -0.2904,
+0.0706, -0.1821, -0.1143, -0.2196, -0.0112, -0.0375, -0.1567, -0.0589,
+0.1257, +0.1125, -0.1020, +0.0818, -0.2838, -0.1653, -0.1676, -0.0244,
-0.0784, -0.0181, -0.0429, -0.0966, +0.0494, +0.0093, -0.0858, -0.0919,
-0.1196, -0.2181, +0.0292, -0.2283, -0.2595, -0.0925, -0.0134, -0.0093,
+0.0310, +0.0078, -0.0172, -0.0803, -0.0386, +0.1367, -0.1948, -0.0290,
+0.0785, -0.0464, +0.0371, -0.0128, -0.0377, -0.0724, +0.1637, -0.1972,
-0.1093, -0.1634, -0.0691, -0.0834, -0.2310, -0.2676, -0.0683, -0.0024,
-0.1401, -0.0378, -0.1633, -0.1665, -0.1098, -0.0746, -0.0684, -0.1914,
-0.1284, -0.0998, +0.0577, -0.0638, -0.0885, -0.0445, -0.0384, -0.1012,
+0.0067, -0.2128, -0.1312, +0.0526, +0.1238, +0.0742, +0.0261, +0.0444,
+0.0614, +0.1223, +0.1232, -0.1827, -0.1289, -0.0469, -0.1420, +0.0705,
-0.2220, -0.2401, -0.1121, -0.1697, -0.1621, +0.0688, +0.0688, -0.0040,
-0.0048, -0.0416, -0.1265, +0.0311, +0.0280, +0.0287, -0.1893, -0.0966,
-0.0876, -0.1778, +0.0000, +0.0074, -0.0978, -0.1932, -0.0790, +0.0037,
-0.1398, -0.1006, -0.0532, -0.1227, +0.1148, -0.1254, -0.1134, +0.1205,
-0.1588, +0.1093, -0.2470, -0.0915, -0.1715, -0.0036, -0.2943, +0.1375,
+0.1075, -0.0339, -0.0491, +0.0060, +0.0145, -0.1867, +0.0646, -0.1327,
-0.0529, -0.0830, +0.1433, -0.1924, -0.0285, -0.2420, -0.2244, -0.1451,
-0.1009, -0.1149, +0.0846, +0.0473, +0.1568, -0.0062, -0.1289, -0.2085,
+0.0600, -0.1314, -0.1569, +0.2043, -0.0302, -0.1666, +0.0473, -0.0075,
-0.2825, -0.0696, -0.2223, -0.2911, -0.0567, -0.0510, +0.0184, -0.2132,
-0.0371, +0.1715, -0.1301, -0.0310, +0.1523, -0.0142, +0.0185, -0.0030,
-0.0725, -0.1011, -0.0923, -0.1991, -0.1686, -0.1198, -0.2226, -0.1094
])
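# Weight matrix for the second dense layer. Each row of the literal below
# carries 128 values, so this layer presumably maps the 256-unit hidden
# representation down to 128 units (again inferred from the array shapes,
# not from any declaration elsewhere in this file).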
weights_dense2_w = np.array(
[[
+0.0445, +0.3258, -0.3897, -0.9250, -0.1552, +0.0457, -0.1724, -0.0763,
-1.4100, +0.1473, -0.1446, -0.3841, +0.1477, -0.0290, +0.1949, +0.3805,
-0.0565, +0.0721, +0.4733, -0.8596, +0.1278, -0.7788, +0.2252, +0.5076,
+0.3259, -0.4569, +0.0218, -1.1651, -0.6023, +0.5247, -0.0698, -0.5287,
-0.4980, -0.3742, -0.3540, +0.2404, -0.2737, -0.0001, -0.2909, -0.6940,
+0.1236, -0.4878, -0.2978, -0.0255, -0.2776, -0.0096, -0.0926, +0.3629,
+0.1967, -1.4511, +0.0184, -0.0039, -0.0859, -0.3284, +0.1288, -0.2185,
-0.4335, -0.0137, -0.4161, -0.1739, +0.1742, +0.1686, -0.7623, -0.4584,
+0.0873, -0.1496, -0.2064, -0.2601, -0.0554, +0.1165, -0.1830, +0.3370,
-0.3273, -0.0035, -0.5173, -0.4919, -0.5520, +0.4621, -0.3296, +0.2683,
-0.2363, +0.2684, +0.5559, -0.0760, -0.3864, -0.5297, -1.1667, -0.1952,
+0.1237, -0.0796, -0.2315, -0.1927, +0.0855, +0.1945, +0.2481, +0.3408,
+0.5076, +0.2756, +0.2805, +0.2590, +0.0864, +0.1665, +0.7472, +0.0074,
+0.1091, -0.7141, -0.1676, +0.0718, -0.2115, -0.1887, -0.4651, +0.3149,
+0.1988, -0.4582, -0.0331, -0.0648, +0.1369, -0.3698, +0.3056, +0.6038,
-0.4791, -0.8684, -0.3822, -0.4373, -0.3103, +0.1921, -1.2456, -0.1832
],
[
-0.1644, +0.0559, -1.1163, +0.4768, +0.0837, +0.1339, -0.7792,
-0.9042, +0.3480, -0.7278, -0.1942, -0.3883, +0.2595, +0.4492,
-0.2603, +0.2858, -0.5011, -0.4032, -0.5475, +0.4062, +0.3066,
+0.2421, +0.3979, -0.0046, -1.0351, -0.6019, +0.3501, +0.3352,
+0.4175, -0.5266, -0.6269, -0.5685, -0.3698, +0.5508, -0.4117,
-1.1622, -0.9162, +0.2534, +0.0436, -0.8803, +0.3523, +0.0850,
+0.3050, +0.8887, -0.3365, -0.3370, +0.1841, -0.4630, -1.1252,
+0.1474, -0.3641, -0.2109, -0.6057, +0.1948, +0.2263, -0.0045,
-0.0550, +0.6555, -0.3998, -0.4963, -0.5501, -0.2589, -0.4759,
-0.9856, -0.4430, -0.9262, +0.1264, -0.4759, -0.5656, +0.7274,
+0.6485, +0.7692, +0.5744, -0.1457, +0.1298, +0.1832, +0.3575,
-0.5757, -0.1918, +0.4286, -1.0246, -0.6236, +0.2141, -0.1002,
-0.3561, -0.3934, +0.7368, -0.2440, +0.1372, +0.1610, -1.0551,
-0.8087, -0.3633, -0.2956, -0.1642, -0.9371, -0.3696, -0.5521,
+0.0405, +0.0178, -0.5200, -1.3731, -1.1641, -0.9716, -0.9021,
-0.5449, -0.8470, +0.3427, -0.5285, +0.4033, +0.3367, -0.9577,
-0.3523, -0.2851, +0.7887, -0.2403, +0.0766, +0.6234, -0.4794,
+1.1367, +0.1354, -0.5252, +0.2428, -0.3933, +0.2007, +0.0248,
+0.3071, +0.0446
],
[
+0.0672, -1.0581, +0.3253, +0.3608, +0.4397, +0.4099, -0.1706,
-1.4956, +0.1242, -0.0741, -0.0724, -0.8454, +0.1054, -0.3769,
+0.0595, -0.8427, +0.1487, +0.1021, +0.0235, +0.2057, -0.3067,
+0.0910, -0.0292, -0.4893, +0.0259, +0.1918, -0.4798, -0.1641,
+0.2843, +0.6346, -0.1970, +0.4576, +0.1711, -0.7762, -0.9540,
-0.1866, -0.0695, -0.2133, -0.4258, -0.1893, -0.3393, -0.2800,
-0.4166, -0.4609, -0.1899, -0.0236, +0.1991, -0.2867, -0.3952,
+0.1985, -1.1685, -0.4137, -0.3359, -0.5354, -0.2031, +0.4196,
-0.6850, -0.1369, -0.2796, -0.5954, -0.1591, +0.0054, -0.0236,
-0.0944, +0.2996, -0.3397, +0.3878, -0.3327, -0.5361, +0.3334,
-0.5441, +0.0895, -0.6604, -0.1568, -0.4287, -0.6188, -0.4958,
-0.8847, -0.2318, +0.0979, -0.1458, +0.6555, -0.2316, -0.8410,
-0.1204, -0.5653, -0.7725, -1.4690, -0.0659, -0.6189, -0.8568,
+0.1092, -0.3853, -0.1022, -0.4768, +0.0461, -1.2451, +0.4257,
-0.3797, -0.8929, +0.0062, -0.1235, +0.7245, +0.3688, -0.4415,
-0.3380, -2.0255, -0.4822, -0.6014, -0.4930, +0.4081, -0.1571,
+0.3983, +0.1135, -0.3538, -1.1742, +0.4119, -0.2515, +0.2638,
-0.9096, -0.7660, -0.5376, +0.4075, -0.8097, +0.0791, +0.2727,
-0.1079, -0.9154
],
[
+0.3015, -0.0740, +0.4066, -0.5555, +0.5124, +0.3837, +0.2981,
-0.9054, +0.1914, -0.5646, +0.8334, -0.3740, -0.0627, +0.1642,
-0.6534, +0.6830, -0.2664, -0.8940, -0.0617, -0.0839, -0.6640,
-0.2262, +0.1927, +0.2503, -1.0157, +0.3276, +0.0375, +0.8604,
+0.0385, -0.0106, -0.1545, +0.6813, +0.5188, +0.0947, +0.0471,
+0.9269, +0.8989, -0.2302, -1.3604, -0.2539, +0.5975, -0.2451,
+0.3136, -0.6545, -0.5904, +0.6794, -0.7342, -0.2178, -1.0867,
+0.3340, -1.2048, -0.2076, -0.0349, +0.1764, -0.1004, -0.1311,
-0.2428, +0.4325, +0.1302, +0.0019, -0.0881, +0.3566, +0.0503,
-0.0759, +0.0366, -1.0691, +0.1273, -0.2615, -0.0779, +0.6359,
-0.6128, -0.4660, -0.3794, +0.1681, -0.3744, +0.0897, +0.3767,
+0.3950, +0.6027, +0.6578, -0.2572, -0.3049, -0.5333, -0.4376,
-1.8317, -0.9671, -0.4943, -0.3099, +0.2941, +0.1555, +0.3129,
-0.3870, -1.6067, +0.1901, +0.3682, -0.2147, -0.4661, +0.3253,
+0.0028, -0.9932, -0.7515, +0.3019, -0.3932, -0.1185, -0.1441,
+0.5607, -1.2272, -0.1169, +0.1525, -0.4974, +0.4858, -0.1839,
-0.3725, -0.1979, +0.2342, -0.0696, -0.0034, +0.1385, +0.9345,
+0.6038, -0.5267, +0.1343, +0.2124, +0.3444, +0.0326, +0.3010,
+0.1535, +0.8926
],
[
-0.6132, -0.0626, +0.1836, -0.0017, +0.3117, -1.2901, +0.2260,
-0.3219, +0.0400, -0.2747, +0.2144, -0.3827, +0.3195, +0.2954,
-0.3256, -0.3032, +0.6317, +0.4147, -0.2002, -0.1467, -0.9987,
-0.5831, -0.6824, +0.0135, +0.0619, +0.0365, -1.1740, -0.5402,
+0.1055, -0.4065, +0.2682, -0.7483, -0.0056, -1.5943, -0.6814,
-0.1541, -0.1458, -2.2840, +0.6172, -0.5811, -0.0055, -0.6315,
+0.3828, -0.2704, +0.5921, -0.1200, -0.1509, -0.2821, +0.1405,
-1.3506, -0.2311, +0.0215, -0.0361, +0.0953, +0.3615, +0.3187,
-0.3967, -0.0055, -0.1907, -0.0275, +0.0199, -0.5271, +0.0952,
-0.2753, -0.1653, +0.1166, +0.0769, -0.1883, -0.6950, +0.1223,
+0.5596, +0.1561, -0.3390, +0.2991, -2.3407, +0.0940, -1.1829,
+0.1751, +0.0089, +0.2114, -0.1418, -0.0711, -0.0123, -0.9252,
+0.5315, -0.5255, -0.1498, +0.6514, +0.0192, -0.7630, -0.2391,
-0.5887, -0.2899, -0.3057, +0.4777, -0.0072, -1.2469, -0.3501,
+0.1416, -0.3820, -0.0205, -0.5110, +0.0706, -1.0889, -0.3870,
+0.1781, -1.2873, +0.1878, -0.0433, -0.7868, -0.4801, -0.0985,
-0.4878, +0.0957, +0.1303, +0.2984, +0.1435, +0.4211, +0.5997,
-0.5081, -0.3256, -0.4320, -0.9251, -0.4870, -0.7026, +0.0313,
+0.1807, +0.3195
],
[
-0.4618, -0.1217, -1.4947, +0.6432, -0.0286, -0.0113, -0.1610,
+0.3059, +0.1557, -0.3754, -0.2707, +0.1685, +0.0389, -0.3181,
+0.5652, -0.1861, -0.1956, +0.4189, +0.1113, +0.1597, -0.0383,
+0.3372, +0.2067, +0.0350, +0.3312, -0.2489, -0.4308, -0.4614,
-0.4210, +0.1451, -0.1855, -0.4838, +0.4251, +0.2819, +0.0486,
+0.2659, -0.0646, +0.3865, +0.2074, +0.0339, +0.4180, -0.0953,
+0.0472, +0.0316, -0.2890, +0.4763, +0.5133, +0.2853, -0.4433,
+0.2192, +0.3042, -0.1120, +0.0379, -0.6806, +0.0723, +0.0475,
+0.0077, -0.0484, -0.5169, +0.3545, -0.2064, +0.0259, +0.2864,
+0.0194, +0.1699, -0.0441, -0.1463, +0.1760, +0.3086, -0.4820,
-0.2502, -0.4887, +0.0559, +0.4278, +0.1996, +0.1614, +0.2339,
+0.0800, -0.6270, -0.2786, -0.4181, -0.1674, -0.2827, -0.4783,
+0.1192, -0.0594, -0.6800, -0.0548, -0.1755, -0.1158, +0.3345,
+0.2706, -0.3446, +0.0566, -0.6105, +0.1354, -0.3801, -0.0385,
-0.2697, -0.0631, -0.1239, -0.1411, +0.0484, -0.6900, -0.2176,
-0.1499, +0.0073, -0.3549, -0.4897, -0.0909, -0.0063, +0.1598,
+0.2158, -0.9459, -0.2283, +0.3218, +0.1076, +0.0235, +0.4750,
-0.1916, -0.0259, -0.3111, +0.1156, +0.0414, +0.0901, -0.2362,
+0.2898, +0.3987
],
[
+0.1194, +0.2215, -0.7395, -0.6066, -0.2603, -0.6324, +0.0280,
-0.0233, +0.2191, -0.0356, +0.1409, -0.0238, -0.3462, +0.5912,
+0.0531, -0.2901, +0.0148, +0.2699, -0.1726, -0.1041, +0.1550,
+0.3988, -0.1391, +0.0865, -0.1727, -0.0615, -0.3390, -0.2071,
+0.5588, -0.1679, -0.6875, +0.1969, -0.0317, +0.0969, +0.4025,
+0.2154, -1.1675, +0.3348, +0.3878, -0.1976, +0.4689, +0.1378,
+0.2840, +0.3174, +0.3507, +0.0292, +0.4537, +0.1683, +0.3744,
+0.0121, -0.0917, +0.0866, +0.3034, -0.5841, +0.2858, -0.6695,
-0.4560, +0.6588, +0.4572, +0.0193, +0.3867, -0.6039, -0.0019,
+0.2913, +0.0934, +0.4742, +0.0944, -0.3412, +0.3524, +0.0335,
-0.6695, +0.7083, +0.5825, -0.1013, -0.0832, +0.6263, +0.5189,
-0.3299, -0.0619, +0.2237, +0.0813, -0.0122, +0.2237, +0.9135,
+0.3399, +0.1430, -0.0912, -0.0395, +0.3444, -0.6128, -0.0136,
+0.0827, +0.2395, -0.5342, +0.0654, +0.0967, -0.1721, -0.1917,
+0.1808, -0.1465, -0.1908, +0.2995, -0.1450, +0.3075, +0.5168,
+0.7156, -0.0292, -0.2514, -0.1955, -0.4305, +0.2834, -1.6873,
+0.3317, -0.2397, -0.5233, -0.1526, -0.2658, +0.1916, -0.0949,
+0.2834, -0.1353, +0.2761, +0.3861, -0.1116, -0.7916, -0.0785,
+0.1260, -0.0051
],
[
-0.4166, -0.6567, +0.2667, +0.1786, +0.3510, -0.0422, +0.7925,
-0.0154, -0.1518, -0.0187, +0.0379, -0.1076, +0.3849, -0.2907,
+0.0690, +0.3741, +0.5237, +0.2914, -0.4741, +0.5339, +0.2658,
+0.3419, +0.6672, -0.0275, +0.1312, +0.2843, +0.5785, -0.5382,
-0.9244, +0.3275, +0.1660, +0.0649, -0.0301, -0.7872, +0.3756,
+0.3165, -1.1756, -0.3764, +0.3109, -0.2525, -0.1304, +0.1871,
-0.3203, +0.7052, -0.1682, -0.2009, +0.1683, +0.3273, +1.6270,
+0.2042, +0.1354, -0.2825, +0.1715, -1.6118, +0.1005, -0.0738,
-0.1032, +0.3228, +0.2159, -0.3190, -0.5356, +0.3108, +0.0485,
+0.4706, +0.3739, -1.1787, +0.3236, -0.6023, +0.1504, +0.1740,
+0.2444, -0.3717, -1.2288, -0.2562, -0.0348, -0.0171, -0.5948,
-0.9390, +0.0392, -0.7069, -0.2203, +0.3624, -0.2812, -0.2286,
-0.0412, +0.0528, -0.3069, -0.0803, -1.6197, +0.8269, +0.1908,
+0.8159, +0.3059, +0.1064, -0.5850, +0.4684, +0.2937, -0.0045,
-0.1077, -0.7226, -0.0540, -1.5225, -0.2574, +0.8567, +0.1914,
+0.6329, -0.5773, +0.0707, -0.4777, +0.0377, -0.0070, -0.1626,
-0.5968, -0.2404, -1.2759, -0.2537, +0.1132, -0.1214, -0.9898,
-0.2443, -0.6741, -0.6288, +0.1556, -0.3358, +0.1861, -0.5565,
+0.7699, -0.1238
],
[
-0.9355, +0.9826, +0.1094, +0.6101, +0.5008, -0.0418, +0.3574,
-0.4915, -1.0313, -0.2750, +0.4640, +0.1685, +0.1614, -0.5493,
-0.0395, +0.0826, -0.4272, -0.2077, +0.7788, -0.4189, +0.0903,
-0.4165, -0.1848, +0.0345, -0.3355, +0.0290, -0.2494, +0.2498,
+0.2195, -0.3544, +0.3827, +0.5353, +0.3030, -0.3058, +0.1684,
+0.0712, +0.1870, +0.0772, -0.6504, +0.1901, +0.1617, -0.0998,
-0.1492, -0.5738, +0.2446, +0.0092, -0.5506, -0.2370, +0.1765,
-0.1482, +0.5713, +0.0210, +0.0355, +0.1140, -0.7140, -0.0040,
+0.4767, +0.2202, -0.3633, +0.1525, -0.1511, +0.5307, +0.5185,
+0.3934, -0.1424, +0.4631, -0.3201, -1.2858, -0.0077, -0.1245,
-0.0614, -0.4914, -0.4229, +0.3751, +0.0951, -0.4626, +0.0338,
+0.0951, -0.1898, -0.0954, -0.0744, -0.5868, +0.3834, -0.8412,
+0.3414, -0.4893, +0.2224, -0.6829, -0.2101, -0.4551, +0.6793,
+0.1760, -0.4434, +0.3216, +0.1542, +0.0045, +0.4077, -0.4215,
-0.2623, +0.0514, +0.0329, -0.1839, -0.4462, -0.2559, -0.1739,
-0.0286, +0.0429, -0.7608, -0.4764, -0.1833, +0.2059, -0.9402,
-0.3059, -0.0848, -0.3453, -0.3135, +0.1000, -0.0187, +0.7457,
-0.8470, +0.1928, +0.4921, -0.1373, +0.4402, +0.4008, -0.3943,
-0.1944, +0.1398
],
[
+0.1872, +0.9816, +0.0851, +0.1390, -0.8130, -0.1898, -0.3910,
+0.3106, -0.2167, +0.6273, +0.5541, -0.0014, +0.2877, -0.1589,
-0.2232, -0.1208, -0.2884, -0.9505, +0.4753, -1.7450, -0.3683,
-0.7012, +0.1969, -0.5218, -0.1690, +0.4793, -0.4462, +0.0265,
-0.2971, -0.0282, +0.3979, -0.7204, +0.0335, -0.6340, -0.4683,
+0.4272, -0.7263, -0.4165, +0.1131, -0.1232, -0.0161, -0.5044,
-0.3839, +0.0541, +0.5817, -0.2580, -1.3879, -0.1213, +0.3584,
+0.0509, -1.0074, -0.0474, -0.3855, +0.2602, -0.3497, -0.5726,
+0.5644, -0.1663, +0.3701, -0.5592, -1.5091, -0.3567, +0.4544,
+0.6069, -0.8949, +0.6127, -0.7491, +0.7633, -0.4563, -0.8731,
-0.4802, +0.1802, -0.8905, +0.1056, -0.2027, +0.1881, -0.3565,
-0.2124, -0.3840, +0.0482, +0.3754, -0.6157, -0.1367, -0.1701,
-0.2833, -0.5350, -0.4995, +0.2334, -0.7944, -0.9158, +0.0448,
-2.6868, -0.0335, -0.0847, +0.4522, -0.4106, +0.1345, -0.5642,
-0.8447, +0.4460, +0.4215, -0.1796, -0.0094, -0.6337, +0.4704,
-0.6282, -0.1135, +0.8338, +0.2184, -0.1924, -0.0502, -0.1903,
+0.2182, -0.1559, -0.5885, -0.4098, -0.0150, -0.6609, +0.5288,
-0.6010, -1.1834, +0.5740, -0.5148, +0.7980, +0.5594, -0.0867,
-1.3654, +0.6106
],
[
-0.4394, +0.2310, -0.3018, -0.3480, -0.9776, -0.2720, +0.1813,
+0.3134, +0.1339, -0.0724, -0.8018, +0.1788, -1.0079, -1.1364,
-0.8186, -0.1026, +0.2480, +0.0618, -0.0769, -0.2584, +0.4019,
-0.3211, -0.1836, -0.1351, -0.4099, -0.2835, -0.5408, -0.7063,
-0.2228, +0.0479, +0.2686, -0.3299, -0.7873, +0.0996, -2.0968,
+0.4859, +0.1783, -0.5059, -0.5944, +0.1206, +0.1465, -0.1475,
-1.4468, -1.1564, -1.9459, -0.0934, +0.5490, -0.1651, +0.3982,
-0.2141, -0.4545, -0.1564, -0.1334, +0.2570, +0.0896, -0.1352,
-0.5934, +0.3121, -0.6738, +0.0189, -0.1665, -0.8223, -0.0091,
-0.4308, +0.3138, -0.5113, -0.0708, -0.2009, -0.6023, -0.4238,
-0.1099, +0.3639, -0.0279, +0.1326, -0.1242, +0.0337, +0.2367,
+0.0850, -0.0917, -1.2237, -0.6782, +0.5698, +0.1095, -0.1615,
-0.8337, +0.2228, -0.5700, +0.0232, -0.0210, -0.3978, +0.4321,
-0.0272, -0.1537, -1.2350, -0.5124, -0.3720, -0.2994, +0.2628,
+0.4213, -0.7631, -0.2937, -0.0566, -0.3422, +0.2419, +0.0051,
-0.4876, +0.2640, +0.0167, -0.4600, +0.4592, -1.0462, -0.2274,
+0.0501, -1.3160, -0.6923, +0.2786, -0.3156, -0.4447, -0.3842,
+0.7620, -0.6217, +0.1690, -1.3204, -0.2791, -0.5546, -0.0060,
-0.0596, -0.1868
],
[
-0.5495, +0.3365, -0.7584, -0.2572, -0.2659, +0.4092, +0.1424,
-0.2120, +0.3421, +0.0521, -1.4783, -0.1153, +0.3200, +0.9175,
+0.5001, -0.2805, +0.2878, +0.3383, +0.0098, -0.7964, -0.0732,
+0.0894, +0.4826, +0.5428, +0.0961, +0.0482, -0.4127, -0.4349,
+0.1313, +0.5661, -0.5111, +0.3665, -0.3487, -0.2787, -0.8896,
+0.0938, -0.5667, +0.1014, -0.1201, -1.3133, -0.5152, +0.1094,
-0.2116, -0.3066, +0.3762, +0.7020, +0.9420, +0.5691, +0.0542,
-0.7900, -0.6320, -0.1268, +0.0348, +0.6443, -0.3499, -0.1280,
+0.4221, +0.0023, -0.8830, -0.9870, +0.5297, -0.0939, +0.4132,
-0.0311, +0.5731, -0.2274, -0.6287, -1.1691, -0.2294, +0.4159,
+0.2609, -0.1928, +0.1094, +0.1801, -0.0305, +0.1778, -1.4363,
-0.5287, -0.2018, +0.7580, -0.4102, +0.2158, -0.0664, +0.1027,
-0.0060, +0.5857, +0.1354, -0.0279, +0.2287, -0.3410, +0.1904,
-1.1251, +0.3415, -0.2569, -0.6138, -1.1146, +0.0824, +0.1870,
-0.6945, -0.7618, +0.3157, +0.3979, +0.1496, +0.2627, +0.0947,
-1.2781, +0.0487, +0.3630, -0.2052, -0.7007, -0.0074, +0.3732,
+0.4000, +0.0503, -0.3131, +0.4917, -0.3112, -0.2290, -0.1998,
+0.1941, -0.7223, +0.2580, -0.3792, -0.1586, -0.4023, -0.6316,
+0.0302, +0.3170
],
[
-0.2819, -0.1824, +0.5367, -0.5343, -0.6550, +0.3470, +0.3113,
+0.3216, +0.0234, +0.2092, +0.5161, +0.4814, -0.6742, +0.1774,
-0.4262, -0.1184, -0.3862, +0.3776, -0.0454, +0.3588, +0.2280,
+0.0591, -0.5632, -0.1272, -0.5212, -0.1646, -1.0133, -0.6814,
+0.3322, +0.2075, +0.6023, -0.6443, -0.5341, -0.2680, -0.0889,
-0.2357, -0.3847, -0.0124, -0.1986, +0.2168, +0.2596, -1.5922,
+0.0155, -0.4838, -1.1318, +0.0587, -1.1597, -0.3127, +0.0851,
-0.0218, -0.1821, +0.1180, +0.0146, -0.6130, -0.2081, -0.2196,
-0.7754, -0.0791, +0.4528, +0.0908, +0.3110, +0.2869, +0.4468,
+0.1967, +0.2325, -0.2844, +0.1799, +0.1444, +0.5616, -0.7208,
+0.4001, +0.3364, +0.3572, +0.3306, -0.1394, +0.0448, +0.0176,
-0.1806, +0.0376, -0.5749, -1.4323, -0.1710, -0.1247, -1.1199,
-0.0686, +0.3875, +0.0285, -0.1925, -0.6119, -0.3564, -0.2852,
+0.5464, -0.8385, -0.3253, -0.8248, -0.3679, -0.2737, +0.4797,
-0.5317, -0.0144, +0.1526, -0.1459, -0.3113, -0.1446, -0.0265,
-0.8188, +0.3409, +0.4165, -1.6213, +0.3362, -0.1435, +0.0679,
-0.3296, -1.2932, -0.1976, +0.0025, -0.3460, -0.2282, +0.0328,
+0.0128, -0.6903, -0.2594, +0.1898, -0.3305, +0.0718, -0.1354,
+0.3221, -0.2091
],
[
+0.7015, -0.0079, +0.0603, -0.1224, -0.0905, -0.0360, +0.1416,
+0.0313, -1.2110, -0.2915, -0.9822, +0.1289, -0.5388, -0.5899,
-0.3639, +0.1049, +0.2668, -0.6067, -0.0946, +0.2901, -0.6062,
+0.1575, -0.3185, +0.1406, -0.0531, +0.0853, +0.1109, +0.2724,
-0.9222, +0.0355, +0.4736, -0.4827, -0.7927, +0.0591, -0.3695,
-0.8606, -0.3656, -0.4858, +0.0656, -0.1895, -0.5924, -0.1785,
-0.0872, +0.0163, -0.3657, -0.1144, -0.7264, -0.0708, +0.3746,
-0.4169, +0.0849, +0.2061, +0.5279, +0.4709, -0.0896, -0.5886,
-0.1939, +0.1890, -0.1501, +0.2609, +0.2305, -0.2359, -0.4166,
-1.3491, +0.2312, +0.0627, -0.3370, -0.2057, -0.2985, +0.2800,
-0.0274, -0.4590, +0.2526, -0.3559, +0.1709, +0.2335, +0.3815,
-0.6601, +0.2350, -0.5104, -0.1831, +0.2343, +0.1400, -0.5159,
-0.0107, -1.0395, -0.1054, -0.5677, +0.1986, -0.5501, -0.7590,
+0.2544, +0.2979, -0.2088, +0.0723, +0.2322, +0.3433, -0.5151,
-0.1836, -0.2278, -0.3615, -0.0833, -0.4835, +0.5434, -0.1283,
+0.0631, -0.3564, +0.1727, +0.0701, -0.1985, +0.4197, +0.8758,
+0.1505, -0.7602, -0.1722, -0.3172, -0.1665, -0.0129, +0.1756,
+0.1746, -0.6835, -0.2638, -0.3309, -0.2754, +0.2760, -0.3245,
-0.4501, +0.1531
],
[
-0.5910, +0.4678, -0.8342, +0.0698, -0.3237, -0.1614, +0.3568,
+0.3391, -0.1352, -0.9422, +0.1731, +0.0649, +0.0359, +0.0780,
-0.0588, +0.1787, -0.2640, +0.1125, +0.1091, -0.9467, -0.1841,
-0.5780, +0.2515, -0.2598, +0.0299, +0.3900, -1.1704, -0.2194,
-0.5359, -0.0265, -0.0861, +0.4115, -1.0049, -0.0218, -0.2298,
+0.0651, +0.3290, +0.3422, -0.1790, -0.2639, +0.2790, -0.0778,
-0.0189, +0.5084, -0.5000, +0.1139, +0.3918, -0.6133, +0.0850,
-0.5195, +0.6430, +0.2377, +0.4246, -0.1079, +0.0031, -0.4855,
+0.1163, +0.1400, -0.8990, +0.0836, -0.1079, -0.1238, -0.8169,
-0.7508, +0.2860, -0.1766, -0.4766, -1.9178, -0.0434, -0.1472,
-0.1214, -0.4738, -0.4151, +0.1791, +0.4102, -0.3687, -0.4524,
+0.2226, -0.1458, +0.1435, -0.0020, +0.4678, +0.4075, -1.0137,
-0.3772, +0.0394, -0.2466, -0.7199, -0.0438, -1.0581, +0.2332,
-0.7536, -0.1882, -0.8108, -0.2119, -0.3047, -0.4887, +0.2553,
-0.2199, -0.7156, +0.1906, -0.0012, +0.0211, +0.1094, -0.2721,
-0.4634, -0.2296, -0.5068, +0.1374, +0.2166, +0.4215, -0.1778,
-0.0065, +0.6533, +0.0195, -0.2174, -0.8611, -0.0556, +0.3126,
-0.1544, -1.2263, +0.0302, -0.9812, +0.2193, +0.1708, -0.1093,
-0.2972, +0.1680
],
[
+0.3990, +0.0859, -0.1240, -0.8370, -0.4172, +0.2365, -0.0056,
+0.0501, -0.1428, -0.0168, +0.0601, -0.2195, -0.3317, -0.2262,
+0.1396, +0.1408, -0.3450, -0.3909, -0.2250, -0.5431, -0.9326,
-0.5343, -0.3074, -0.1953, -0.1808, +0.2521, -0.0151, +0.0151,
+0.3445, -0.0578, -0.2371, -0.1088, -0.0111, +0.0672, -0.1129,
-0.4050, -0.0916, +0.1445, -0.0713, -0.2679, +0.1176, -0.4107,
-0.1053, -0.2070, +0.7283, +0.0182, +0.0785, -0.3302, +0.4245,
+0.6522, -0.5767, -0.0863, -1.0398, -0.1261, +0.2347, -0.5232,
+0.2685, -0.1588, -0.4723, -0.3260, -0.4737, -0.7348, -0.4172,
+0.4803, -0.4773, -0.4487, +0.7380, -0.6768, -1.1271, -0.3248,
+0.2887, -0.0650, +0.8312, +0.4609, +0.2830, +0.0250, -0.1387,
-0.1422, +0.2407, -0.1626, -0.4312, -1.1603, +0.4189, +0.0939,
-0.3531, -0.2178, +0.2811, +0.1014, +0.3626, -0.0389, +0.3537,
-0.0242, +0.9483, +0.3594, -0.0648, -0.3060, +0.1765, -0.7881,
-0.1364, -0.4580, -0.0794, -0.2943, -0.5105, -0.1794, +0.1520,
-0.1179, -0.9498, +0.0333, +0.1983, -1.0507, +0.3775, +0.1208,
+0.1412, +0.2968, +0.2580, -0.1786, +0.3944, +0.0681, -0.8295,
+0.5816, -0.1301, -0.0029, -0.4506, +0.2168, -0.1471, +0.3748,
-0.1863, -0.0625
],
[
+0.1393, -0.3440, -0.1916, -0.6145, -1.0682, -0.0085, +0.0467,
-0.0013, -0.0962, -0.3870, -0.0871, -0.3778, +0.2254, +0.1940,
+0.1934, -0.7901, -0.4800, +0.2300, -0.6122, -0.5443, -0.2042,
+0.0812, +0.2239, +0.1593, +0.1071, -0.7654, +0.3019, -0.0838,
+0.4722, -0.2109, +0.2228, -0.2398, +0.1788, -0.0798, -0.3246,
+0.0000, -0.7649, +0.0329, +0.3764, -0.2840, -0.1474, -0.0116,
+0.3365, +0.5115, +0.5601, -0.7000, -0.2180, -0.0863, +0.2020,
-0.1458, -0.9690, -0.2748, -0.3088, -0.0826, +0.2325, +0.2183,
-0.1520, -0.1366, -0.3107, +0.2733, +0.4968, -0.0895, -0.1239,
+0.2910, -0.2267, -0.1839, -0.0321, -0.4823, +0.3016, +0.3723,
+0.3944, +0.1201, +0.3839, -0.0409, -0.2140, -0.0261, -0.2642,
+0.3141, +0.6421, +0.6582, +0.2082, -0.6195, +0.2912, +0.0357,
-0.5369, +0.0784, -0.1916, -0.1177, -0.1315, -0.3553, -0.0516,
+0.8580, -0.0791, -0.8106, -0.1678, -0.1375, -0.2407, -0.3142,
-0.1501, -1.1768, -0.0442, -0.2097, -0.7157, -0.3549, +0.0532,
-0.2223, -0.4742, +0.0961, -0.3822, -0.7533, +0.0197, -0.3242,
-0.7685, -0.4098, +0.4850, +0.1301, -0.5146, -0.2493, +0.0855,
+0.0385, +0.4716, +0.4514, +0.1462, -0.0489, -0.5990, +0.2224,
-0.3817, -0.0842
],
[
-0.3052, -0.1947, -0.2996, -0.0040, -0.1721, +0.2344, -0.3913,
+0.1915, -0.0813, +0.1120, +0.4848, +0.7501, -0.1336, +0.1784,
+0.2548, +0.0553, -0.2648, +0.5434, +0.1071, -0.7360, +0.4868,
-1.9705, +0.3370, -0.4066, -0.1121, +0.2190, -0.2057, -0.1045,
-0.3884, -0.1164, -0.3860, +0.3627, +0.0852, -0.0559, -0.0288,
+0.4844, -1.2247, -0.6384, -0.3676, -0.2889, +0.1286, -0.1623,
-0.4514, -0.9095, +0.3574, -0.8860, -0.1597, -0.0322, +0.3061,
-0.0241, -0.3640, +0.1712, +0.4706, -0.4074, -0.0463, +0.4842,
-0.2766, -0.1878, +0.2203, -0.0552, -0.5473, +0.3471, -0.4691,
-0.0022, +0.0363, +0.6001, +0.3843, +0.1338, +0.5381, +0.2268,
-0.3754, +0.2853, +0.1396, -0.2906, -0.5988, +0.6063, +0.3165,
+0.3628, +0.5851, -0.8569, -0.1414, -0.2864, -0.3541, +0.5733,
+0.1381, -0.7078, +0.4508, +0.0572, -0.6422, -0.2782, +0.4011,
+0.8227, +0.1561, -0.3120, -0.1448, -0.3317, +0.3289, -0.2140,
+0.0195, +0.6493, +0.0570, -0.4285, -0.4959, +0.4498, -0.2399,
-0.8054, -0.9527, +0.1554, -0.2333, -0.7512, -0.3255, +0.0679,
+1.1837, +0.4886, +0.0729, -0.6745, -0.2946, +0.4916, +0.0207,
+0.3546, -0.4189, -0.4255, -0.6069, -0.6768, +0.1386, -0.6672,
-0.6658, -0.8829
],
[
+0.0797, -0.5054, +0.0136, +0.1980, -0.1277, +0.0739, +0.2982,
+0.1720, +0.2600, -0.4211, -0.2309, -0.4136, +0.0334, +0.0354,
+0.5589, -0.1356, -0.1200, -1.5159, -0.3698, +0.4210, -0.7663,
-0.3039, -0.2075, +0.0265, +0.2961, -0.3017, +0.5159, -0.1434,
+0.2200, +0.1586, +0.2846, -0.0269, -0.0215, +0.3860, +0.2604,
-0.5857, +0.2478, -0.0333, -0.4322, -0.7724, -1.0079, +0.0702,
+0.0804, -0.2026, +0.2561, +0.1934, +0.0800, +0.6753, -0.5215,
+0.0715, -0.1621, +0.0212, -0.6631, -0.3874, -0.5760, -0.0029,
-0.0078, +0.1267, +0.2254, -0.3903, +0.0217, +0.6639, -0.0116,
-0.3764, -0.1426, -0.6670, +0.1575, +0.1441, +0.3506, -0.3192,
+0.4579, +0.2575, -0.3044, +0.1800, -0.7823, -0.2821, +0.0089,
+0.1812, +0.2162, +0.2755, -0.0168, -0.0375, -0.0799, +0.4972,
-0.0946, -0.4242, -0.5448, -0.4950, -1.1926, +0.1192, -0.1251,
-0.3178, -0.2847, -0.2470, -0.1367, -0.4373, -0.4606, -0.2024,
+0.0416, -0.6683, +0.3778, -0.5890, +0.0598, +0.6347, -0.4389,
-0.1130, +0.8578, -0.4276, +0.4702, +0.2074, +0.2512, -0.1159,
-0.3021, +0.4248, -0.4255, -0.1036, +0.5195, -0.3745, -0.2933,
+0.5167, -0.6131, +0.5847, +0.2629, -0.5066, -0.0528, -0.5667,
+0.5172, +0.4329
],
[
-0.0319, -0.0812, +0.1893, -0.0076, +0.0408, +0.9861, +0.1532,
+0.5791, +0.4635, -0.5317, +0.0360, +0.1377, +0.4940, -0.3670,
+0.0597, -0.2673, +0.4268, +0.5371, +0.9459, +0.6026, +0.3581,
+0.5822, -0.3556, -0.2087, +0.2368, -0.6264, -0.6553, -0.3130,
-0.0068, +0.0108, +0.6198, -0.1246, +0.4941, +0.0035, +1.1518,
-0.0484, -0.3501, -0.1881, -0.5316, +0.6341, -0.0666, -0.3592,
-0.8007, +0.0054, +0.4806, +0.3872, -0.1414, -0.1555, +0.1436,
-0.9244, -0.3463, +0.3656, +0.4359, +0.3659, +0.1684, -0.0401,
-0.2197, -0.1504, +0.4222, +0.1781, -0.1574, +0.2468, +0.2445,
-1.2630, -0.2287, +0.2540, -0.0357, -0.4610, -0.3447, +0.0243,
+0.4182, -0.1686, -0.1656, +0.0064, -0.1063, +0.0522, -0.2769,
+0.4825, +0.0400, -1.8240, -1.3218, -0.3842, -0.2491, +1.1058,
+0.3958, +0.0994, +0.0235, +0.4443, -0.3370, +0.4494, -0.2813,
+0.1513, -0.7493, -0.1674, -0.8722, +0.6108, -0.2310, -0.4216,
-0.6610, +0.0855, -0.3479, -0.1705, -0.0974, +0.0694, -0.2493,
-0.0780, -0.7308, -0.0663, +0.5153, -0.0797, +0.1073, -0.5629,
+0.0242, +0.1148, -0.2580, +0.0235, -0.1664, -0.4889, +0.1834,
-0.4821, -0.4944, -0.3256, +0.0020, -1.0169, -0.7666, +0.0411,
-0.4586, +0.1203
],
[
-0.5432, -0.8020, +0.0918, -0.1605, -0.3804, -0.2535, -0.4118,
+0.3286, -0.6496, +0.3496, -0.0343, -0.3351, +0.1318, +0.3702,
+0.4219, -0.1902, +0.3082, +0.0794, -0.8055, -0.1940, -0.7841,
+0.2015, +0.4231, -0.2529, -0.1225, +0.0647, +0.1334, -0.0605,
-0.0080, +0.4599, -0.0993, +0.2709, -0.5362, +0.2351, -0.0537,
-0.3275, -0.9580, +0.1666, +0.0469, -0.1855, +0.3882, +0.2301,
-0.0206, +0.1937, -0.3547, -0.0842, +0.0262, -0.1558, -0.2253,
-0.4692, -0.0289, -0.1977, -0.3126, +0.1086, -0.3525, +0.0515,
-0.7167, -0.1288, +0.0262, -0.0331, +0.1438, -0.2140, -0.2829,
+0.0953, +0.0114, -0.5833, +0.0937, +0.3545, -0.0935, -0.2859,
+0.0646, +0.2267, -0.3251, +0.3041, +0.0245, -0.0780, -0.4906,
-0.8082, -0.0172, +0.0940, +0.1477, +0.1898, -0.0042, +0.2624,
+0.2332, +0.2750, -0.2046, +0.0405, +0.2157, +0.2673, -0.8699,
+0.0976, +0.0859, -0.1087, -1.5906, -0.7188, +0.1202, -0.0439,
+0.1963, +0.1605, +0.0621, +0.0184, -0.0679, -0.6141, +0.3824,
-0.0532, -0.1961, +0.0858, +0.1383, -0.7507, -0.2177, +0.4552,
-0.1719, +0.1524, +0.0328, +0.2659, +0.1018, +0.1842, +0.0123,
-0.5312, -0.0575, -1.0402, +0.2488, -0.0787, +0.2566, +0.2899,
-0.3392, -0.1630
],
[
-0.1194, +0.3467, -0.2206, +0.1329, +0.0768, +0.6891, +0.0714,
-0.4637, -0.5310, -0.0903, +0.4664, +0.3078, -1.2855, +0.1329,
+0.0155, +0.0817, -0.3605, -0.8722, +0.0441, +0.5491, -0.1563,
-0.8174, +0.3046, +0.1270, +0.3098, -0.0151, -0.0045, -0.9193,
-0.1783, +0.1567, +0.5533, +0.0915, -0.6435, -0.3851, -0.4275,
-0.2248, +0.7145, +0.0099, -0.0457, -0.4532, +0.1251, -0.3843,
-0.1030, -0.0806, -0.5033, +0.2596, +0.3066, -0.5329, -0.4325,
+0.1346, +0.1403, -0.5028, -0.0000, +0.0686, -0.3853, +0.1682,
-0.1527, -0.0705, +0.4857, +0.3771, +0.0472, +0.2039, -0.1899,
+0.3069, -0.3187, +0.3162, +0.2182, -0.2863, -0.5433, -0.6458,
+0.3378, +0.3192, +0.1345, +0.0484, -0.0181, -0.1583, -0.9560,
-0.3627, -0.3025, -0.2251, -0.4278, -0.3376, +0.0789, -1.0522,
+0.0469, -0.6978, +0.1622, -1.1855, +0.1655, +0.0138, -0.8016,
-0.1230, +0.5485, +0.3558, +0.3172, +0.0599, +0.1526, -0.1669,
-1.0670, -0.3345, -0.3429, -0.8567, +0.5445, +0.5922, +0.2734,
-0.0315, -0.6848, -0.3854, -0.1927, -1.1359, -0.4998, +0.1007,
-0.3175, -0.5658, +0.4406, -0.0551, +0.4072, +0.1219, -0.5193,
-0.2507, -0.8700, +0.2308, +0.4804, -0.7289, +0.1062, +0.4289,
+0.4740, +0.5733
],
[
+0.4704, +0.0984, +0.5533, -0.1550, -0.7135, +0.3362, -0.1627,
+0.3920, -0.3064, -0.9689, -0.5753, -0.5526, -0.0743, +0.3165,
-1.3096, -0.0676, -0.1110, -0.0326, -0.7468, -0.0229, -0.2737,
-0.0090, -0.3595, -0.5636, -0.0341, -0.4081, +0.2472, -0.3242,
-0.1158, -0.6365, +0.1962, +0.2294, +0.1400, +0.5869, +0.1713,
+0.0023, +0.0232, -1.2354, +0.1584, -0.1950, -0.2557, -0.0327,
+0.1266, +0.2868, +0.2143, -0.1170, +0.3738, +0.2260, +0.1861,
+0.1461, -0.4707, +0.3385, -0.5547, -0.5667, +0.1489, +0.1197,
+0.1865, +0.1954, +0.3249, -0.0711, +0.1393, -0.1540, +0.1666,
-0.0516, +0.4031, +0.3465, +0.0516, +0.3941, -0.7685, -0.3348,
-0.2972, +0.2939, +0.3497, -0.6229, +0.0615, -0.2449, +0.3241,
-0.0620, +0.1183, +0.2582, +0.0633, -0.3186, -0.0816, +0.0410,
-0.4114, -0.0251, -0.1757, +0.2182, +0.0251, +0.7628, -0.0240,
-0.0525, +0.4578, +0.0557, +0.2615, +0.2159, +0.2395, +0.1016,
+0.0726, -0.4254, -0.0181, -0.6271, -0.6427, +0.0284, +0.4687,
+0.2167, -0.8132, +0.3172, +0.4417, -0.1187, -0.5183, -0.1523,
-0.1811, -0.8048, +0.4459, +0.4368, +0.4110, -0.3081, -0.3863,
+0.3325, +0.0145, +0.0293, -0.0961, -0.6588, -0.6577, -0.3175,
+0.0458, +0.1852
],
[
-0.3244, +0.3453, +0.4659, +0.1278, +0.0263, +0.3692, -0.1322,
+0.5408, +0.0223, -0.4064, -0.0329, +0.0427, +0.0518, +0.1526,
-0.6745, -0.6814, -0.2794, +0.0076, +0.4716, +0.0989, +0.2801,
+0.4726, +0.8755, -0.0468, -0.1572, +0.0386, -0.0018, +0.0436,
-0.7961, +0.5156, -0.0432, +0.3131, +0.1239, +0.4978, +0.0452,
-0.0287, -0.5577, +0.0747, +0.1409, +0.2401, +0.2333, +0.2038,
+0.0243, -1.4126, -0.0031, -0.5302, +0.1366, +0.4580, -0.9463,
+0.2064, -0.1316, -0.4320, -0.2185, +0.0589, +0.0850, -0.0792,
+0.1943, -0.2924, -0.1323, -0.2118, -0.4611, +0.4587, +0.0022,
+0.1149, -0.3412, +0.5380, +0.1929, +0.1675, -0.0647, -0.0765,
-0.4030, -0.8584, -0.1539, -0.0367, -0.5127, +0.1979, +0.0682,
-0.4566, -0.0323, -0.1059, -0.0351, -0.2856, -0.0800, -1.1048,
+0.1813, -1.0385, -0.2560, -0.5610, -0.8361, +0.7297, +0.3240,
+0.2591, -0.0883, +0.3069, -0.7018, -0.4564, -0.0893, -0.0577,
-0.3922, -0.2135, -0.0295, -0.2734, +0.2407, -0.5047, -0.1526,
-0.2218, +0.1511, -0.4272, -0.0752, +0.0014, +0.1558, -0.0979,
+0.2299, -0.4949, +0.0825, +0.2803, +0.2763, +0.3299, -1.4277,
-0.1536, +0.6685, -0.6775, -0.4354, -0.0490, +0.1724, +0.4184,
-0.9353, +0.0334
],
[
-0.0237, +0.1151, +0.3952, +0.0888, +0.2637, +0.0448, +0.0319,
-0.0838, +0.2934, +0.6864, -0.3903, +0.1003, -0.1447, -0.2490,
-0.1426, +0.3537, +0.0199, -0.1132, -0.2192, -0.2704, +0.0392,
+0.3247, -0.2443, +0.2803, -0.0727, +0.0282, -0.6015, +0.3304,
-0.1920, +0.1517, -0.0807, -0.3249, +0.2810, -0.1769, +0.1114,
-0.2788, +0.1025, +0.1887, +0.3154, -0.1968, +0.2691, -0.5229,
+0.2975, +0.2208, -0.6344, +0.1650, -1.2574, -0.6373, -0.5196,
+0.0786, -0.2593, +0.0290, -0.0111, -0.2176, -0.4665, +0.3046,
+0.1656, +0.0257, -1.1199, -0.3922, -0.8051, +0.5381, +0.3533,
-0.8867, +0.3014, +0.0465, -0.4772, +0.7044, +0.0911, +0.4027,
-0.6977, +0.6835, -0.1308, +0.2430, +0.0300, +0.6958, -0.3284,
+0.0979, -0.2630, +0.3343, -0.0862, +0.1222, -0.0311, +0.1402,
-0.0087, -1.2493, +0.0120, +0.4401, -0.2778, -0.1811, -0.4879,
-0.0948, +0.4839, -0.0848, -1.0303, -0.0772, -1.1900, -0.2582,
-1.0577, -0.0152, -0.5970, +0.2376, +0.3681, -0.2117, -0.3704,
-0.3580, -0.1064, +0.0134, +0.1705, +0.3112, -0.2077, +0.2380,
+0.3597, -0.2131, -0.1586, -0.0093, -1.0376, -0.1940, -0.0034,
-0.0930, +0.0630, -0.6793, -0.2775, -0.1766, -1.0874, -0.0242,
+0.1328, -0.1924
],
[
-1.1468, -0.5489, +0.6413, -0.2795, +0.2465, -0.4405, -0.4913,
-0.5075, -0.4153, +0.3906, -0.6549, +0.2271, -0.6259, -0.2240,
-0.4772, -0.1406, -0.4188, -0.8288, +0.0420, -0.2326, -0.0934,
-0.2890, -0.4452, +0.0712, -0.1635, -0.1321, -0.1832, -1.8540,
-0.1445, +0.5685, -0.3540, +0.3484, -0.2176, -0.5167, -0.3536,
+0.1132, +0.3618, -0.0910, +0.4795, -0.9537, -0.3972, -0.5818,
+0.0083, -0.0556, -0.4010, +0.0754, +0.1722, -0.7388, -0.2646,
-0.2676, -0.2518, -0.1411, +0.1569, -0.6923, +0.6213, +0.4417,
-0.4442, -0.3801, +0.1185, -0.2595, -0.2257, -0.6323, +0.5424,
-0.6875, +0.0820, -0.0442, +0.0114, -0.5173, -0.7365, +0.3089,
-0.4189, +0.1188, -0.8139, +0.4656, +0.3616, -0.2981, -0.8207,
-0.1880, -0.1042, -1.0588, -1.5954, -0.1536, +0.0503, -0.9106,
-0.9322, +0.2419, -0.1794, -0.0604, +0.0307, +0.0501, -0.4008,
+0.1348, -0.6489, -0.5990, +0.1142, +0.6489, +0.0001, -0.0647,
-0.2143, -0.3874, +0.1357, -0.5418, +0.5746, +0.1351, -0.0070,
-0.6785, -0.3209, +0.5239, -0.1253, +0.1702, -0.5094, +0.0634,
+0.2103, +0.7014, -0.2335, +0.3936, -0.2217, -0.0133, -1.0560,
+0.5696, +0.2297, -0.1418, +0.0136, -0.3143, -0.5973, +0.6279,
-0.3356, -0.9309
],
[
-0.5827, -0.0787, +0.3842, +0.6057, -0.3744, -0.1667, +0.2557,
+0.5199, +0.3724, -1.0853, -0.0962, +0.1177, +0.3746, -0.3622,
-0.6888, -0.1403, -0.3840, -0.4151, +0.3217, +0.3957, -0.1144,
+0.0298, -0.0959, +0.1055, -0.0409, +0.3927, -0.2206, +0.0536,
-2.3865, -0.2262, +0.3847, -0.1112, -0.3791, +0.2793, -0.0349,
-0.3009, +0.0038, -0.3509, +0.5816, +0.2646, -0.0839, -0.9176,
-0.2282, -0.5341, -0.3662, -0.9341, -1.9107, -0.0757, -0.8490,
-0.4928, +0.3568, -0.0798, +0.5515, -0.2662, +0.1106, +0.1013,
+0.0582, +0.3810, +0.0315, -0.0477, +0.2104, -0.3271, -0.2738,
-0.8973, +0.2263, -0.5105, +0.0182, -0.4493, +0.2752, +0.4808,
+0.2276, -1.3286, -0.0032, -0.4071, +0.5225, -0.0665, +0.2638,
-0.4450, +0.0857, +0.3098, +0.1059, +0.1158, +0.2210, -0.0440,
-0.1754, -0.9768, -0.1933, -1.1677, -0.1349, +0.1467, -0.3771,
+0.3430, +0.1020, +0.0143, -0.0550, -0.9697, -0.1434, -0.3386,
-0.9714, -0.0249, +0.0532, +0.0235, -0.0579, +0.2284, +0.4151,
-0.2941, -0.4709, -0.0016, -0.0338, +0.4262, +0.4149, +0.5157,
+0.3371, -0.1495, +0.4588, -0.3911, -0.4941, +0.2330, -0.5315,
+0.0509, -0.5540, +0.1312, -0.2170, +0.0867, +0.4329, -0.0276,
+0.5138, +0.3968
],
[
+0.1998, -0.6684, +0.2405, -0.2226, +0.0870, +0.1251, -0.4833,
-0.4312, -1.0778, -0.4821, +0.4113, +0.0560, +0.6275, +0.0820,
-0.0387, +0.0599, +0.4298, -0.1943, -0.1843, -0.0708, +0.0096,
-0.7072, +0.3246, +0.6720, -0.7070, +0.2634, +0.2187, +0.3547,
+0.0874, +0.0002, -0.6501, -0.6871, -0.2653, +0.0179, -0.0689,
+0.0401, -0.2196, +0.1604, -0.3960, -0.1868, -0.1688, -0.0027,
-0.1126, -0.2441, -0.4034, -0.1133, -0.3361, -0.3621, +0.3158,
-0.7948, -0.0176, -0.1477, +0.3512, +0.3682, +0.4621, +0.0704,
+0.3446, +0.0440, -0.4264, +0.3301, -0.3714, -0.5986, -0.6574,
+0.4734, -0.0074, +0.2797, -0.4191, -0.2061, +0.5278, -0.2210,
-0.0518, +0.5543, -0.0311, +0.2562, -0.1211, -0.4534, -0.9899,
+0.4539, -0.1718, +0.2690, -0.7115, +0.1055, -0.0716, +0.1627,
-0.1308, +0.1347, +0.7711, -0.6220, -0.7527, -0.5695, +0.0804,
-0.7078, +0.0921, -0.3860, +0.4700, -0.1721, -1.0485, +0.0481,
-0.0229, +0.0861, +0.4104, +0.2938, -0.5985, +0.6484, -0.0059,
-0.4095, +0.2968, +0.1489, +0.0121, -0.7843, -0.5046, -0.3111,
-0.1684, +0.0097, +0.5670, -0.0855, +0.3821, -0.0115, +0.2500,
-0.1258, -0.5862, -0.0663, -0.2129, +0.2693, +0.2945, -0.2954,
+0.2243, +0.0915
],
[
+0.1491, -0.4850, +0.0809, -0.2971, +1.0399, +0.3669, -0.0123,
-0.1522, +0.1059, +0.2378, +0.2477, -0.2592, +0.3836, -0.0857,
-0.3333, -0.6129, +0.0457, -0.0590, -0.0397, +0.1242, -0.5554,
+0.0273, -0.4591, +0.5780, -0.2889, +0.3091, +0.2659, +0.4226,
+0.4354, +0.2692, +0.4760, +0.3523, +0.0599, -0.2643, +0.1443,
-0.1300, +0.0442, +0.1209, +0.1833, -0.4734, +0.1125, -0.2706,
-0.1989, +0.2754, +0.1445, +0.0215, -0.4975, -0.2346, -0.2444,
-0.5111, -0.5242, -0.4954, +0.5329, +0.0265, +0.1177, +0.2493,
-0.4953, +0.0555, +0.0568, +0.2252, -0.1676, -0.1292, +0.1785,
+0.1137, -0.0854, +0.7888, -0.0573, +0.5062, -0.0035, +0.5743,
+0.4510, +0.3742, -0.4986, +0.1980, -0.1241, -0.0968, -0.8677,
-1.0860, -0.1191, +0.2507, +0.0636, +0.3880, -0.2442, -0.1665,
+0.3182, -0.3076, -0.1607, +0.6608, -0.2604, +0.0612, -0.1349,
-0.3337, -0.8032, -0.3801, +0.1673, -0.0237, -0.0209, +0.0583,
-0.1621, -0.0147, +0.2593, +0.0167, +0.1946, -0.6068, +0.2827,
-0.5656, +0.2463, -0.0721, +0.1951, -1.2302, -0.0800, -0.5759,
-0.5164, -0.2900, -0.2080, +0.0350, +0.0602, -0.1280, -0.0347,
-0.1066, +0.2023, +0.4489, +0.6241, +0.3711, +0.5918, +0.2161,
-0.4956, +0.1668
],
[
-0.1357, -0.1698, -0.7316, +0.3652, +0.0674, -1.2720, -0.0912,
-0.5569, +0.0930, -0.0845, +0.1653, +0.0734, -0.3262, +0.0146,
-0.4687, +0.1030, +0.1811, +0.1278, +0.2951, -0.4805, +0.1283,
-0.3634, -0.1027, +0.0652, -0.3734, +0.0178, +0.0866, -0.5421,
-0.2491, -0.0326, +0.1385, -0.8705, +0.2461, -0.0332, +0.4078,
+0.1592, +0.0401, -0.7436, +0.3928, +0.1498, +0.5691, -0.3048,
+0.0986, +0.2868, -0.5354, +0.1134, +0.4664, +0.0562, +0.2706,
-0.1634, +0.3235, +0.3488, -0.0153, -0.2665, -0.0167, -0.4450,
-0.4409, +0.2170, -0.1988, -0.2794, -0.0368, -0.2582, -0.1815,
+0.0439, -0.3384, +0.4003, -0.0865, -0.5755, -0.0364, +0.1403,
+0.0383, -0.1997, -0.1938, +0.0637, +0.2091, +0.3471, +0.0495,
-0.1602, +0.2309, +0.0595, -0.2814, -0.2176, +0.4826, -0.7634,
-0.0873, -0.1180, -0.1384, +0.1120, -0.2059, -0.2466, +0.1173,
-0.3572, -0.3572, -0.2505, -0.6559, +0.1643, +0.1230, +0.2162,
-0.6531, -0.8617, +0.4565, -0.1033, +0.5384, -0.8266, -0.3981,
+0.4561, +0.2701, +0.2100, -0.1536, +0.5401, -0.2075, -0.4098,
-0.4645, +0.1560, -0.2259, -0.3549, +0.4475, -0.1320, +0.2875,
-0.7113, -0.2840, +0.1135, -0.0461, -0.5452, -0.1279, -0.1301,
+0.2210, +0.1697
],
[
+0.6435, -0.5374, +0.7059, -1.2844, +0.3331, +0.1965, -0.0462,
-0.3113, -0.5450, +0.1836, -0.8688, +0.1490, +0.1404, +0.2400,
+0.5905, +0.6755, -0.4403, -0.3344, +0.6835, -0.2276, +0.4156,
-0.0392, +0.0920, -0.4069, +0.3981, +0.4558, -0.0543, +0.1275,
+0.6342, +0.2539, -0.0430, +0.3649, +0.5104, -1.1261, -0.0370,
+0.3103, +0.3421, -0.1283, +0.4646, +0.2611, +0.3789, +0.1872,
-0.7172, -0.7900, -0.1028, -0.3773, -0.3112, +0.3605, -0.0117,
+0.4345, +0.7488, +0.1943, +0.0338, -0.3219, -0.4089, -1.0467,
+0.3920, -0.0685, -0.0742, -0.6270, +0.4976, +0.4420, +0.2313,
-0.0729, -0.0655, -0.0379, +0.0229, +0.2117, +0.2947, -0.5324,
+0.3692, -0.4863, +0.6824, -0.3295, -0.1910, -0.1978, +0.5208,
+0.4267, +0.0503, -0.2036, +0.2884, +0.2474, -0.0536, +0.1046,
-0.0932, -0.1035, +0.0076, +0.2182, +0.0644, -0.0851, -0.2564,
+0.1322, +0.2937, +0.1040, -0.3049, +1.0216, -0.9281, +0.0380,
-0.1511, +0.4697, -0.1954, -0.3933, -0.2124, -0.3136, +0.6186,
+0.4962, +0.5756, -0.1871, -0.2659, +0.1281, +0.4317, +0.7874,
-0.0278, +0.2312, -0.3561, +0.1679, -0.4217, +0.2351, +0.2232,
+0.5258, -0.0928, +0.6838, -0.7248, +0.4629, +0.2531, +0.1841,
-0.1334, -0.0406
],
[
-0.3188, -0.0379, -0.7361, +0.3839, +0.4312, +0.1416, -0.6620,
-0.1834, +0.6492, +0.2045, +0.0918, +0.2239, -0.3887, -0.4778,
-0.2293, +0.2039, +0.6022, -0.3230, -0.0367, -0.0062, +0.5820,
-0.1047, -0.0389, -0.2042, +0.1448, +0.5953, +0.2238, -0.2452,
+0.0376, +0.4239, -0.0790, -0.1298, -0.4279, +0.1278, +0.2601,
+0.6771, +0.0499, +0.9155, -0.3233, +0.1631, +0.2057, -0.2145,
+0.3982, -0.0398, -0.7616, +0.2762, -0.0642, -1.2607, -0.4831,
+0.2535, +0.3595, +0.8134, -0.6300, -0.7384, -0.5111, +0.5001,
+0.0594, +0.0364, -0.1260, -0.0926, -0.0640, -0.1960, -0.1721,
-0.5651, +0.4040, +0.2053, -0.5327, +0.1512, +0.2509, -0.1374,
-0.0726, +0.3524, -0.3582, -0.2351, -0.2557, -0.0285, -0.1707,
+0.6744, +0.2174, -0.4556, +0.2178, +0.3446, -0.3666, +0.1903,
-0.2920, +0.3549, +0.2236, -0.0646, +0.5224, -0.1073, +0.1408,
-0.1009, +0.1210, -0.1780, +0.2569, +0.5953, +0.1192, -0.0251,
+0.4589, +0.3212, +0.0774, -0.2077, +0.1941, -0.0578, -0.0650,
+0.4753, -0.4224, +0.4952, +0.1230, -0.2847, +0.1758, -0.5142,
+0.4695, -0.3469, -0.3873, +0.5217, +0.0902, +0.7590, +0.0308,
-0.2582, -0.5947, +0.0503, +0.3442, -0.3170, +0.0445, -0.2957,
+0.5019, +0.2107
],
[
+0.1048, -0.0561, -0.0525, -0.7182, +0.0354, +0.3660, -1.1555,
-1.1208, -0.5165, -0.0398, +0.0327, -0.3179, +0.0262, -0.3808,
+0.0158, +0.0145, +0.4033, +0.3654, +0.5534, +0.4138, +0.0817,
-0.1434, -0.0530, -1.4958, +0.4055, +0.2290, +0.1861, +0.3552,
+0.1662, -0.3241, +0.0854, +0.1171, +0.0808, -0.1947, -0.2937,
-0.7268, -0.7695, +0.3824, +0.5217, +0.1133, -0.4979, -0.2852,
-0.4815, -0.4886, +0.3429, -0.2525, +0.5806, -0.2051, +0.3463,
-0.5691, -0.5052, -0.4976, +0.2206, -0.7014, -0.0586, -0.2747,
-0.8545, -0.0350, -0.3665, -1.5639, +0.0383, -0.4149, +0.1876,
-0.8109, -0.4667, +0.2656, -0.0170, -0.3138, -1.2522, +0.4170,
+0.0891, -0.1986, -0.7625, -0.6820, -0.3524, +0.3081, -0.1074,
-0.5283, +0.3158, +0.0359, +0.4437, +0.1540, -0.4941, +0.1000,
-0.1309, -0.2385, -0.9683, +0.5341, -0.0965, -0.1651, -0.1631,
+0.1685, +0.1192, -0.0842, +0.5338, +0.4437, +0.0736, +0.8859,
-0.5837, -1.0810, +0.5466, -0.6217, +0.2345, -0.2710, +0.0490,
-0.3720, -0.1459, -0.1457, -0.2499, -0.9045, +0.1468, -0.4773,
+0.4526, -0.1049, +0.4992, -0.8887, +0.5294, +0.0333, +0.3291,
-0.5449, -0.8528, -0.1049, -0.1040, -2.6816, +0.3526, +0.2408,
-0.9251, +0.0610
],
[
-0.2964, -0.2083, +0.0424, +0.0505, +0.1074, +0.3514, -0.0107,
-0.5381, -0.3241, -0.3222, +0.0024, +0.6921, -0.4159, -0.4654,
+0.2284, -0.7048, -0.6569, +0.0266, -0.1426, +0.0671, -0.0949,
+0.0700, +0.5167, +0.2447, +0.2482, +0.1973, -0.1360, +0.3100,
+0.2290, +0.4399, +0.1359, +0.0125, -0.1705, -0.6492, +0.0810,
+0.2847, +0.1981, -0.2366, -1.4675, -0.1468, +0.4505, +0.2943,
-0.5034, -0.5678, -0.0262, +0.2682, +0.4964, -0.6642, -1.1591,
+0.4420, +0.2433, -0.0938, +0.2768, -0.8135, -0.9967, +0.0143,
-0.2953, +0.1051, -0.5533, +0.0041, -0.6914, -0.1544, +0.3746,
+0.3854, +0.2449, -0.2513, -0.4268, -0.3551, -0.0974, -0.5500,
+0.0167, +0.2236, +0.3164, -0.1721, +0.1192, +0.3897, -0.4358,
-0.6739, -0.4272, -0.1869, -0.3346, -0.1730, -0.5168, -0.1763,
-0.5286, -0.0470, -0.0356, +0.0258, -1.1001, +0.0107, -0.1562,
+0.2231, -1.0873, -0.3293, -0.5195, +0.5809, +0.1942, -0.1022,
-0.2603, -0.1272, +0.1439, -0.2179, +0.2000, +0.0747, -0.4566,
+0.4470, +0.3608, -0.6655, +0.1031, -0.4562, +0.2097, -0.0302,
+0.3557, -0.9670, -0.4443, +0.2522, +0.3013, -0.2915, +0.0225,
-0.1372, -0.4500, -0.2117, -0.1959, -0.0600, +0.5294, -0.1705,
-0.1156, -0.0542
],
[
-0.0208, +0.7405, -0.4383, +0.5303, +0.2675, +0.0568, -0.4659,
-0.3365, +0.4135, +0.3779, +0.1938, -0.7019, -0.2156, -0.5470,
-0.7469, +0.1874, +0.0905, -0.5753, -0.7818, -0.1834, +0.4375,
-0.2027, -0.1311, -0.1083, -0.3591, +0.3264, +0.0091, -0.6734,
+0.3241, +0.2321, -0.1777, -0.0185, +0.2118, +0.0124, +0.1134,
+0.0124, -0.4860, -0.0174, +0.3886, -0.7700, +0.1497, +0.1214,
+0.2275, +0.0411, +0.1275, -0.4150, +0.0006, +0.7057, -0.9674,
-0.3909, +0.1516, -0.4077, -0.0694, +0.3073, -0.0010, -0.1849,
+0.7372, +0.0443, -0.1081, -0.0876, -0.3725, -0.2869, -0.4462,
-0.4792, +0.3107, -0.3265, -0.1656, -0.3827, -0.1651, +0.3184,
-0.2078, +0.0350, +0.1123, +0.0150, +0.0277, +0.2372, -0.4666,
+0.1226, -0.1048, +0.2568, -0.2841, -1.0603, +0.1028, +0.3149,
+0.5520, -1.8827, -0.3189, -0.2473, +0.2472, +0.0820, -0.4540,
+0.5905, +0.3832, -0.6837, +0.2165, +0.0537, -0.2796, -0.0364,
-1.2825, -0.3029, +0.3441, -0.2498, -0.2441, -0.2514, -0.4793,
+0.2179, +0.5167, +0.6067, -0.3378, -0.1064, +0.1996, -0.5619,
-0.4490, -0.1642, -0.1905, +0.0808, +0.2745, -0.2686, +0.1887,
-0.0947, -0.5427, +0.1202, +0.4490, -0.6860, -0.7833, +0.0671,
+0.2752, -0.7211
],
[
-0.2267, -0.4105, +0.0993, -0.8289, -0.0224, +0.0670, -0.0504,
-0.1812, -0.7614, +0.3466, -0.7155, +0.1835, -0.1513, +0.1975,
+0.0964, +0.0733, -0.1324, -0.1731, +0.5462, +0.0405, +0.4601,
+0.0706, +0.1772, -0.1179, +0.2340, +0.0641, +0.1571, -0.6174,
-1.5172, -0.0205, -0.3400, -0.1019, +0.1146, +0.2761, -0.7689,
-0.4238, -0.3081, +0.1397, -0.4290, -0.0428, +0.1390, -0.1651,
+0.0185, -0.4418, -0.0883, +0.4287, -0.5795, +0.0443, +0.0215,
+0.0641, +0.2475, +0.0211, -1.1612, -1.4932, -0.8886, +0.0467,
+0.1714, -0.2832, -0.3644, -0.0116, -0.3516, -0.5362, -0.2998,
-0.1456, -0.0855, +0.0317, +0.0449, +0.1793, +0.4453, -0.0510,
-0.3119, -0.4332, +0.1258, -0.2377, -0.0082, +0.0491, +0.7216,
+0.0419, +0.0119, +0.1955, +0.2907, -0.4195, +0.2496, +0.1890,
+0.0793, -0.3036, -0.2109, -1.3736, -0.3541, +0.1394, -0.9612,
-0.4894, -0.7474, +0.2959, -0.4060, +0.5467, -0.3859, -0.5294,
+0.2514, +0.1297, -0.4952, +0.0727, +0.5270, +0.0924, -0.4186,
+0.4920, -0.6645, +0.2551, -0.2143, -0.1432, -0.1560, +0.6847,
-0.0212, -0.8119, -0.1084, +0.0864, -0.4569, +0.1187, -0.1744,
-0.2312, -0.2499, -0.2763, -0.9371, +0.6095, +0.0696, -0.2863,
-0.3708, -0.5223
],
[
-0.0993, +0.1409, -0.1793, -0.2212, +0.4238, -0.0122, +0.6906,
+0.1833, -2.0175, +0.3794, +0.4159, +0.5124, +0.7251, +0.7544,
-0.4348, +0.1692, -0.3089, +0.7574, -0.0629, -1.1735, +0.2699,
-2.2604, -0.2160, -0.0578, -0.8138, +0.0195, +0.3376, -1.2619,
+0.3395, -0.1720, -0.6129, +0.0844, +0.3609, -0.1273, +0.2086,
+0.3600, -0.1894, -3.0401, +0.3955, -0.4007, -0.0204, -0.6748,
+0.0073, -0.8910, +0.2773, +0.1150, +0.8242, +0.7532, +0.1008,
-0.0217, -0.0444, +0.1562, +0.6075, -0.1385, +0.8037, -1.2225,
-1.0055, -0.3115, -1.3787, -0.6643, +0.3021, +0.1386, +0.2093,
-0.3072, +0.0422, +0.1009, +0.3473, +0.2925, +0.3533, +0.4157,
+0.4200, +0.2637, +0.0863, +0.4686, -1.2533, +0.3166, +0.9838,
-0.6280, +0.1329, -1.0853, -0.5044, -0.1253, +0.1313, -1.1326,
+0.1254, -0.1927, -0.1857, +0.1723, +0.0076, -1.2293, -0.2048,
+0.4640, -0.2034, -0.6014, -0.3775, -0.3859, +0.3363, -0.2945,
+0.0382, +0.5226, -0.1826, +0.6612, -0.3325, +0.1265, +0.3302,
+0.1980, -0.2690, +0.0844, -0.2510, -0.4544, -1.5446, +0.1948,
-0.1010, -0.0159, -0.1156, -0.3827, -0.1778, +0.0800, -0.8696,
+0.6485, -1.0103, -0.4169, -0.7944, +0.0499, -0.6260, -0.5471,
-0.4483, -0.5094
],
[
+0.5392, +0.1172, +0.0275, +0.1544, +1.1570, -0.3080, -0.5902,
-0.2057, -0.0664, -0.4454, +0.2383, +0.0378, +0.5122, -0.3850,
+0.3855, -1.8852, +0.1903, -0.2271, +0.1657, -0.0089, -0.7770,
-0.3977, +0.1360, -0.7773, -0.2348, +0.0727, +0.2212, -0.3639,
+0.6679, +0.2904, +0.1359, -0.2630, +0.1017, +0.3486, +0.0137,
-0.6607, +0.0223, -0.2305, +0.8595, -0.3969, -0.4659, -0.5061,
+0.0212, -0.1709, +0.0391, -0.2071, +0.4317, +0.4371, -0.2553,
+0.5389, +0.8191, +0.5738, -0.6220, -0.2034, -1.1353, -0.3365,
+0.4038, -0.3686, +0.2349, -0.6040, -0.4666, -1.0111, +0.2354,
-0.2390, -0.2813, +0.0137, -0.3303, +0.4529, -0.3076, -0.0214,
-0.3884, -0.1122, +0.7831, -1.2856, +0.2155, -0.5377, +0.2776,
-0.1138, -0.2114, +0.1813, +0.1368, -1.0741, +0.0916, +0.5912,
-0.2229, -1.0282, -0.1046, +0.4429, -0.4369, +0.2257, -0.1841,
-0.1243, -0.5969, +0.6227, +0.1674, -0.5557, +0.4748, -0.2929,
-0.7047, -0.2068, -0.1569, -0.6345, -0.1933, +0.1590, -0.2230,
-0.1560, +0.4469, -0.3296, -0.0643, -0.1517, -0.6115, -0.4458,
-0.8033, -0.7308, +0.3220, -0.2440, -0.3975, +0.9195, +0.0928,
+0.1867, +0.1243, -0.1109, -0.5436, -0.7016, +0.1229, -0.2620,
+0.0747, +0.0395
],
[
-0.0930, -0.3029, -0.8064, +0.0595, -0.0668, +0.1339, -0.0232,
+0.1940, -0.0714, -0.2307, +0.1003, +0.3736, -0.0819, -0.2503,
-0.2354, -0.0711, +0.1693, +0.0067, -0.4982, +0.3025, +0.0624,
-0.0853, -0.6454, +0.1086, +0.2843, -0.1772, -0.4409, -1.4024,
+0.3635, -0.0416, +0.0553, +0.4314, -0.6700, +0.1636, -0.3898,
+0.0493, +0.3268, -0.0700, +0.0769, -0.0717, +0.2101, -0.0093,
+0.2108, -1.5399, +0.2003, -0.4194, -0.8017, +0.1748, -0.8769,
-0.0063, +0.1513, +0.0150, -0.6772, +0.1734, +0.2375, +0.0514,
-0.1617, +0.3689, -0.5984, +0.1162, -0.1536, -0.0563, +0.2035,
-0.0057, -0.6710, +0.0289, +0.4487, +0.2953, -1.2650, -0.0297,
-0.5534, -0.6967, +0.0130, -0.1984, -0.7579, +0.5317, -0.0111,
+0.0940, -0.5060, -0.1077, -0.1284, +0.0490, -0.1276, -1.8656,
+0.3652, +0.0840, -0.1318, -0.1325, -0.3580, -0.2401, +0.0261,
+0.2243, +0.0091, +0.1637, +0.0901, +0.0508, -0.0459, -0.1134,
-0.8943, -0.1263, +0.3323, -0.0566, -0.3716, +0.2921, +0.0915,
-0.1310, -0.3878, -0.4224, -0.7798, -0.2240, -0.2051, -0.7585,
-0.5029, -0.0167, -0.1496, -0.0068, -0.1469, +0.3943, +0.3463,
-0.7880, +0.0019, +0.4755, -0.2236, -0.3436, +0.1817, -0.2543,
-0.2750, -0.6216
],
[
+0.2941, +0.5212, -0.5303, +0.2481, -0.1247, +0.2795, -1.5880,
-0.7743, +0.3191, -0.3844, +0.3387, -0.5577, -0.6174, +0.3974,
-0.4779, -0.7220, -0.0463, -0.0913, -1.3168, +0.0135, +0.1484,
-0.0498, +0.2259, +0.4178, -0.0876, -0.2693, +0.1369, +0.2502,
-0.0656, -0.5450, +0.2394, +0.7425, -0.1176, -0.6768, +0.3786,
+0.4059, -0.6969, -0.0171, +0.0904, -0.5270, -0.0360, -0.3437,
+0.4420, +0.2451, -0.6108, +0.2447, -0.9276, -0.1345, -0.4143,
+0.1865, +0.1812, +0.0931, -0.1564, -0.5130, -0.7225, -0.0157,
+0.1752, -0.4479, +0.0675, +0.5620, -0.4499, +0.3246, +0.0693,
+0.2682, +0.3500, -0.2140, -0.6988, -0.0295, -0.1263, -0.0159,
-0.2053, -0.4079, +0.4124, -0.0439, -0.4604, +0.1539, -0.0377,
+0.5189, -0.5679, +0.2178, -0.4457, -0.4877, +0.3757, +0.4720,
-0.9661, -0.1367, -0.0157, +0.4836, -0.1011, +0.1128, -0.4998,
+0.4024, +0.7681, +0.1909, -0.1169, -0.5119, -0.2230, +0.2154,
-0.1260, +0.2651, -0.7235, -0.3367, +0.0055, -0.4508, +0.4804,
-0.2347, -0.5094, -0.1651, +0.2996, -0.0992, +0.2322, +0.2990,
-0.3644, -0.6489, +0.3211, +0.3138, +0.0483, +0.0671, +0.5185,
-0.1174, +0.1915, -0.0306, -0.5184, +0.2785, -0.4954, +0.1726,
+0.6179, -0.0235
],
[
+0.2760, +0.3725, -0.0371, -0.5838, +0.2662, +0.1452, +0.3655,
+0.2378, -0.1644, -0.9611, +0.5298, -0.4268, -0.3355, +0.3852,
+0.4199, -0.3979, -0.3181, +0.3396, +0.3900, -0.5893, -0.2061,
-0.1745, +0.3943, +0.6097, -0.2366, +0.0709, -0.6992, +0.3411,
+0.4275, +0.0919, -0.0053, -0.3365, +0.4480, -0.1550, -0.8431,
+0.5171, -0.4993, +0.1273, -0.2844, -0.3381, +0.5470, -0.2013,
+0.2882, -0.9610, +0.1412, -0.0964, +0.0044, +0.0038, +0.2124,
-0.5394, -0.2606, -0.1058, +0.0822, -0.4766, -0.4265, -0.4760,
-0.0880, +0.0038, -0.3442, +0.0923, +0.0758, +0.2901, +0.0425,
+0.5120, +0.0898, +0.5336, -0.1097, -1.1518, +0.3573, +0.0061,
+0.1088, -0.5034, +0.4174, +0.2556, +0.1247, -0.1178, +0.1635,
-0.0455, +0.3008, +0.1288, -0.1310, -0.1485, +0.2552, -0.6359,
-0.0076, -0.1008, -0.3596, -0.2426, -0.2802, -0.3335, +0.0636,
+0.0919, -0.6020, -0.1037, +0.2003, -0.2811, +0.7944, +0.4460,
+0.6442, -0.0287, -0.3200, +0.4558, +0.2535, -0.2042, -0.1385,
+0.0802, -0.8761, +0.2059, +0.1495, -0.0019, -0.5408, +0.2027,
-0.4553, -0.4089, +0.0049, -0.5366, +0.1640, +0.3857, +0.3282,
-0.2697, -0.0992, +0.4844, -0.1067, -0.0197, -0.2498, +0.3069,
-0.0976, -0.7914
],
[
+0.0142, -0.0026, -0.5157, -0.4681, +0.0490, +0.1227, -0.2058,
-0.8495, +0.1658, -0.2885, +0.1190, +0.1598, +0.5551, -0.3939,
-0.6779, -0.1901, -0.6929, -0.4041, -0.0699, -0.0882, -0.0221,
+0.1317, +0.3063, +0.2556, +0.1287, -0.0917, -0.0125, -0.3015,
-0.3198, -0.1379, -0.2990, +0.3797, +0.1721, +0.5024, +0.2430,
+0.0677, +0.1688, +0.0327, -0.1941, -0.0533, -0.3133, +0.1973,
-0.2886, -0.3350, -0.0477, +0.2165, +0.3585, -0.1051, -0.1397,
-0.1625, -0.7361, -0.2345, +0.3849, -0.1586, -0.3379, +0.2672,
-0.0470, -0.1875, +0.2920, +0.2989, -0.2291, +0.6091, +0.0558,
+0.1342, -0.8134, -0.2803, -1.0841, -0.2406, -0.1681, -0.5506,
-0.2986, +0.2645, -0.0933, +0.1797, +0.0895, -0.2683, +0.4570,
-0.0242, +0.1440, -0.1492, -0.0114, -0.0047, +0.2500, +0.1609,
-0.0815, +0.5155, -0.8726, -0.3475, +0.2900, +0.2171, +0.8899,
+0.0279, -0.0639, -0.7957, -0.6067, -0.2539, +0.2562, -0.1503,
+0.2246, -0.4192, +0.2636, +0.3823, -0.0439, -0.1201, +0.7856,
-0.4672, -0.4431, -0.2572, -0.3309, +0.1956, +0.1299, -1.4765,
-0.5911, +0.2877, -0.1105, +0.0746, -0.4482, +0.5192, +0.0021,
+0.1777, -0.9104, -0.4471, -0.3169, +0.0379, +0.2301, -0.0664,
-0.3687, -0.3786
],
[
+0.1452, +0.4198, +0.0029, -0.0653, +0.1279, -0.6948, -0.1948,
-0.1250, -0.1105, -0.2747, +0.4042, +0.1690, -0.2262, +0.7211,
-0.4752, -0.4305, -0.4072, -0.2737, -0.5282, -0.8213, +0.4226,
-0.2642, -0.7944, +0.1969, -0.6681, -0.4189, +0.1181, -0.4910,
-0.2916, +0.0997, +0.0117, -0.0747, +0.1751, -0.3160, +0.2432,
+0.5472, -0.0317, -2.7381, +0.0325, +0.2137, -0.1013, +0.1664,
-0.0917, -0.9620, +0.5616, -0.2506, +0.2052, +0.2765, +0.2510,
+0.0808, -0.6145, -0.1196, -0.0933, +0.3694, +0.4940, -0.2804,
-0.2966, -0.0262, -0.8431, -0.1175, +0.0853, -0.0399, -0.0043,
+0.3427, -0.2527, +0.0856, +0.3868, +0.3824, -0.0973, +0.2683,
-0.4012, +0.2811, +0.1426, -0.0987, -0.1868, -0.1632, -0.6806,
+0.0187, -0.0480, -0.4748, +0.0284, -0.9966, +0.1995, +0.3622,
+0.4135, -0.4110, +0.1553, +0.1887, +0.1804, +0.0739, -0.1146,
+0.5380, -0.3272, -0.0136, -0.1157, +0.3098, +0.3483, -0.4912,
+0.2376, +0.3166, -0.1080, -0.2667, +0.3626, -0.0983, +0.2281,
+0.0979, +0.1868, +0.0387, +0.3307, -0.4710, -0.3623, +0.0682,
-0.0451, +0.1160, +0.8219, -0.7232, -0.0459, -0.0459, -0.3669,
-0.5020, +0.1200, +0.1164, -0.4330, -0.1329, +0.1223, +0.1938,
-0.3021, -0.0096
],
[
+0.0307, -0.1074, -0.5054, +0.1841, +0.3880, -0.4125, -0.0031,
-0.8579, +0.1511, -0.4849, +0.3685, -0.0198, -0.1758, -0.1953,
-0.0517, -0.6631, -0.2308, -1.0682, +0.0739, -0.6081, -0.0497,
+0.2496, +0.0853, +0.6000, +0.2825, +0.4384, +0.0759, -0.4724,
+0.4003, -0.3203, -0.2263, +0.3586, -0.1000, -0.6729, +0.3914,
+0.4385, -0.2797, -0.2717, -0.3915, +0.2754, -0.6234, +0.1709,
-0.5713, +0.0729, +0.0171, +0.3864, +0.1788, +0.7924, +0.4496,
+0.5952, -0.4405, +0.5408, -0.6600, -1.1460, -0.2819, -0.1166,
-0.4777, -0.2295, +0.5355, -1.2873, +0.1339, -0.2345, +0.0593,
-0.0066, +0.0373, -0.0773, +0.9912, +0.1974, -0.3827, -0.4358,
+0.2365, +0.2508, -0.2249, +0.0185, +0.0868, +0.0450, -0.0914,
-0.3509, -0.1456, +0.0433, -0.5912, -0.6836, -0.9345, +0.1830,
+0.4085, -0.5123, +0.0836, +0.2667, -0.4867, +0.1397, +0.8275,
-0.0913, -0.1340, +0.4561, +0.0949, +0.7140, -0.1231, -1.1192,
-0.7728, +0.0636, -0.8626, -0.3440, -0.0248, -0.3980, +0.2891,
+0.3982, -0.2360, +0.5865, +0.5816, -0.8450, +0.0182, -0.8395,
-0.4600, -0.0709, -0.6221, +0.3712, -0.1913, +0.3792, +0.0326,
+0.2845, +1.4206, -0.4405, +0.5666, -0.1124, -0.6945, -0.1388,
-0.4277, -0.0777
],
[
+0.1843, -0.1371, -1.4576, -0.0320, -0.2398, -0.3147, -0.8235,
-0.9139, -0.0446, +0.4619, -0.7615, -0.2812, +0.1924, -0.1018,
+0.1940, +0.3424, +0.0566, +0.2892, -0.4231, -0.6184, +0.2544,
-0.0587, +0.1673, -0.8947, -0.1791, +0.2497, -1.3585, +0.3457,
+0.1367, -0.0162, -0.2054, -0.2865, -1.2269, -0.0584, +0.2565,
+0.1843, -0.5112, +0.0977, +0.1516, -0.3964, -0.0925, -0.0725,
-0.6325, -0.0460, -0.9747, -0.2661, -0.6230, +0.4624, -0.1699,
+0.7612, -0.4710, -0.1355, -0.5187, +0.4608, -0.8770, +0.7354,
-0.3921, +0.6296, +0.5976, -0.6807, -0.4973, -0.0747, +0.2010,
-0.8008, +0.3282, -0.0850, +0.1498, -0.7899, -0.5724, +0.0551,
+0.0326, +0.8411, +0.1285, -0.2207, -0.0472, +0.1642, +0.0442,
-0.3305, +0.1524, -0.9644, -0.2065, +0.2000, +0.1004, +0.2642,
-0.5985, -0.2113, -0.2952, -0.0788, -0.1846, -0.6372, +0.4044,
-0.8786, -0.0542, +0.5890, +0.0451, -0.0903, -0.7676, +0.1204,
+0.3354, -0.4390, -0.3610, -0.3031, +0.2680, +0.0857, +0.6542,
+0.2660, -0.1265, +0.3335, -0.5802, +0.2781, -0.3541, +0.4898,
+0.1315, +0.7118, +0.0035, -0.0266, -0.3570, +0.3671, +0.0647,
-0.5382, -0.9323, -0.0421, -0.3779, -0.7472, +0.3330, -0.0476,
-0.3754, +0.0899
],
[
-0.2042, -0.0323, +0.3879, +0.5487, -1.6125, +0.4439, -0.1583,
-0.1561, +0.0041, -0.0937, +0.1418, +0.4048, -0.6339, -0.2292,
-0.4615, +0.1910, +1.1010, +0.2793, -0.1490, -0.1520, -0.4352,
-0.2968, +0.0764, -0.0107, +0.5407, +0.3415, -0.8503, -0.1676,
+0.1517, +0.2076, -0.3898, +0.0414, -0.3089, -0.4528, -0.1311,
-1.8980, +0.3833, -0.4061, +0.3816, -0.0483, +0.6989, +0.2631,
+0.0906, -0.3159, +0.4436, -0.4232, -0.1941, +0.0117, -0.5852,
-0.5719, +0.0158, +0.0229, -0.4120, -0.5200, -0.6199, +0.3683,
-0.0060, -0.2108, -1.0807, -0.6747, -0.1258, -0.5363, -0.1420,
-0.3519, -0.3312, +0.2380, -0.3571, -0.4272, -0.6733, -0.4430,
-0.0839, -0.2624, -0.9297, +0.4757, -1.1690, +0.5474, +0.3513,
+0.5209, +0.1999, -0.0798, -0.2623, +0.2463, -0.1697, -0.0553,
-0.8494, -0.0646, -0.7830, -0.1119, -0.6601, -0.0337, +0.0791,
-0.7605, -0.1590, -0.2137, +0.5531, -0.1455, -0.6184, +0.0261,
-0.2470, -0.1335, -0.0515, -1.1777, +0.5676, -0.3388, -0.9833,
-0.6877, -0.3134, +0.1848, +0.1014, +0.6343, -0.4910, +0.1301,
+0.1852, +0.5893, -0.1147, -0.1877, -0.3991, -2.2994, +0.3953,
+0.9315, -1.2463, -0.0389, -1.9675, +0.2797, -1.1813, -0.0691,
+0.0999, +0.2255
],
[
+0.0639, +0.2611, +0.0838, +0.0412, -0.7483, +0.2393, +0.0790,
-0.0965, -0.1130, +0.1024, -0.2223, -0.4675, +0.1104, -0.3107,
-0.0431, +0.4740, +0.2066, -0.2226, +0.1068, -0.1576, -0.2934,
+0.4886, -0.2895, +0.2457, -0.2344, -0.4610, -0.0189, -0.0641,
-0.6700, -0.3323, -0.3526, -0.0608, -0.2804, -0.2360, +0.2766,
-0.1703, -1.0815, -0.3406, -0.2445, -1.2578, -0.9418, +0.1628,
-0.2049, -2.1585, +0.3158, -0.4128, -0.0737, -0.3591, -0.1621,
+0.1021, -0.0658, -0.0335, -0.1421, +0.1696, -0.0457, +0.0978,
-0.1947, -0.5236, -0.2415, +0.5390, +0.0559, -0.4258, +0.0044,
+0.4105, -1.5538, -0.1270, -0.2763, +0.4124, +0.2169, -0.6059,
+0.1375, -0.5144, -0.4451, -0.5532, -0.5400, -0.6416, -0.6827,
-1.2415, -0.3844, +0.2479, -0.2285, +0.0698, -0.4980, -0.4911,
-0.2677, -0.4542, -0.4367, +0.1047, +0.4759, -0.0804, -0.3458,
+0.1239, -0.9496, +0.1120, -0.3820, -0.6315, -0.9806, +0.1917,
+0.0162, +0.2255, -0.3382, -0.2956, -0.3159, +0.1116, -0.3715,
+0.2648, -1.3455, -0.1177, -0.3607, +0.2901, -0.0607, +0.2687,
+0.2823, -0.0854, +0.1242, -0.5903, +0.3702, -0.2985, -0.8958,
-0.0392, +0.3378, -2.1146, +0.1999, -0.7328, -0.0326, +0.0213,
-0.4903, -0.6084
],
[
+0.4005, -0.3539, +0.0011, -1.1321, -0.0929, -0.1841, -0.0538,
-0.2418, -0.1983, +0.5027, -0.2828, -0.3017, +0.2616, -0.0047,
-0.4966, +0.5684, -0.3078, -0.0194, -0.0970, -0.5903, +0.0153,
-0.0594, +0.3514, +0.0435, -0.0569, -0.0351, -0.3814, -0.4379,
-0.0480, +0.2862, -0.3512, -0.3203, +0.1903, -0.0512, +0.2500,
-0.0621, -0.1596, +0.0077, -0.0288, +0.4258, +0.1465, +0.0066,
+0.3781, +0.2365, -0.2162, +0.1226, -0.2619, +0.3739, +0.4088,
+0.5015, +0.2144, -0.1472, -0.1681, +0.7557, +0.4556, -0.8190,
-0.7596, +0.0147, -0.9587, +0.3054, +0.3807, -0.2091, -0.0612,
-0.0192, -0.3241, -0.3563, +0.1805, -0.1966, +0.2087, -0.0979,
-0.2870, +0.0936, +0.4373, +0.2061, -0.8870, -0.3513, -0.3158,
-0.4968, -0.0804, +0.1094, -0.0568, -0.2142, +0.2822, +0.4175,
+0.1747, -0.4602, +0.0505, -0.0977, +0.0062, -0.5924, -1.1038,
+0.0881, -0.2514, +0.3786, -0.6522, -0.0987, +0.3761, -0.4479,
-0.2430, +0.3430, +0.0108, -0.2528, -0.2237, -0.3578, -0.4940,
-0.5558, +0.1648, -0.0010, +0.5309, +0.4639, -0.1798, +0.7006,
-0.0603, -1.2906, -0.4044, +0.2199, +0.1772, +0.3966, -0.0497,
+0.3213, +0.4042, +0.1926, +0.0290, -0.2981, -0.3246, -0.4370,
-1.1463, -0.4444
],
[
+0.3363, +0.0507, -0.2536, +0.0100, -0.0414, -0.0509, +0.0414,
-0.0734, -0.4132, -0.6428, +0.0566, -0.0504, +0.5851, +0.0472,
-0.2488, -0.9736, +0.4157, -0.4703, -0.4616, +0.1847, -0.2878,
-0.2250, -0.3208, +0.1068, -0.1276, -0.0589, -0.1503, -0.0336,
+0.1374, -0.2608, -0.3196, -0.3170, +0.3374, -0.1441, -0.2669,
-0.5909, +0.8994, +0.3808, -0.0591, +0.4503, -0.0050, +0.2127,
-0.0422, -0.5771, -0.0980, +0.0769, -0.3279, +0.1233, -1.0878,
+0.2904, +0.0990, -0.1093, +0.1928, -0.4382, +0.1245, -0.3089,
+0.1358, -0.0282, +0.2054, +0.2020, -0.0041, +0.5111, -0.1810,
-0.0360, -0.0613, +0.1710, -0.3795, +0.1224, +0.2107, +0.2086,
-0.4034, +0.3966, +0.2989, -0.2028, +0.0624, -0.4257, -0.0667,
-0.1747, +0.2471, -0.1497, +0.1393, -0.1737, +0.3902, -0.2525,
+0.2768, +0.3510, +0.7304, +0.0294, -1.1805, -0.3225, -0.6172,
-0.7072, -0.9334, -0.0625, -0.2341, -0.1015, -0.0004, +0.5904,
-0.2985, -0.4275, +0.2492, -0.3768, +0.4914, +0.5226, -0.3087,
-0.2044, +0.6708, -0.3427, -0.3538, +0.1439, -0.1679, -0.1714,
-0.0742, +0.2308, +0.2786, +0.3741, +0.1912, -0.2934, -0.4440,
-0.3034, -0.2523, +0.4034, +0.1184, +0.0691, +0.2607, -0.2009,
+0.2144, +0.0359
],
[
-1.0791, -0.6872, +0.6891, -0.8215, -0.0272, +0.1619, +0.1327,
-0.5389, -0.5439, +0.1500, +0.4166, -0.0599, +0.1693, +0.6353,
-0.5305, +0.3496, -0.1893, +0.1512, +0.2821, +0.5251, -0.2433,
+0.6280, -0.3474, -0.0719, -0.6998, -0.2908, -0.0741, -0.4300,
+0.0312, +0.0731, +0.1560, +0.2255, -0.0360, +0.3246, -0.5899,
+0.2137, -0.5356, -0.1725, +0.0803, +0.6945, -0.0740, +0.2914,
+0.1562, -0.1953, -0.0664, -0.1649, -0.1471, +0.2800, -0.7210,
-0.0192, -0.0773, +0.1419, -0.5461, -1.0192, +0.4102, -0.4465,
-0.5506, -0.0394, +0.2234, -0.1967, +0.3527, -0.3041, +0.4800,
+0.1482, -0.3358, -0.8865, +0.4729, +0.3006, -1.5214, +0.3571,
-0.1637, -0.3210, -0.3610, -0.0140, +0.1028, +0.2118, -0.8997,
+0.3469, +0.6706, +0.8076, -0.3839, +0.0217, +0.4903, +0.0804,
-0.4094, -0.4560, -0.3604, -0.6021, -0.5543, -0.2088, -1.1652,
-0.5443, -0.2383, +0.1295, -0.0840, -1.3197, -0.1383, -0.1171,
-1.2788, +0.6796, +0.2560, +0.3741, +0.4607, +0.2502, -0.9861,
+0.1005, -0.0201, -0.1667, -0.7721, +0.1416, -0.1328, -0.3513,
-0.5112, +0.5501, -0.5608, -0.0224, -0.6817, +0.3626, -0.7071,
-0.0967, -1.1760, -0.3415, -0.1030, -0.2168, -0.3978, +0.3884,
+0.0936, +0.4108
],
[
-0.2277, -0.0980, -0.0279, -0.0435, -0.6397, +0.4329, +0.0239,
+0.2381, +0.2800, -0.6969, +0.2404, +0.1888, -0.5496, -0.6255,
-0.1656, +0.0180, -1.1028, +0.2257, -0.2512, +0.2772, +0.2770,
-0.6592, +0.1774, +0.6097, -0.1839, -0.1900, -0.0550, +0.5695,
-0.1559, -0.4451, -0.2801, -0.0049, +0.2866, +0.3268, -0.5438,
+0.6265, -0.9367, -0.1937, -0.2782, -0.3756, +0.0420, -0.0782,
+0.0943, +0.5940, +0.2726, -0.0824, +0.4456, +0.3934, -0.4720,
-0.1995, +0.4892, -0.1421, +0.0174, -0.5084, -0.1167, -0.3024,
-0.0922, -0.3118, -0.0335, -0.0703, +0.2440, -0.9311, -0.0924,
-0.2168, -0.0706, -0.3731, +0.6529, -1.0958, -0.3929, -1.1930,
-0.4035, +0.4336, +0.1214, +0.5382, +0.7261, +0.1610, +0.2551,
+0.7774, +0.1628, -0.2756, -0.0601, +0.1143, -0.3179, +0.4815,
-0.1348, -0.1413, +0.1628, -0.1243, -0.0586, +0.3029, -0.5346,
+0.2212, -0.2135, +0.2941, +0.1778, +0.3688, -0.2048, -0.1424,
-1.0428, +0.5779, +0.3448, -0.8665, +0.1875, +0.1918, -0.4207,
+0.3115, -0.5152, -0.6870, +0.8070, -1.1151, +0.1282, +0.6629,
+0.4506, -0.1955, -0.0138, -0.2898, +0.0707, +0.3210, -0.8534,
-0.1408, -0.8929, -0.5922, -0.3174, +0.6164, +0.2929, -0.2424,
+0.0949, +0.3436
],
[
+0.3176, -0.3924, +0.0108, +0.0073, +0.0709, -0.7655, +0.5798,
-0.0885, -0.7087, +0.4234, +0.2339, -0.1536, -0.5395, +0.3095,
-1.2875, +0.0223, -0.0518, +0.1281, -0.1305, +0.2955, -0.4969,
+0.3210, +0.6567, -0.0472, +0.4891, +0.5844, +0.0608, +0.0588,
-0.0866, -0.3339, -0.0446, -0.1444, -0.0394, -0.5734, -1.1200,
+0.3550, +0.1063, -0.3921, +0.6259, +0.5534, -0.1268, +0.0167,
-0.2933, -0.0775, +0.3208, +0.3137, +0.1161, -0.5944, -0.4033,
-0.9224, -1.6031, -0.2286, +0.1156, -0.5235, +0.0071, -0.0999,
+0.2603, +0.2959, -0.3796, -0.1974, +0.6005, -0.1037, -0.5495,
-0.5030, -0.4202, -0.5696, +0.1623, +0.0398, -0.1173, +0.3304,
-0.1637, -0.2347, +0.6994, -0.5227, -0.9144, -0.2601, -0.4145,
+0.4540, +0.4762, -0.0762, +0.9777, +0.0984, -0.0808, -1.7371,
-0.0866, +0.2376, +0.1593, -0.2857, -0.0105, -0.8542, -0.3489,
+0.5299, +0.2855, +0.1606, -0.2741, -0.3969, +0.0376, -0.1196,
-0.3073, -0.5830, -0.2857, -0.0687, -0.3538, +0.0535, -0.1520,
-0.6600, +0.3730, -0.0237, +0.1413, -0.4994, -0.2943, +0.4360,
+0.0716, +0.0569, +0.0699, -0.0384, +0.1156, -0.1784, -0.2375,
-1.0747, +0.0669, +0.1220, -0.5153, +0.1585, -0.3345, +0.3933,
+0.4297, +0.1745
],
[
+0.0451, +0.1192, +0.1513, -0.4122, -0.4990, -0.3285, +0.0121,
+0.0996, -1.2994, +0.3913, -0.9270, +0.5467, -0.4719, +0.0201,
+0.5704, +0.5264, -0.0165, +0.0239, -0.0100, -1.1232, +0.0243,
+0.9827, -1.8897, -0.0513, -0.4016, -0.4913, -0.5011, -1.0531,
-0.7585, -0.8680, -0.3483, +0.6168, -0.6506, -0.1178, -0.2269,
-0.0077, +0.5145, -0.0643, +0.1179, +0.5522, +0.1897, -0.9912,
-0.3042, -1.2749, -0.0666, -0.8707, +0.1314, +0.0776, -0.3644,
-1.6311, +0.4315, +0.0053, -0.3322, -0.4387, +0.0335, +0.1827,
+0.4689, +0.1040, -1.2393, -0.4263, +0.0712, -1.5940, -0.0110,
+0.5220, -0.2696, +0.0326, +0.2011, -1.3511, +0.0776, +0.1381,
-0.9384, -0.2791, -0.7470, +0.3198, -0.0343, -0.3858, +0.7762,
-0.0530, +0.4048, -0.1381, +0.0476, +0.1951, +0.2237, -1.6044,
+0.2551, -0.4974, -1.8483, -0.7537, -1.6731, -1.0536, -0.1171,
+0.2709, -0.6349, +0.2810, +0.1560, +0.2926, -0.1911, +0.2209,
-0.7197, -0.0883, -0.0261, -0.3919, -0.2963, -0.2439, -0.6637,
-0.2337, -0.1216, -1.7968, -0.1052, -0.1832, -0.8489, -0.6524,
-0.2081, -0.1841, +0.2611, -0.4390, +0.7014, +0.1897, -0.4626,
+0.1440, +0.1534, +0.4789, +0.9003, +0.5775, +0.1325, -0.4038,
-1.6225, +0.5905
],
[
-0.3205, -0.4752, -0.4912, -0.4865, -0.0093, -0.8625, -0.7111,
+0.6141, +0.3583, -1.1199, -0.3957, +0.0922, -1.3333, +0.1549,
-0.0332, -0.1690, +0.0349, +0.0664, +0.1576, -0.9784, -0.0826,
+0.0687, -0.1124, -0.8914, -0.2932, -0.0694, -0.2219, +0.0341,
-0.1199, -0.3127, -0.2212, -0.1968, -0.1055, -0.1935, +0.1470,
-0.1760, -1.2864, -1.3588, -0.5615, +0.0230, +0.3473, -0.0310,
-0.0407, -0.9018, +0.3128, -0.2546, +0.1682, +0.1034, -0.9478,
-0.3190, -0.1191, -0.2102, -0.0530, -0.6558, +0.0063, -0.4495,
-0.0597, -0.1611, -0.5632, -0.0701, +0.3635, -0.2415, -0.1355,
-0.0856, -0.3443, +0.0222, +0.3314, -0.6737, -0.3053, -0.2678,
-0.4764, -0.1435, -0.6503, +0.1988, -0.9723, -0.0095, +0.4556,
+0.3001, -0.2753, +0.0375, -0.2730, -0.1158, -0.0386, -0.9331,
-0.2312, +0.0767, -0.0451, +0.4903, -0.9659, -0.5555, +0.3733,
-0.9485, -0.1633, +0.6562, +0.0532, +0.0032, -1.4373, +0.0468,
+0.1665, -0.8583, -0.1737, -0.8077, +0.0736, -0.2435, -0.0940,
+0.4364, +0.3262, -0.6249, +0.1925, -0.1501, +0.5031, -0.2871,
-0.0328, -1.5283, -1.2084, -0.6116, +0.1620, -0.6313, +0.0055,
-0.9072, +0.2416, -0.0478, -0.3839, -0.3438, +0.5106, -0.6303,
-0.0786, -0.1824
],
[
-1.0061, -0.1415, +0.5614, +0.1471, +0.2804, -0.2520, +0.2453,
+0.0006, -0.7605, +0.6590, -0.3647, +0.1825, +0.1384, -0.2187,
+0.6017, +0.3445, -0.2924, -0.0996, -0.4517, +0.0997, +0.2598,
-0.3217, -0.1636, -0.1402, -0.0688, +0.5085, +0.0841, +0.1102,
+0.2540, +0.0796, +0.2593, +0.1400, -0.7217, +0.2879, -0.2045,
+0.1075, +0.0661, -0.0931, -0.0124, -0.3172, -0.0032, -0.1647,
-0.2619, +0.0710, -0.0976, -0.0399, -0.4316, -0.4230, -0.3267,
-0.5921, +0.3123, +0.4273, -0.4458, -1.3337, -0.1572, -1.1379,
-0.4392, +0.2743, -0.4505, -0.2160, -0.5103, +0.3051, +0.3146,
-0.2750, +0.0965, -0.2486, +0.2757, -0.4422, -1.3708, +0.1642,
+0.4191, -0.2572, +0.1958, -0.0087, +0.3229, -0.5492, +0.0851,
+0.5151, -0.1987, -0.1567, -0.0296, +0.1179, +0.3395, +0.0261,
+0.0881, -1.2141, -0.6468, -1.9418, +0.3738, +0.1240, -0.1291,
+0.2193, +0.4126, +0.6854, +0.2690, -0.4613, +0.1831, +0.3535,
-0.0399, +0.0945, -0.0578, +0.5635, +0.0836, +0.0286, +0.0747,
+0.4220, -0.1242, -0.2479, -0.4533, +0.2856, -0.2348, -0.0885,
+0.4039, -0.6302, -0.2092, +0.1364, -0.7537, -0.1654, -0.0220,
-0.5586, -2.3927, +0.1004, -0.0423, -0.0261, -0.3384, +0.0462,
-0.7971, -0.9127
],
[
+0.0750, +0.2176, -0.4996, +0.1824, -0.0707, +0.1966, +0.0236,
-0.0692, -0.4960, +0.1383, +0.0373, -0.2026, -0.2008, +0.3046,
+0.3803, -0.0825, +0.1184, +0.4917, +0.2306, +0.0326, -0.3638,
-0.0580, -0.6834, +0.0438, -0.1417, -0.3357, -0.2336, -0.6334,
+0.3518, +0.1087, +0.3293, +0.0706, -0.1184, -0.2735, +0.2591,
-0.2327, -0.9828, -0.0540, -0.1303, -0.4909, +0.2748, +0.0293,
+0.0903, +0.1555, +0.5734, +0.0854, -0.7190, +0.3496, +0.0953,
-0.4672, -0.3598, -0.1305, +0.4157, -1.2690, -0.1589, -0.9937,
-0.0994, +0.2282, -0.4250, +0.1519, -0.1842, -0.2414, -0.0604,
-0.1126, -0.0335, +0.0520, +0.0182, -0.2505, +0.2693, -0.0057,
+0.5708, +0.1181, +0.3936, -0.1274, +0.1240, +0.3345, -0.5654,
-0.2478, -0.3554, -0.2807, +0.7255, -0.0021, -0.0572, +0.0135,
-0.0282, -0.2176, -0.0646, -0.8797, +0.0480, -0.5962, -0.3936,
-0.0989, -0.0082, -0.1741, -0.4116, -0.3323, +0.4336, +0.0306,
-0.3786, -0.3817, +0.2412, +0.3054, +0.3641, -0.2448, +0.1098,
+0.0840, -0.2574, +0.0844, +0.1103, -0.7323, +0.1966, -0.3377,
-0.0849, -0.0206, -0.4983, +0.3827, +0.0164, -0.3648, -0.0900,
-0.7235, -0.2408, -0.2687, +0.1033, +0.3813, -0.2013, +0.0816,
+0.0228, +0.2278
],
[
+0.4075, -0.4337, -0.1178, -0.2645, -0.3989, +0.3753, -0.4020,
+0.5258, -0.2696, +0.1506, +0.3436, +0.4036, +0.2138, +0.3263,
-0.5074, +0.0975, -0.6906, +0.2567, -0.3453, +0.1801, -0.2124,
-0.1523, +0.6583, +0.4003, +0.3154, +0.4761, +0.3418, +0.2095,
+0.5004, -0.1168, +0.5128, +0.2726, +0.4005, +0.5720, +0.4916,
-0.1016, +0.4738, +0.1282, +0.0060, +0.8116, +0.1077, -0.4063,
-0.2667, +0.0857, +0.1045, -0.5171, -0.5304, -0.2763, +0.2491,
-0.6936, +0.1619, +0.3061, -0.1143, -0.1346, -0.1540, +0.3273,
-0.1911, +0.3598, -0.2261, -0.0589, -0.0330, +0.7586, +0.0538,
-0.1769, -0.4478, +0.7072, +0.1733, +0.2620, +0.7263, +0.0611,
-0.5258, -0.2466, +0.2799, +0.2616, -0.0277, -0.1707, -0.3391,
-0.3818, +0.3912, +0.1183, +0.2835, -0.2168, +0.4314, +0.1513,
-0.5520, -1.1234, +0.6860, +0.5702, -0.1339, +0.0741, -0.5413,
-0.0858, -0.2133, +0.2737, -0.0825, -0.1390, -0.0584, -0.1054,
+0.0083, +0.3280, -0.2467, -0.3385, -0.3302, +0.1709, -0.0679,
-0.1004, -0.6046, -0.0236, +0.4449, +0.0112, -0.1172, +0.0051,
+0.1691, +0.3826, +0.2565, -0.0092, +0.2152, +0.7632, +0.3072,
-0.5973, -0.1711, +0.2336, +0.3417, -0.1362, -0.2016, +0.3892,
-0.2284, -0.7264
],
[
-0.5619, -0.6588, +0.2222, -0.7032, +0.3192, -0.7468, +0.0840,
-0.0085, -1.0928, +0.7917, -0.8584, -0.1583, +0.3086, +0.4438,
-0.6609, -0.4945, +0.0735, +0.2988, +0.0502, +0.1138, -0.4697,
-0.9869, -0.8634, -0.0310, +0.5568, -0.6586, +0.2718, -0.4758,
-0.2878, -0.2967, -0.3541, -0.2828, +0.1682, -0.3047, -0.6659,
-0.3369, -0.8593, -0.3854, +0.3200, +0.2052, +0.5943, +0.5155,
+0.1806, -0.3103, +0.1668, +0.4870, +0.1900, -0.2799, +0.1105,
+0.1297, -0.1050, -0.1574, -0.6901, +0.2987, -0.2892, -0.1965,
-0.2233, -0.1386, -0.3617, -0.2200, +0.2744, -0.1720, -0.0025,
-0.3192, +0.6888, -0.9526, +0.3419, +0.4704, -0.5047, +0.3867,
-0.2252, +0.4470, +0.1633, +0.3513, +0.3425, +0.0219, -0.4380,
-0.4013, -0.0750, -1.2795, +0.2330, +0.1879, +0.1886, -0.4729,
+0.0993, -0.3209, +0.1687, +0.0220, +0.3353, +0.4888, +0.7272,
-0.4622, +0.4032, +0.0962, +0.2044, +0.2351, -0.2522, +0.6231,
-0.1771, +0.2073, +0.1115, -0.0105, -0.4885, -0.1947, +0.0805,
-0.7077, +0.6541, -0.2168, -0.3990, +0.4405, -0.5269, +0.4340,
-0.0695, -0.6759, -0.2265, +0.2120, -0.9304, -0.1006, -0.4760,
-0.8165, -0.8547, -0.4625, +0.4114, +0.5815, +0.1176, -0.1204,
-0.0489, +0.0484
],
[
+0.3307, +0.4011, -0.1160, -0.2264, +0.1470, -0.2919, -0.7451,
-0.1501, -0.2051, -0.1858, -0.2368, -0.0166, +0.2410, -0.0673,
+0.2497, +0.3609, +0.0376, +0.3896, +0.5163, -0.2009, -0.0213,
-0.0858, -0.3649, -0.5694, +0.2950, -0.3715, -0.4091, +0.1047,
-0.2480, +0.2524, +0.0759, +0.3455, +0.1984, -0.2355, -0.0878,
-0.1322, -1.3433, -1.0194, -0.5226, -0.3194, -0.4891, +0.2655,
-0.2550, -0.1050, +0.1669, +0.0303, -0.0818, +0.0851, +0.0980,
+0.0502, -1.3712, -0.3609, +0.1263, -0.0160, -0.7327, +0.1758,
-1.4315, -0.3756, -0.4343, -0.7862, -0.1735, +0.6499, +0.3025,
-0.3268, +0.0232, -0.3298, +0.5586, -0.1262, -0.2705, -0.2214,
-0.4371, +0.0723, +0.8103, -0.3228, +0.0355, +0.3893, -1.1094,
-0.1664, +0.4552, -0.2732, +0.4157, +0.2128, -0.1776, -0.1176,
-0.3515, -0.0519, -1.4214, +0.3016, +0.0068, -0.2785, -0.0741,
+0.0472, -0.2149, -0.5563, -0.5599, +0.3389, -0.1509, +0.2100,
-0.6473, -0.0704, +0.0156, -1.0333, +0.1829, +0.1841, -0.0582,
+0.1230, -0.8259, +0.0747, -0.3466, -0.9991, +0.4214, -0.8522,
+0.6740, +0.0441, -0.3886, -0.0458, +0.0325, +0.3056, -0.3655,
+0.2377, -0.0662, -0.3979, +0.5509, -0.4970, +0.2473, +0.4498,
-0.0303, -0.1357
],
[
-0.1411, +0.3419, -0.5398, -0.5728, -0.2083, -0.7746, -1.6018,
+0.0830, -0.1589, +0.1139, -0.9158, -0.1886, -0.3481, -0.2742,
-0.6108, -0.4646, -0.1986, -0.2215, -0.0440, -0.2837, -0.6019,
+0.8197, -0.2571, -0.3325, +0.0778, +0.1455, -0.2286, -0.3149,
-0.1893, +0.1037, -0.0041, +0.1791, -0.0783, -0.9331, +0.3056,
+0.2097, +0.2735, -0.6685, +0.1896, -0.3313, +0.0086, +0.0904,
-0.3519, -0.1869, -0.2648, +0.4288, +0.5785, -0.1345, -0.4295,
+0.3263, -1.2706, -0.1486, -0.2965, +0.5303, +0.5528, +0.1091,
+0.5738, -0.3166, -0.4828, -1.0198, +0.1566, +0.0928, +0.3758,
-0.0311, -0.3368, +0.0728, -0.5817, -0.5760, +0.2071, +0.0976,
-0.1485, -0.5637, +0.0350, +0.0808, -1.4359, -1.0612, +0.8414,
-0.0146, -0.2197, +0.3973, +0.1252, -0.6162, -0.2415, +0.8750,
-0.0968, -0.1306, -0.9543, -0.1862, -1.5782, +0.3982, +0.4383,
-0.6786, -0.6033, +0.5087, -0.7869, +0.4946, +0.0823, +0.1406,
+0.9046, -0.3147, +0.3026, -0.5016, -0.0749, +0.0197, -0.0467,
+0.0819, -0.8144, -0.1670, -0.3128, +0.7011, -0.0116, +0.2329,
-0.0852, -0.9855, -0.2289, -0.4368, +0.1742, -0.4543, -0.1306,
-1.3917, -0.0987, -0.4074, -1.3360, +0.4082, +0.0751, +0.2841,
-0.3836, -0.0619
],
[
+0.5599, -0.1414, -0.4146, +0.3829, +0.1853, +0.4371, +0.1098,
-0.3408, -0.3087, +0.5215, +0.3125, -0.0436, -0.3835, +0.1165,
-0.0211, +0.7342, +0.3444, -0.2147, +0.0706, +0.1791, -0.2517,
+0.2504, +0.2694, +0.6347, +0.0294, +0.0789, -0.0674, -0.3697,
+0.4269, +0.4728, +0.2009, +0.4281, +0.3512, +0.1221, -0.6764,
+0.4082, -0.0088, +0.5336, +0.0043, -0.4289, +0.5984, -0.0283,
-0.3683, -0.0025, -0.2357, +0.3666, -0.6498, -0.0941, +0.5055,
-0.2962, -0.2996, +0.6271, +0.2199, -0.2913, -0.2883, +0.1686,
+0.0509, +0.3784, -0.2252, -0.2628, +0.1992, -0.2538, -0.7235,
-0.2512, -0.6292, +0.3317, -0.2750, -0.1492, -0.1254, -0.5303,
+0.4742, +0.2374, -0.0799, +0.3206, -0.1635, +0.2186, -1.5885,
-0.2624, -0.2078, +0.0618, +0.1017, -0.4087, +0.4021, -0.5257,
+0.1620, +0.0994, +0.1038, +0.3908, +0.7491, -0.2611, +0.4643,
-0.3850, -0.6188, +0.2157, +0.0873, -0.0726, -0.1172, +0.3000,
+0.1168, -0.7371, -0.0892, +0.5591, -0.1833, -0.1558, -0.1696,
-0.3287, +0.1759, +0.1962, +0.1898, -0.8243, -0.0589, -0.2871,
-0.1862, +0.3834, -0.4924, -0.9987, +0.4994, +0.1364, +0.2384,
-0.2542, -0.2244, +0.2779, +0.1243, -0.3323, -0.1065, +0.9858,
-0.1509, -0.3793
],
[
+0.1211, -0.3892, -0.2579, -0.0539, +0.3488, -0.4385, -0.0953,
-0.1475, -0.0085, -0.0146, -0.0385, -0.0863, +0.3296, +0.1033,
-0.7796, -1.0686, -0.4646, -0.9428, +0.1191, +0.5546, -0.2665,
-0.3790, -0.2799, -0.4635, +0.2681, +0.5384, +0.4331, +0.5839,
-0.1674, +0.1048, +0.2617, +0.2412, +0.2817, +0.3190, +0.0717,
+0.1469, -0.8842, -0.2001, -0.1666, +0.7128, -0.0140, +0.0970,
+0.4524, +0.1373, -0.1271, -1.0031, +0.4634, -0.9270, -0.6111,
+0.2023, +0.1047, -0.1319, -0.8178, +0.2046, -0.3841, -0.0014,
+0.0221, -0.3722, +0.3840, -0.2176, -0.7972, -0.9204, +0.1808,
+0.4128, -0.2148, +0.3167, -0.4500, -0.3328, +0.0039, -0.4873,
-0.3715, +0.1700, -0.8838, -0.2712, -0.0198, +0.2058, +0.0441,
-1.1947, -0.1763, -0.1705, +0.2709, +0.0692, +0.1217, +0.2196,
+0.7444, -0.5537, +0.5183, -1.1049, -0.7976, +0.4688, -1.2243,
-0.1406, -0.3925, +0.0298, +0.0430, +0.1307, +0.2235, -0.8780,
-0.4595, +0.6274, -0.8746, +0.1502, +0.1100, -0.3349, -0.3247,
+0.5027, -0.5859, +0.0378, +0.2973, -0.3859, -0.3021, +0.4116,
-0.0703, +0.8823, +0.0924, +0.0365, -0.5382, +0.1939, -0.6474,
-0.2411, +0.2754, -0.9982, +0.0524, -0.4023, +0.1290, +0.2443,
-0.4641, +0.1065
],
[
-0.8622, +0.1746, +0.0478, +0.2293, -0.5433, +0.7395, +0.0786,
-0.2146, +0.2212, +0.4695, +0.9859, -0.0263, +0.3056, -0.0288,
+0.6469, -0.7688, +0.1167, -0.1867, -0.7654, +0.0038, +0.5478,
-1.0341, +0.0961, -0.2939, +0.1562, -0.1333, -0.7247, +0.2685,
+0.5300, +0.1041, +0.2512, +0.2629, -0.1732, +0.5918, -0.3586,
-0.1235, +0.8831, +0.0805, +0.4253, -0.0225, +0.4928, -0.1960,
+0.0558, +0.7018, -0.5662, -0.3790, +0.2023, -0.1019, +0.0291,
-1.4159, +0.6631, -0.4778, -0.1910, +0.1474, -0.1357, +0.8589,
+0.6851, -0.2610, -0.7132, -0.2426, -0.3355, -0.6891, +0.4665,
+0.1988, -0.0993, -0.2685, +0.1827, -0.4094, +1.2800, +0.3095,
+0.0993, -0.0654, -0.2385, -0.1326, -0.8041, +0.1193, -0.7565,
-0.0409, -0.1184, -1.1444, +0.1374, +0.4981, +0.0891, -1.0196,
+0.2107, +0.5862, +0.4555, +0.2593, -0.4296, +0.1686, -0.0552,
-0.6144, -0.1157, -0.7372, -0.1982, -0.2709, -0.6923, -0.9942,
+0.1412, +0.2982, -0.0820, -0.4159, -0.8002, -1.0354, -1.1311,
-0.7153, +0.6903, -0.1778, +0.2467, +0.0600, -0.7793, -0.1700,
-0.5409, -0.0309, -0.5901, +0.1537, -0.5494, -0.1176, -1.7891,
-1.0237, -0.6773, -0.7804, +0.4489, -0.3772, -0.3757, -0.1479,
-0.4148, +0.0759
],
[
-0.2130, -1.1645, +0.3232, +0.3900, -0.5796, -0.4883, -0.0754,
-0.5800, -0.3135, -0.7695, +0.0458, -0.3120, -0.3763, +0.1087,
+0.6053, +0.7496, +0.2697, -0.4492, -0.4960, +0.0117, +0.0613,
+0.0936, -0.0581, +0.2946, +0.0277, +0.1042, -0.1424, -0.2893,
+0.6856, -0.0385, -0.0054, +0.2713, +0.3466, -1.0460, -0.6893,
-0.2328, +0.2178, -1.0084, -0.4262, -1.5063, -0.3900, -0.3777,
-0.4588, -0.7957, -0.1770, -0.2022, -0.3452, -0.0698, +1.0757,
+0.4649, +0.1002, +0.0709, -0.5472, -0.3277, -0.5591, +0.0326,
-0.6309, -0.7932, +0.3326, -1.0855, -1.2924, -0.9289, -0.0786,
-0.4831, -0.0025, -0.4642, -0.2356, -0.1564, -0.0782, -0.6758,
-0.5957, -0.2548, -0.5085, +0.4147, -0.4589, -0.7869, -0.2252,
-0.5512, -0.5648, +0.2208, +0.1798, +0.8284, -1.0848, +0.4015,
-0.1890, +0.2518, -2.7915, +0.2166, +0.0735, -0.2205, -0.1941,
-0.9020, -1.9251, -0.0926, +0.2323, -0.7674, -1.3656, -0.3813,
-0.0141, -0.0956, -0.5309, +0.0864, +0.3387, -0.0847, -1.7727,
+0.3071, -1.7860, -0.5097, -0.1783, -0.6700, -0.0269, -1.2584,
+0.0446, +0.0107, -1.2882, -0.7400, +0.0471, -1.3375, -0.3332,
+0.4684, -0.1211, -1.3073, +0.1311, -0.8196, +0.1702, -0.0472,
+0.1488, -0.3162
],
[
-0.4822, +0.1781, -0.5486, -0.3511, -0.6247, +0.0431, -0.6801,
-0.1866, +0.2514, -0.3799, +0.0273, +0.2231, +0.2796, -0.2793,
+0.1092, +0.0941, -0.2017, +0.3812, -0.0306, -0.6378, -0.0158,
+0.1661, +0.2605, +0.1764, -0.8942, -0.2745, +0.3173, +0.5745,
-1.2489, -0.1893, -0.6678, -0.7470, +0.1252, +0.1814, +0.2449,
-0.0477, -0.4136, -0.0115, -0.1523, +0.4298, -0.2459, +0.0294,
-0.4592, +0.2257, -0.1062, -0.3744, -0.0735, +0.6844, -0.2403,
+0.1384, -0.1363, -0.1945, -0.2138, -0.1837, +0.0862, +0.3144,
+0.1178, -0.2623, +0.1173, -0.4966, +0.0534, -0.0307, +0.4077,
-0.0290, -0.4554, -0.2460, -0.5608, +0.1067, +0.6111, -0.2582,
-0.2110, +0.0142, +0.0669, -0.3146, +0.0614, +0.2457, +0.5555,
-0.4073, +0.1069, +0.1074, -0.5879, -0.6752, +0.4640, -0.6362,
-0.8860, -0.2178, -0.1162, +0.0746, -0.1270, -0.2287, -0.0852,
-0.0441, -0.2772, -0.3746, -0.7997, -0.5596, -0.3286, -0.1993,
-1.1352, +0.3078, +0.2114, +0.4098, -0.2233, -0.5876, -0.5737,
-0.1624, -0.1678, -0.8670, -0.8843, +0.0697, +0.4886, -0.2260,
-0.0436, -0.6764, -0.2266, +0.2289, -0.2964, +0.1925, -0.2377,
-0.1030, -0.4171, +0.3580, +0.1054, +0.4303, -0.0084, -0.6212,
-0.2186, -0.1943
],
[
-0.2368, -0.3281, -0.2468, +0.6167, -0.8063, -0.0141, +0.4634,
+0.7047, +0.2760, -1.6134, -0.2726, -0.0294, +0.0873, -0.9213,
+0.0207, +0.5808, +0.4838, -0.4079, +0.1755, +0.2668, -0.4591,
+0.3907, +0.0266, -0.1942, +0.3317, -0.1848, -0.1486, -0.2302,
-0.2769, +0.6142, +0.0375, -0.1520, -0.5497, +0.2973, +0.3097,
-1.1093, +0.2989, -0.1326, -0.4286, -1.2209, -0.4974, -0.1208,
-0.9010, -0.6289, +0.3409, -0.1231, +0.8650, -0.2671, +0.6767,
+0.7254, +0.1158, -0.3425, +0.3165, -0.6184, +0.1867, -0.0042,
-0.4986, -0.4531, +0.4788, +0.3050, +0.0496, +0.4402, +0.3147,
-1.1178, +0.4522, -0.1048, -0.1525, +0.2907, +0.4481, -1.0584,
-0.0087, +0.3321, -0.4124, +0.3314, -0.2549, -0.5649, -0.0939,
+0.3508, -0.4372, +0.2062, -1.0781, +0.1715, -0.1390, -0.4017,
-2.0066, -1.1449, -0.1206, -0.3169, -0.9551, +0.0547, +0.3231,
+0.2871, -0.0780, +0.4857, +0.4933, +0.2082, -0.2030, +0.6173,
-0.5404, -0.0371, -0.4683, -0.2967, -0.0608, -0.3361, -0.5735,
+0.2297, -0.2957, -0.4493, +0.2015, +0.8652, -0.4828, -0.7838,
+0.1694, -0.3324, -0.2470, +0.2109, +0.4299, -0.3284, +0.4948,
+0.1671, -0.6183, -0.1987, +0.4212, +0.5936, +0.2322, -0.4222,
+0.1624, +0.8517
],
[
+0.1104, -0.4921, +0.0474, -0.7762, -1.1535, -0.2700, +0.3253,
-0.8367, +0.2960, +0.1246, -0.0578, +0.0235, -0.6222, -0.0553,
-0.8389, +0.2631, -0.6444, +0.0190, +0.0188, +0.1844, +0.3793,
+0.3827, +0.4708, +0.3743, -0.2069, +0.1266, -0.4388, +0.0426,
-0.3448, +0.2629, -0.0213, +0.6795, -0.0563, -0.3251, +0.3396,
+0.1987, +0.0749, +0.3789, -0.9001, -0.4680, -0.1956, +0.0772,
-0.7066, +0.2571, -0.6362, -0.0667, +0.0589, -1.0207, +0.2653,
+0.2805, -0.4010, -0.1658, -0.1780, +0.0457, -0.8765, +0.1537,
-0.7626, +0.2182, +0.0918, -0.0017, -0.2147, -0.6848, -0.1827,
-0.2736, +0.4772, +0.1851, +0.0148, -0.1144, +0.2520, -0.5352,
-0.0693, +0.6540, -0.0091, -0.2929, -0.6751, +0.3532, -0.2106,
+0.1118, +0.1222, -0.9654, -0.0066, +0.1840, +0.0129, -0.3222,
+0.3205, -0.0201, -0.3000, +0.1861, -0.3392, +0.0374, +0.1404,
-0.1935, -0.0747, -0.1133, -0.3540, -0.1466, -0.4392, +0.6757,
+0.2981, +0.2639, -0.1507, -0.1990, +0.1983, +0.5770, +0.4329,
+0.3249, +0.2107, -0.1458, +0.2654, -0.1229, +0.5379, -0.0903,
+0.1080, -0.2282, +0.1653, -0.2343, +0.4844, -0.8038, -0.6899,
+0.0414, -0.4948, -0.0860, -0.1432, -0.5109, +0.0398, -0.3908,
+0.2015, +0.4452
],
[
+0.4464, -0.1016, -0.2108, +0.6285, +0.7605, -0.0959, +0.6057,
+0.0765, -0.3599, -0.4701, -0.5980, +0.0008, +0.4155, +0.0650,
+0.1021, +0.2823, +0.3559, -0.2473, +0.3277, -0.1048, +0.1826,
-0.4503, +0.6390, +0.2877, -0.2585, +0.0565, -0.7396, -0.4097,
-1.1459, +0.5500, +0.4349, -0.3829, +0.5239, -0.3815, -0.8615,
-0.0339, +0.2943, -0.3213, -0.2750, -0.1061, -0.1573, -0.3898,
-0.0822, -0.2937, -0.1468, +0.1442, +0.1370, -0.8253, -0.5933,
-0.3806, +0.6324, -0.0170, +0.0879, -0.0069, +0.1753, -0.2524,
+0.2062, -0.4372, -0.0059, +0.1578, +0.2900, -0.3776, +0.2091,
+0.1612, +0.3661, +0.2404, -0.1718, -0.4438, -0.4260, -0.7568,
+0.4362, +0.0811, -0.7770, +0.2216, +0.0564, +0.4907, +0.3224,
+0.0886, -0.1049, -0.5407, -0.1025, -0.1982, -0.2967, -0.7103,
+0.2048, +0.0722, +0.5329, +0.0530, +0.2883, -0.1321, +0.3782,
-0.0988, +0.2376, -0.7593, +0.2411, -0.2622, -0.5329, +0.5309,
-0.2957, -0.9082, +0.2224, -0.4173, +0.0038, +0.1562, -0.5012,
+0.5892, -0.2013, -0.5368, +0.0320, +0.3389, -0.8605, -0.8244,
-0.4792, -0.0523, -0.0195, +0.0430, +0.1482, +0.0215, +0.4290,
-0.2574, -0.2047, -0.0628, -1.7579, -0.0751, +0.7376, -0.5738,
-0.1425, +0.4466
],
[
-0.2047, -0.2955, +0.4383, -0.2099, -0.3229, +0.0324, +0.1413,
+0.4679, +0.0618, -0.1480, -0.0718, +0.1261, -0.4324, -0.1585,
-0.5738, -0.8086, -1.2257, +0.1531, -1.1906, +0.0960, +0.0034,
-1.0042, -0.4898, -0.0569, -0.2377, -0.6157, -0.3313, -0.6325,
+0.5634, -0.2944, -0.5246, +0.1536, +0.2806, +0.0689, +0.0035,
-0.2231, +0.0330, -0.2128, +0.3271, -1.4344, -0.4247, -0.7055,
-0.3834, +0.0650, +0.3693, -0.2910, -0.1479, +0.2697, -0.3216,
+0.8833, -0.1711, -1.1387, +0.2106, -0.1236, -0.2836, -0.4468,
+0.0387, -0.1254, -1.8480, +0.1198, +0.3021, +0.2219, +0.7346,
-0.2258, +0.8582, -0.4501, +0.0066, -0.0058, +0.2466, +0.3448,
-0.0955, +0.1150, +0.1171, -0.0305, -0.7754, +0.2977, -0.3415,
+0.1446, +1.0973, -1.1663, -0.1010, -0.2815, +0.3744, -0.3839,
-0.2079, -1.8889, -0.5745, +0.0064, +0.1967, +0.0068, +0.0453,
+0.4830, +0.1987, +0.6962, -0.3831, +0.4901, -0.4959, +0.4123,
-0.4468, -0.0611, -0.1656, -0.4883, +0.6344, -0.1394, -0.0989,
-1.1268, +0.5392, -0.9870, -0.4727, +0.0362, -0.5664, -0.8200,
-0.1482, -0.4791, -0.2926, -0.5099, +0.6774, -0.1072, -0.0655,
+0.2952, -0.8458, -0.2151, -0.5641, +0.0376, +0.2182, +0.1099,
+0.2379, +0.1908
],
[
-0.1225, -0.1321, +0.7308, -1.1087, -0.5171, +0.0304, -0.8022,
-0.2931, +0.1665, -0.1050, -0.6631, -0.5912, +0.1576, +0.3605,
-0.1145, +0.5829, +0.3607, +0.3474, -0.2511, -0.7079, -0.2942,
+0.3808, +0.5294, -0.5013, +0.0238, +0.4688, -0.3788, +0.0730,
+0.0283, +0.6128, -0.3222, -0.4536, -0.9039, -0.6244, -0.5269,
-0.8876, +0.1430, +0.4378, -0.1609, -0.8150, -0.4497, +0.2789,
-0.2679, +0.7530, +0.3137, -0.8434, +0.4280, +0.1761, -0.8674,
-0.5673, +0.2236, -0.3403, -0.7646, -0.6555, +0.9779, +0.0927,
-0.4200, +0.0056, -0.8580, +0.1268, -0.1355, -0.4540, -0.2248,
+0.9954, +0.0536, +0.0595, +0.3436, -1.0771, +0.3269, +0.6748,
-0.2971, +0.3235, -0.1241, -0.0185, +0.2466, +0.3684, -0.9468,
-0.0591, -0.7956, -0.2100, -0.1989, +0.3475, -0.2810, -0.3435,
-0.3471, +0.0183, -0.8959, -0.6866, -0.4349, -0.1154, -0.3443,
-0.0657, +0.4922, +0.2964, +0.2669, +0.1446, -0.0485, -0.6028,
-0.9521, -0.0438, +0.4984, +0.3148, -0.7061, -0.0147, -0.4091,
-0.4603, -0.0370, -0.2918, -0.1455, -0.8047, +0.4455, +0.9899,
-0.1563, +0.1182, +0.2497, -0.9639, +0.4513, -0.1153, -0.2384,
-0.1501, +0.2620, -0.9036, -0.2327, -1.3314, +0.4013, -0.4306,
+0.2883, -1.2667
],
[
+0.2644, -0.1494, -0.4453, +0.1137, +0.0802, +0.1931, -0.1430,
-0.0241, +0.5387, +0.1517, +0.0854, -0.0698, -0.1889, +0.4570,
-0.1975, +0.3453, -0.2398, -0.4224, +0.1325, +0.2208, -0.0402,
+0.1095, -0.5577, +0.2867, -0.1122, +0.1830, +0.0217, -0.0430,
-0.0696, -0.1120, +0.0500, +0.0370, -0.4425, +0.1814, -0.1573,
+0.0998, -0.1730, -0.3537, -0.8194, +0.1510, -0.2713, -0.2499,
+0.0226, +0.1604, +0.3219, -0.2855, +0.0610, +0.3411, -0.0813,
+0.0378, -0.6988, -0.0412, +0.0657, -0.0841, +0.4532, +0.0441,
+0.1345, +0.1558, +0.2114, +0.3063, -0.1986, -0.0880, -0.0281,
+0.5160, +0.2705, -0.0413, +0.0352, +0.0696, -0.0719, -0.2414,
-0.3859, +0.4521, -0.1886, +0.2422, -0.1094, +0.2950, +0.0909,
-0.4692, -0.0058, +0.0013, +0.4307, +0.4305, +0.0478, +0.3528,
+0.5099, +0.2312, +0.3044, -1.1194, +0.0669, -0.3181, -0.2290,
+0.4461, +0.1198, -0.0438, +0.2953, +0.0438, -0.0518, -0.5190,
+0.2083, +0.4686, -0.1845, -0.2299, -0.3921, +0.4322, +0.2297,
+0.2758, -0.3691, +0.4775, +0.3972, -0.9253, +0.4109, -1.1842,
+0.3989, -0.4237, -0.6019, -0.2239, +0.0047, -0.0416, -0.6338,
-0.0119, +0.3457, +0.1080, +0.4905, +0.1786, -0.0777, +0.1234,
-0.2929, +0.3286
],
[
-0.1358, -0.5763, -0.9117, -0.5821, -0.1674, +0.4194, +0.3464,
-1.3516, +0.5387, -0.0086, -0.4016, +0.4349, +0.1220, -0.6551,
-0.6775, -0.4614, -0.5133, +0.3374, +0.2617, +0.0825, +0.1601,
+0.6206, +0.1189, +0.2517, -0.0462, +0.0175, -0.0761, +0.2331,
+0.3815, +0.2835, -0.2826, +0.5036, +0.0863, +0.1331, -0.2766,
-0.3673, +0.3852, -0.0354, +0.1149, +0.8184, -0.8265, -0.0190,
-0.4810, +0.0216, +0.0128, -0.0960, +0.2823, -0.8916, +0.4520,
-0.6486, -2.2243, -0.3405, +0.3947, +0.2003, +0.4490, +0.0508,
-0.3408, +0.2818, +0.0271, +0.2548, -0.8521, -0.2771, -0.3072,
+0.3251, -0.2674, -0.1178, +0.2249, -1.5805, -1.8331, -1.0409,
-0.1002, +0.1934, -0.0360, -0.2364, -0.0350, +0.0151, +0.2383,
-0.0633, +0.2490, -0.0642, -0.2854, +0.7608, -1.0875, +0.0126,
+0.1063, +0.4831, +0.1556, +0.3813, +0.1668, -0.2929, -0.6254,
+0.5186, -0.0166, -0.4155, -0.6734, +0.1663, -0.3398, -0.1092,
+0.1818, +0.2933, +0.1602, +0.5191, -0.2649, +0.2901, -0.1126,
+0.0552, -0.4307, -0.4647, -0.0786, -0.9300, +0.1249, +0.6485,
+0.0501, -0.5488, +0.6199, +0.2848, -0.2970, +0.1893, +0.7779,
-0.2323, +0.1798, -0.0726, +0.1871, -0.9195, -0.3217, +0.3426,
-0.3773, +0.2996
],
[
+0.1591, +0.5102, +0.2424, -0.3209, -0.5437, -0.6565, +0.3967,
-0.2887, +0.3699, +0.3665, +0.2700, -0.0879, +0.2999, -0.0883,
+0.2035, +0.2699, -0.1585, -0.4209, +0.3688, -0.1251, +0.0093,
-0.1950, +0.1031, +0.1462, +0.0963, -0.0132, +0.2962, +0.2199,
+0.0907, -0.1868, +0.0138, +0.0799, +0.4006, -0.8396, -1.5625,
+0.0963, -0.8820, -0.8120, -0.2451, +0.0646, +0.1316, -0.0886,
-0.6373, -0.4917, +0.4230, -0.4656, -0.0706, -0.1486, +0.3288,
-0.0991, -0.1230, -0.0042, -0.0949, -0.8593, +0.8234, -0.1525,
-0.6916, -0.2377, +0.6268, -0.7994, -0.4317, -0.9239, +0.1686,
-0.0160, +0.4368, -0.2227, +1.2948, -0.2040, +0.2399, +0.4261,
-0.6343, -0.4238, -0.0279, -0.3740, -0.0494, -0.2556, -1.0388,
-0.8581, -0.1656, -0.0359, +0.7224, -0.3739, -1.2194, +0.6395,
-0.1583, -0.3026, -0.1850, -0.1700, -1.8707, +0.2407, -0.4709,
-0.6156, +0.5196, -0.2519, -1.1860, -0.4825, +0.1446, +0.0457,
+0.5660, -0.9857, +0.2271, -0.6855, +0.2044, +0.3245, -0.7669,
+0.2010, +0.8049, +0.5283, +0.0931, -0.5368, +0.1922, -0.2671,
+0.0585, +0.0481, -0.1221, -0.2978, -0.2395, -1.3760, -0.3491,
-0.9191, +0.2586, -0.1832, -0.0098, -1.1076, +0.0029, -0.7639,
-0.3380, +0.3705
],
[
+0.0289, -0.4805, -0.1357, +0.1879, -0.5422, +0.2763, -0.0112,
-0.1107, +0.4688, +0.2981, -0.1360, -0.4712, -0.2914, -0.0272,
-0.6401, -0.0511, -0.1172, +0.0755, -0.5573, -0.2963, -0.0006,
+0.2621, +0.1875, +0.1875, -0.0846, +0.2546, -0.5560, -0.3267,
-0.1187, +0.3248, +0.3340, +0.0702, -0.0139, -0.1771, -0.0638,
-0.0986, -0.0267, +0.2829, +0.0948, -1.1968, +0.1601, -0.4763,
-0.4689, -0.1215, +0.1932, -0.9174, -0.0659, +0.4321, -0.1908,
-0.7125, -0.3206, -0.0359, -0.3748, +0.2235, +0.6466, -0.2252,
-0.2070, +0.3157, -1.2327, -0.0452, -0.2814, -0.6249, +0.0581,
+0.2075, -0.0165, -0.1580, +0.4082, -0.5096, -0.0861, +0.1971,
+0.0945, -0.0999, -0.5605, +0.2850, +0.2503, -0.2752, -1.1420,
+0.1810, +0.2212, -0.0055, +0.0991, +0.2571, +0.2492, -0.3627,
-0.1796, -0.6484, -0.5700, +0.0658, +0.4221, -0.4974, -1.0723,
-0.0637, +0.0172, -0.3979, +0.5588, -0.3942, +0.5263, -0.3237,
-1.2946, -0.7221, -0.4368, -0.1285, -0.4849, -0.0860, +0.2354,
-0.8339, +0.2524, +0.0469, -0.3501, -0.0119, +0.1499, +0.0355,
+0.0300, -0.1779, -0.2451, +0.0619, -0.0764, -0.1715, -0.1163,
-0.3817, -0.3149, -0.0202, +0.1961, -0.2986, -0.2575, +0.0752,
-0.8310, -0.1580
],
[
-0.3187, -0.6681, -0.1421, -0.4481, +0.1419, +0.3267, +0.0442,
+0.5460, +0.1549, +0.5221, +0.3236, +0.0272, -0.2433, -0.0965,
-0.0190, +0.2937, +0.1597, +0.1054, +0.3486, +0.4370, +0.3442,
+0.3670, -0.0690, -0.2773, +0.4176, +0.0272, -0.3902, -0.3398,
+0.3189, +0.3319, -0.0357, +0.3368, +0.3673, +0.1598, -0.7883,
+0.2557, -0.1954, -0.2160, +0.1178, +0.2254, +0.1501, -0.1296,
-0.4042, -0.3844, +0.2403, +0.3404, +0.4625, -0.4329, +0.2005,
+0.5049, -0.4352, -0.0119, -0.8480, -0.0911, -0.2687, +0.0486,
-0.3807, +0.0633, -0.3636, +0.1809, +0.1578, +0.0463, -0.4087,
+0.0168, +0.0967, -0.1952, +0.0402, +0.2730, -1.0654, -0.2688,
-0.1371, +0.0163, -0.0675, +0.2259, +0.0828, +0.5484, +0.3267,
-0.5555, +0.1474, -0.2014, -0.7283, -0.2138, +0.1367, +0.2919,
-0.2362, -0.1456, -0.4480, -0.2033, -0.0546, -0.0005, +0.5427,
+0.2188, -0.0197, -0.0850, -1.0038, -0.2540, +0.2578, +0.4207,
-0.1029, +0.2920, +0.1735, +0.4866, -0.1391, -0.0216, -1.0560,
-0.4210, +0.4035, -0.1627, +0.2446, +0.3247, -0.6063, +0.2979,
+0.5203, +0.6016, +0.1982, +0.2929, -0.1622, -0.2267, -0.1046,
+0.6733, -0.6183, -1.0064, +0.4085, -0.2986, +0.0815, -0.0446,
-0.0211, -0.1044
],
[
-0.0967, -0.4198, +0.0837, -0.6985, +0.6727, +0.2239, -0.2881,
+0.2889, -0.4438, +0.0474, +0.1777, -0.5039, +0.0721, +0.2901,
-0.1435, +0.2687, -1.2548, -0.1118, +0.4789, -0.2731, -0.0257,
-0.2657, +0.0613, -0.7471, +0.1597, +0.6435, -0.1961, +0.0205,
-0.3476, -0.1768, +0.1272, -0.3248, -0.1725, -0.0518, -0.1520,
-0.8646, -0.4568, -0.2900, +0.0804, +0.2874, -0.1601, -0.1218,
-0.0916, -0.3936, +0.0563, -0.5226, +0.7649, +0.2661, +0.1315,
+0.1456, -0.1783, -0.4065, +0.2313, +0.3206, -0.1751, +0.0491,
+0.6178, -0.4157, +0.5004, +0.4791, -0.8273, +0.3121, +0.5197,
-0.7075, +0.2732, +0.5617, -1.7903, -0.9306, -0.4015, -0.1448,
+0.1041, +0.0514, -0.5475, -0.3665, +0.0613, +0.3292, +0.5653,
-0.1457, +0.2908, -0.2775, +0.2372, +0.0219, -0.5245, -0.1014,
-0.7805, -0.2367, +0.0397, +0.3511, -0.4261, -0.0955, +0.0628,
+0.0939, +0.3141, +0.4996, +0.0211, -0.7560, +0.3572, +0.0311,
+0.4276, -0.6596, +0.2172, +0.0416, +0.3343, +0.1415, -0.3582,
-0.2764, -0.5064, -0.6150, +0.2117, -2.0168, -0.0036, -0.4181,
-0.0952, +0.3186, -0.4855, -0.5311, +0.2385, +0.4523, -0.0403,
-0.9055, +0.4183, +0.3096, -0.5405, +0.0984, +0.2680, -0.2033,
-0.5637, +0.3754
],
[
+0.0815, -0.4868, -0.3229, -0.1918, -0.7094, -0.4347, -0.4141,
+0.3582, -0.0272, -0.1804, +0.2254, -0.2479, +0.1649, -0.3197,
+0.2285, -0.0620, -0.4405, -0.1798, -0.7993, +0.2816, -0.3819,
-0.1264, +0.1136, +0.2591, -0.0338, +0.2426, +0.3092, +0.2532,
-0.1220, -0.5627, +0.4531, -0.5025, -0.0519, -0.0247, +0.0790,
+0.0817, +0.1633, -0.0444, -0.4636, -0.1700, +0.0939, -0.1771,
-0.0334, -0.9386, +0.3456, -0.2429, +0.0718, -0.0228, -1.7157,
-0.3157, -0.2158, -0.0453, -0.9205, -0.3201, +0.0152, +0.0439,
-0.0100, +0.0594, -0.0715, -0.8998, -0.6740, -0.0128, +0.2614,
+0.0946, +0.0080, +0.0540, -0.8912, -0.1545, -0.4753, -0.1118,
-0.8795, +0.1528, -0.9679, +0.5510, -0.4913, +0.0155, -0.7465,
-0.2269, -0.1916, -0.7147, -0.1364, -0.1825, -0.3343, -0.5667,
+0.3604, -0.3926, -0.1481, -0.0674, -1.1057, +0.2662, -0.4133,
+0.0483, -0.1386, +0.3793, -0.4958, -0.6745, -0.0975, -1.0668,
-0.6878, -0.8876, +0.0577, -0.1947, -0.8066, -0.1917, +0.0802,
+0.0349, -0.1140, -1.5906, +0.5666, -0.9585, +0.5123, -0.4561,
+0.1220, -1.2760, -0.2994, +0.5025, +0.1188, +0.0047, -0.7984,
-0.1449, -0.0897, +0.1967, -0.2902, +0.9205, +0.3216, +0.1896,
+0.0031, +0.0811
],
[
+0.1508, -1.1152, -0.4444, -0.1015, +0.2495, +0.3801, -0.1659,
+0.3051, -0.0344, +0.0685, +0.1417, -0.2961, +0.2852, -0.7454,
+0.2276, -0.6416, -0.6397, +0.0890, +0.2244, +0.0755, +0.2155,
+0.2553, +0.0755, -1.2678, +0.0580, +0.0448, +0.3109, -0.2716,
-0.2427, +0.0622, -0.1986, -0.0201, +0.1963, +0.1069, -0.0677,
-0.3124, -0.2603, -0.0496, +0.1603, +0.0779, -0.0491, +0.1279,
-0.1761, -0.1035, -0.0013, -0.2889, -0.0354, -0.2291, +0.1568,
+0.2295, -0.4736, +0.2320, -0.0130, -0.6341, +0.2555, +0.2521,
-0.8990, -0.0297, -0.4157, -0.8187, +0.3604, +0.3897, +0.2412,
-0.1096, -0.2729, -0.5034, -0.3382, +0.3082, +0.0280, -0.7055,
-0.1825, +0.0428, -0.2388, -0.5499, +0.6341, -0.1584, +0.3710,
-0.7945, +0.1762, +0.2580, -0.4087, -0.6248, -0.1134, -0.1217,
-0.0592, -0.6103, +0.5790, +0.0986, -0.4863, +0.3986, -1.6397,
+0.2345, -0.4191, -0.3749, -0.8363, -0.5947, -0.1273, -0.0475,
-0.7957, +0.2344, -0.2187, +0.0841, +0.1320, -0.0680, -0.8809,
+0.0584, -0.6072, -0.6798, +0.2180, -1.3019, -0.3872, +0.4067,
+0.3076, +0.7941, +0.8064, -0.4320, -0.4769, +0.0634, -1.0734,
-0.0873, -0.2996, -1.0084, -0.0319, -0.1943, -0.0330, -0.1190,
-0.3231, -0.2432
],
[
+0.8061, +0.0414, -0.5187, -1.8237, -0.1573, +0.3550, -0.0713,
+0.0761, +0.2378, +0.1792, -0.1353, -0.1836, +0.3602, +0.4975,
-0.3694, -0.9753, -0.8763, -0.6916, -0.6294, +0.3999, +0.5712,
-0.3162, -0.4714, -0.2927, +0.1412, +0.2313, +0.2226, +0.2106,
-0.2885, -0.0488, +0.2140, -0.4388, -0.6004, +0.2552, -0.5705,
-0.1103, -0.3916, -0.3245, +0.0148, +0.3116, -0.7406, -0.3943,
+0.3876, -0.2876, -0.6687, +0.1455, +0.2159, +0.3626, +0.1751,
+0.4169, +0.2423, +0.1283, +0.4872, +0.0035, +0.0388, +0.2582,
+0.4407, -0.8525, -0.4614, -0.4708, -0.0271, +0.3957, +0.2106,
-0.6293, -0.3893, -0.3817, -0.5386, -0.3784, -0.0756, -0.5606,
-0.2913, -0.2612, -0.7377, +0.2709, +0.0092, -0.2244, +0.7055,
-0.2901, +0.0308, +0.4058, -1.6255, -0.1924, -0.1218, -0.4762,
-0.1004, +0.5754, -0.2167, -0.9728, +0.1414, +0.0720, -0.4261,
+0.2407, +0.1644, -0.3591, -0.8870, -0.6904, -0.5330, +0.3033,
-0.1471, -0.3799, +0.0609, -0.0158, -0.1149, +0.0969, +0.3944,
-0.6251, +0.3801, -0.3275, -0.5975, -0.1105, -0.8845, +0.4784,
+0.6250, -0.5131, -0.1038, +0.7839, +0.0422, +0.0789, +0.3880,
+0.3156, -1.7372, -1.0678, +0.4849, -0.2369, +0.1520, -0.2966,
+0.0060, -0.3120
],
[
-0.2918, -0.2262, -0.3504, -0.0837, -0.4429, -0.8962, -0.4601,
+0.0141, -0.0975, -0.1129, +0.5787, +0.4863, -0.1125, -0.2147,
+0.4314, -0.1362, +0.2811, +0.8989, -0.9412, +0.3465, +0.4242,
-0.2377, +0.2412, +0.3327, +0.1597, +0.0798, +0.2373, -0.1421,
+0.0364, -0.9199, +0.0272, +0.0701, -0.2294, -0.3992, +0.0251,
-1.9692, -0.0399, -0.1494, +0.1042, +0.1043, +0.4137, -0.1997,
-0.3013, -0.5738, +0.2823, +0.3191, -0.2606, +0.1708, -0.9317,
-0.0394, +0.4235, +0.1541, -0.4125, -1.0192, -2.3828, -0.2181,
-0.2494, -0.3435, +0.3089, -0.9115, -0.1843, -2.0460, -0.1583,
+0.5973, -0.0500, -0.8085, -0.3261, -0.4273, -0.1561, -0.5951,
-0.2680, +0.3162, +0.2059, -0.6817, +0.1199, -0.2442, -0.0533,
+0.3222, -0.4804, -0.6010, -0.4073, +0.8277, +0.0078, -0.2969,
-0.4418, +0.3121, -0.0361, -0.5091, +0.4132, +0.3545, +0.4635,
-0.4379, +0.6498, +0.2989, -2.3967, -0.1776, +0.1242, -1.1995,
+0.4168, -1.4933, -0.0583, -1.2506, -0.3718, +0.3099, +0.1641,
+0.4878, -0.2551, +0.5048, -0.4897, -0.3886, +0.0833, +0.1213,
+0.0227, -0.8806, -0.6782, +0.5467, -0.1304, +0.3051, +0.4033,
+0.1663, -0.8800, -0.6483, +0.2446, +0.0204, +0.2517, -1.2316,
-0.0747, +0.5904
],
[
+0.2094, +0.4037, -0.8266, -0.2985, -0.9099, +0.0830, -0.6207,
+0.4955, +0.6387, -0.8040, +0.4625, -0.2901, -0.6773, +0.3770,
+0.7285, +0.3740, +0.0346, -0.0444, -0.0635, -0.1103, +0.2181,
+0.5463, +0.3262, -0.1519, +0.3058, +0.1327, -0.3031, -0.7194,
-0.5021, +0.0379, -0.0157, -0.0427, +0.1579, -0.1737, -0.2882,
+0.1571, +0.2838, +0.2221, -0.0288, -0.0749, -0.0014, -0.2073,
-0.5759, +0.5900, -0.3818, +0.1474, +0.4674, -0.0011, -0.1010,
-0.3498, +0.6558, -0.0812, -0.3489, +0.0398, -0.1400, -0.2533,
-0.6793, -0.0488, +0.2128, +0.0621, -0.0049, +0.9653, -1.1831,
-0.3126, +0.0806, -0.4907, +0.4353, +0.3233, +0.0632, -0.6646,
-0.4535, -0.0299, +0.6531, +0.0920, -0.2882, -0.2987, +0.4691,
-0.0792, +0.1770, -0.4178, -0.0450, +0.1346, +0.1328, +0.1063,
-0.5334, +0.0142, -1.1001, -0.5854, -0.0311, +0.5967, -1.1213,
+0.3408, +0.1359, +0.3228, -0.4258, -0.2816, +0.4218, -0.4707,
-1.0189, -0.2999, +0.1977, -0.3483, -0.0017, -0.2277, -0.0898,
-0.2637, -0.3317, -0.1012, -0.1405, -0.4500, -0.6260, +0.6423,
+0.2137, +0.5864, +0.0911, +0.2422, -0.5717, -0.5104, -0.5501,
+0.4084, -0.6578, -0.2792, -0.0373, +0.1023, -0.4588, -0.2065,
+0.4159, +0.0291
],
[
-0.0633, +0.1900, -0.0316, -0.6121, +0.0363, +0.0089, -0.1875,
-1.7498, +0.0125, -0.1088, +0.2785, +0.1229, +0.0286, +0.2072,
+0.0906, -0.1487, +0.0048, -0.6439, -0.8480, -0.1054, -0.3508,
+0.4092, +0.3682, +0.3626, -0.1944, +0.1335, +0.4123, -0.3110,
-0.7782, -0.0082, +0.5426, -0.9679, +0.3419, +0.2434, -0.4558,
+0.1008, +0.1001, +0.3073, -0.3161, -0.7719, -0.3348, +0.1857,
+0.6334, -1.0666, +0.1509, -0.0705, +0.3096, -0.6401, -0.0345,
+0.1606, -0.7597, +0.0135, -0.0459, -1.0242, -0.2522, +0.1098,
-0.7466, +0.2503, -0.7443, -1.0207, -0.5589, -0.4333, -0.3767,
-0.9047, +0.3236, -0.2105, +0.0718, -0.3654, +0.3875, +0.0695,
+0.3379, +0.5472, -0.0609, -0.0038, -0.5566, +0.4001, +0.0368,
+0.3434, +0.1840, -0.2020, -0.1040, -0.1571, +0.0542, +0.2845,
+0.3815, -0.6025, -0.0693, -1.2639, -0.9612, +0.2361, -1.5898,
-0.6602, -0.3257, -0.0902, -0.0034, -0.2451, -0.2921, -1.2953,
-0.3734, +0.2509, -0.3979, -0.5262, -0.8265, -0.6400, +0.0767,
+0.3744, -0.2818, -0.6876, +0.5142, +0.2778, -0.3917, -0.4421,
-0.4253, +0.7273, -0.4951, +0.3048, +0.3287, +0.1589, -0.3765,
+0.2310, +0.1045, +0.0334, -0.0645, -0.0329, +0.2335, +0.1058,
-0.4252, +0.2281
],
[
-0.4140, -0.4391, -0.5113, +0.0978, +0.1550, -0.1891, -0.1878,
-0.7209, +0.6270, -0.6307, -0.0813, -0.3831, +0.2939, -0.4056,
-0.0919, -1.0405, -0.4539, -0.5300, -0.8153, +0.1230, -0.2606,
-1.0893, +0.5740, -0.0727, -0.4035, +0.3717, +0.0811, +0.0787,
+0.0551, +0.0799, +0.0022, -0.0810, -0.1550, -0.0919, +0.0184,
+0.1899, +0.3246, +0.3190, -0.4321, -0.8694, +0.2681, -0.1237,
+0.0163, -0.4255, -0.6452, -0.5593, -0.4473, +0.2048, -0.4597,
+0.1990, +0.7002, +0.0012, -0.3186, +0.0787, -1.0031, +0.3690,
+0.3852, +0.2772, +0.2252, -0.1840, -0.6804, -0.6328, +0.2052,
-0.2431, -0.0694, -0.5177, -0.0266, -0.4128, -0.1556, +0.0945,
-0.1813, +0.1167, -0.3094, +0.3329, +0.2081, -0.6154, -0.3685,
+0.3549, -0.4292, +0.1615, +0.3999, +0.0454, +0.2819, +0.3528,
+0.3521, +0.0139, -0.7987, -0.4235, +0.2299, +0.0016, +0.5296,
+0.3507, +0.0363, -0.0489, +0.6135, -0.1952, +0.3045, -0.0518,
+0.0352, +0.0725, -1.1994, +0.2048, -0.2690, -0.0496, +0.4500,
-0.0757, +0.4166, +0.2138, -0.0075, -0.0621, +0.8098, -0.3943,
+0.1628, -0.8582, +0.2362, +0.3902, +0.3173, -0.1347, -0.4285,
-0.2000, +0.4943, -0.2492, -0.1656, +0.2132, -0.0433, +0.2884,
-0.2845, -0.4936
],
[
-0.4738, -0.0158, -0.2765, +0.1722, +0.3621, +0.5771, +0.1723,
-0.8243, +0.0913, -0.3469, +0.5777, -0.2210, -0.1961, -0.8988,
+0.5442, +0.2901, -0.2991, +0.6127, -0.2821, +0.3159, -0.8808,
-0.0491, -0.1903, +0.1704, +0.2932, -0.0118, +0.2040, -0.2998,
+0.4355, +0.1396, +0.0578, +0.2586, +0.2063, +0.1804, +0.0332,
+0.1979, +0.1810, -0.3074, -0.4613, -0.3067, +0.1214, -0.0493,
-0.0024, +0.4824, +0.2318, -0.0506, +0.3857, +0.3477, -0.8468,
+0.3712, -0.4446, -0.1276, -0.0269, +0.3221, -0.2202, +0.2256,
+0.0683, -0.2882, +0.2667, -0.3171, -0.1336, -0.0404, -0.0441,
+0.1958, +0.1474, -0.2557, -0.8337, +0.0171, +0.1839, -0.5242,
+0.1376, +0.7396, -0.2130, -0.0796, -0.3462, -0.1404, +0.0744,
-0.1832, +0.5701, +0.0777, +0.2206, +0.2120, +0.2066, -0.2115,
-0.2945, +0.2164, -0.3908, +0.2701, +0.2148, -0.0048, -0.1246,
+0.0619, -0.4415, +0.0907, -1.4013, -0.1996, +0.1082, +0.0862,
+0.1056, +0.6264, -0.0440, -0.7101, -0.7725, -0.8352, +0.1243,
-0.2385, -0.0286, -0.1492, -0.0639, +0.2705, +0.1741, -0.3408,
+0.3609, +0.7159, +0.3274, -0.3942, +0.2419, +0.3255, -0.0995,
-0.0738, -0.2326, -0.8728, -0.0007, -0.0530, +0.4491, +0.4334,
+0.2235, -0.5149
],
[
+0.3999, +0.4161, -0.5792, -0.1638, -0.0401, +0.2404, +0.2323,
+0.0217, -0.0647, -0.8051, -0.9147, +0.0337, -0.2957, +0.1351,
+0.0574, +0.1644, -0.3406, -0.1560, +0.3007, -0.2773, -0.2168,
+0.3504, -0.0845, -0.2320, +0.1636, -0.3549, -1.1003, +0.3613,
-0.8127, +0.1260, -0.1164, +0.2141, -0.9710, +0.8408, +0.3108,
+0.0717, -0.1734, -0.0426, +0.1837, +0.2777, +0.4342, +0.0072,
-0.7050, -0.2702, -0.7281, -0.1669, -1.3528, -0.8867, +0.2906,
+0.0970, -0.0789, +0.2028, -0.0715, -0.1935, -0.0710, -0.3757,
+0.7201, +0.0652, +0.1590, +0.5348, -0.0384, +0.3781, +0.1935,
+0.4964, -0.2404, -0.5474, +0.1082, +0.0798, -0.7492, -0.3952,
-0.4727, -0.4758, +0.1355, +0.1009, +0.1819, -0.3007, -0.0196,
+0.2785, -0.0800, +0.6883, +0.1986, +0.3387, -0.0251, +0.4226,
-0.0952, -0.7956, -1.0131, -0.8996, -0.0791, -0.1689, -0.6311,
+0.0509, +0.4836, -0.0825, -0.2963, -0.5650, +0.1117, -0.3100,
+0.1132, -0.8710, +0.2885, -0.2328, +0.5853, +0.1142, +0.1174,
+0.3191, +0.2894, +0.0455, -0.8276, -0.0121, -0.1303, -0.1764,
+0.3464, +0.5618, -0.0948, +0.3641, -0.6222, +0.0030, -0.2741,
-0.5782, -0.3197, +0.5524, -1.0324, +0.3513, -0.5829, -0.1195,
+0.0778, -0.4998
],
[
-0.3566, +0.0670, +0.3577, +0.5295, -0.2605, +0.4155, -0.0461,
+0.0135, +0.4739, -0.3473, +0.8062, -0.0381, -0.5666, -0.3236,
-0.6585, -0.2073, -0.7391, +0.1240, +0.0095, -0.0581, -0.1210,
-0.0318, -0.3297, -0.1456, -1.0478, -0.3795, -0.9441, +0.5343,
+0.0613, -0.1349, -0.3619, -0.2339, -0.6799, +0.7598, -0.4541,
+0.0684, -0.1045, -0.0131, -0.8898, -0.0297, +0.3299, -0.0869,
-0.0549, -1.5833, +0.3829, -0.0927, +0.1164, -0.2028, +0.6262,
-0.2578, -0.2788, +0.4153, +0.3932, -0.1627, +0.2694, +0.2254,
+0.0485, -0.1574, +0.0571, +0.3084, -0.1627, -0.0165, +0.5349,
+0.0071, +0.2080, +0.2899, +0.1979, +0.3030, +0.5253, +0.1890,
+0.1115, +0.0426, +0.3508, +0.3014, -0.1113, +0.4422, +0.5590,
+0.2923, +0.2363, -0.3012, -0.1984, -0.0192, -0.0310, -1.1952,
-0.3245, -0.0701, -0.0854, -0.8582, -0.2550, -0.3040, -0.1221,
+0.2037, -0.5178, -0.2820, +0.0618, +0.6848, -0.2248, -0.1200,
+0.4649, +0.0764, +0.2552, -0.2030, -0.3596, +0.1910, -0.3935,
+0.4110, -0.2730, +0.3675, -0.5421, -0.0933, +0.2148, +0.0517,
+0.3015, -0.1655, +0.1093, -0.2327, +0.0471, +0.0495, -0.3547,
+0.1874, -0.4726, -0.4709, -0.3212, +0.0643, +0.4104, -0.2445,
-0.2715, +0.3497
],
[
-0.3460, -0.0247, -0.3679, +0.2280, +0.5255, +0.1160, -0.2873,
+0.0290, +0.0658, -0.6741, +0.0203, -0.3913, -0.5221, -0.6916,
-0.1703, -0.8972, -0.3779, -0.1352, -0.1635, -0.1056, -0.1549,
+0.1882, -0.2525, +0.0203, -0.3930, +0.0924, +0.1071, -0.6681,
+0.3224, +0.0302, -0.5123, -0.3224, +0.2410, +0.2783, +0.3554,
+0.1055, +0.2246, +0.4666, -0.0120, -0.0394, -0.4212, -1.6024,
-0.3518, +0.0282, -0.1718, +0.2194, -0.4077, +0.4651, +0.0556,
+0.5885, +0.2617, +0.0363, -1.5114, +0.0687, -0.5579, -0.2032,
+0.8185, -0.3873, -0.2995, -0.4347, +0.4054, +0.3423, -0.2112,
-0.5053, +0.1885, -0.1289, +0.0005, +0.2305, +0.1113, +0.1777,
+0.4680, +0.3591, +0.1185, +0.1904, +0.0793, -0.0519, +0.6681,
-0.1187, +0.0313, +0.3139, +0.4099, -0.3367, +0.0264, +0.7315,
-0.4034, -0.2053, -0.0312, -0.4737, -0.4315, +0.1450, +0.2905,
+0.0513, +0.1233, +0.3458, -1.0577, -0.4384, +0.2307, -0.2988,
-0.1299, +0.0850, -0.6017, -0.3036, -0.8614, -0.1928, -0.9890,
-0.4472, +0.0402, -1.5595, -0.4688, +0.1330, -0.8697, -0.8565,
+0.1233, -0.6775, +0.0021, -0.0146, -0.0389, +0.6895, +0.6748,
-0.5974, +0.0158, +0.0060, +0.3573, +0.0680, -0.0926, -0.2050,
-0.4197, -0.1597
],
[
+0.1891, +0.1122, -0.5610, -0.8994, -0.5449, +0.5029, -0.3364,
-1.2799, -0.9692, -0.5666, -0.2846, +0.1047, +0.4240, -0.4109,
-0.4119, +0.0980, -0.6415, +0.1580, +0.5822, +0.1547, -0.3187,
-0.0076, -0.5339, -0.1303, +0.3331, -0.3424, +0.0772, +0.1627,
-0.9344, +0.0275, -0.3101, -0.3224, +0.3266, -0.5172, -0.3805,
+0.9751, -0.9742, -0.8692, -0.0544, -0.3695, -0.1767, +0.2266,
+0.3360, -0.3269, +0.1319, -0.1249, +0.3600, +0.7534, -1.9579,
+0.0168, -0.4798, +0.3251, +0.2135, -0.3825, -0.1489, -0.3484,
+0.0174, -0.6041, -0.0943, +0.4646, +0.1946, -0.3953, +0.0745,
-1.0481, -0.0381, -0.0483, -0.6462, +0.2311, +0.0149, -0.2660,
+0.3559, -0.3173, +0.3713, +0.0860, +0.5249, -0.4836, -0.1196,
+0.4732, -0.0573, +0.0751, -0.3692, -0.0531, -0.5510, -0.8530,
-0.0153, +0.1275, -0.3970, +0.2427, +0.3064, +0.2559, +0.2124,
+0.3455, +0.3750, -0.3621, -0.3717, -0.3938, +0.2608, -0.3003,
+0.3163, +0.8138, -0.3208, +0.4267, +0.1622, +0.0802, -0.2647,
+0.2078, -0.1138, -0.8264, -0.1568, -0.6197, +0.3754, -1.2493,
-0.0603, -1.7287, +0.6661, -0.2143, +0.2809, -0.7342, -0.2961,
+0.1394, +0.2656, +0.1807, +0.5828, -0.5450, +0.3207, +0.0057,
+0.0039, +0.3181
],
[
-0.0280, +0.4064, +0.7043, -1.0389, +0.1923, -0.0091, -0.6891,
-0.1993, +0.0138, +0.1689, +0.2214, -0.2040, +0.1393, +0.2917,
-0.1474, -0.0371, +0.0503, -0.4136, +0.7585, +0.0595, +0.3568,
-0.6041, +0.2441, -0.5409, +0.1036, +0.2503, +0.2190, +1.0548,
-0.4079, -0.1604, +0.3204, +0.8233, +0.1581, -0.2578, +0.8894,
-0.3164, -0.2615, +0.0106, -0.1043, -1.6156, +0.1678, -0.4879,
-0.5066, -0.9457, -0.0135, +0.0765, +0.3967, -0.2431, +0.2703,
+0.3481, +0.1127, -0.2120, +0.2531, -0.0420, +0.2590, -0.2827,
+0.0810, +0.1260, -0.5727, -0.2298, -0.9872, +0.2354, -0.0038,
+0.1316, -0.2633, +0.3596, -0.4560, +0.0477, +0.3235, -0.2541,
+0.7789, -0.1461, -0.5637, -0.2323, -0.5482, +0.1176, -0.1410,
-0.6329, -0.5649, +0.3793, -0.0715, -0.3250, -0.5338, -0.7280,
-0.4626, -0.2513, -0.3433, -0.0288, -0.0091, +0.6689, -0.1540,
-0.7026, +0.3719, -0.2011, +0.0463, -0.5744, -0.5172, +0.0868,
+0.1896, +0.1622, -0.0330, -0.4509, +0.8395, -0.1213, +0.2093,
-0.2008, -0.7906, +0.4472, -0.3448, +0.4654, +0.2722, -0.1167,
+0.3253, +0.3181, +0.1789, +0.3003, +0.1138, +0.2300, -0.7714,
+0.1366, -0.2502, +0.1757, -0.5757, +0.4775, -0.0934, -0.1466,
-0.2202, -0.0247
],
[
-0.2769, -0.4844, -0.3218, -0.3712, -0.7460, -0.1288, -0.0567,
-0.1479, -0.1536, +0.3630, +0.3212, +0.1854, -0.5243, -0.5611,
-0.6008, -0.5943, -0.3658, -0.4895, +0.1720, +0.4116, +0.0328,
-0.0545, +0.1886, -0.3732, -0.1733, +0.2179, -0.5173, +0.2468,
-0.2607, -0.0678, -0.0360, +0.4942, +0.0898, +0.6431, -0.2604,
-0.4927, -0.1629, +0.2905, -0.0363, -0.5312, +0.1221, -0.0275,
-0.6900, +0.0902, +0.2585, +0.0698, -0.4510, -1.0082, +0.0656,
+0.1257, -0.0968, +0.0258, -0.1017, -0.1600, -1.1350, -0.1891,
-0.1417, +0.0675, +0.1163, -0.0818, -0.0141, -0.4789, -0.8681,
-1.1096, -0.3683, -0.4197, +0.0667, +0.4919, +0.0937, +0.4707,
-0.4875, -0.1434, -0.1309, -0.1494, +0.4293, +0.5837, +0.4335,
-0.3542, +0.2858, -0.4738, -0.2478, +0.2231, +0.0185, +0.1273,
+0.4333, +0.4926, -0.5192, +0.1540, -0.6322, +0.3050, -0.0561,
+0.4242, -0.6576, -0.1632, -1.3002, +0.2911, -0.8649, +0.2074,
+0.4507, -0.5161, +0.1431, +0.0479, +0.1763, -0.2115, -0.1463,
+0.7066, -0.1477, -0.2690, -0.2031, +0.2401, -0.5833, +0.4310,
+0.2519, +0.0066, +0.5099, +0.2787, -0.5172, +0.1471, +0.4772,
+0.2075, -0.7542, -0.3488, +0.1534, -1.9077, +0.0053, +0.2767,
+0.2842, +0.3068
],
[
+0.1627, +0.0932, -0.0692, +0.0333, +0.3671, +0.0617, +0.1825,
+0.2126, -0.1105, -0.3398, -0.2716, -0.4564, +0.0132, +0.0497,
+0.5738, -0.8727, +0.1385, -0.3337, -0.1708, -0.1856, -0.1948,
+0.6282, +0.6461, +0.5056, -0.0936, +0.3556, -0.7145, +0.2956,
-0.1190, +0.4536, -0.2825, +0.3600, -0.1794, +0.8380, +0.3844,
-0.3455, -0.3925, +0.2874, -0.4389, +0.2259, +0.0381, -0.4052,
-0.0864, +0.3228, +0.2352, +0.2540, +0.4140, -0.1725, +0.3446,
+0.0663, +0.1490, -0.7661, +0.4594, +0.3423, +0.0742, +0.2266,
+0.2925, +0.0026, -0.4927, +0.2868, +0.0799, +0.6575, -0.6153,
+0.2773, -0.1182, +0.0802, -0.7272, +0.1809, -0.1299, +0.0545,
+0.0146, -0.1938, +0.4941, +0.3754, -0.1482, +0.4977, +0.0251,
-0.4961, -0.3299, -0.1679, +0.3145, +0.4671, -0.2297, -0.1446,
+0.1179, -0.1848, +0.5299, +0.6279, +0.7981, -0.0374, -0.4267,
-0.1548, +0.0680, -0.1861, +0.1613, +0.0075, -0.6424, -0.3829,
+0.2955, -0.2015, -0.8048, +0.4824, +0.1460, -0.2246, +0.5881,
-0.2222, +0.1126, +0.3343, +0.8536, -0.6848, +0.5377, -0.0368,
+0.1391, -0.1359, -0.3442, -0.3034, -0.4391, +0.1235, -0.0406,
+0.0089, +1.2666, +0.0973, -0.6750, +0.1518, +0.3950, +0.1489,
-0.0426, -0.3698
],
[
-0.1249, -0.2586, +0.5466, +0.5176, +0.3041, -1.0447, -0.1893,
-0.3644, -0.0254, +0.3762, +0.1505, +0.3849, +0.5096, -0.4326,
+0.0874, +0.3525, +0.0466, +0.3403, +0.0881, -0.4224, +0.3217,
+0.3785, -0.2631, +0.4593, -0.2674, -0.1998, -0.1385, +0.2314,
-0.4872, -0.2581, -0.4222, -0.3113, -0.1859, -0.0358, -0.1046,
-0.7881, +0.0959, +0.1361, -0.5511, -0.1600, -0.1596, +0.4817,
-0.4353, +0.0208, -0.5273, -0.1125, +0.4736, -1.0330, -0.1043,
+0.4457, -0.3403, -0.5466, -0.6678, +0.3428, +0.1688, -0.0121,
+0.2024, -0.0831, -0.6305, -0.1766, +0.4599, -0.4830, -0.6242,
-0.2924, +0.4737, -0.1717, -0.4773, -0.3370, +0.8250, +0.2123,
+0.3336, -0.5152, -0.1952, +0.0197, -0.1985, +0.0554, -0.7055,
+0.6239, -0.2258, -0.6815, -0.4525, -0.2257, +0.0849, +0.6123,
+0.3497, +0.2228, -0.0817, -1.1429, +0.3994, -0.3225, +0.3986,
-0.0081, +0.2777, -0.2004, -0.7202, -0.4120, +0.3846, -0.2532,
-0.8475, -0.8692, -0.0659, -0.6207, +0.2605, +0.0058, -1.5390,
-0.2211, -0.2353, +1.4223, +0.4847, +0.0080, +0.2586, +0.0283,
-0.3735, -1.1436, +0.3844, +0.7311, -0.5703, -0.1188, +0.0333,
-0.3770, -0.4160, -0.5079, -0.5831, -0.2401, -0.5277, +0.2274,
+0.0551, -0.0440
],
[
-0.2948, -0.2675, -0.3498, +0.0672, -0.6153, -0.3831, +0.2727,
-0.4699, -0.4750, +0.4319, +0.3771, +0.0785, +0.0081, -0.2131,
-0.3405, +0.0619, +0.4527, +0.1726, -0.1512, +0.1530, -0.2053,
-0.3135, -0.0249, +0.3128, -0.4377, +0.2800, +0.3620, -0.8843,
-0.6940, +0.2616, +0.2476, -1.4521, +0.1270, +0.2371, +0.1431,
-0.0657, +0.4752, -0.3677, -1.3756, -0.3213, +0.1166, -0.2154,
-0.5762, +0.0691, +0.4376, +0.1365, +0.4809, +0.0093, +0.0810,
-0.1205, +0.2233, -0.0932, -0.2922, +0.0394, -0.1762, +0.2118,
+0.2160, +0.1256, +0.0622, +0.6482, -0.1080, +1.0528, +0.4798,
+0.2713, +0.0338, -0.0506, -0.7699, -0.4552, +0.3653, +0.3004,
+0.3685, +0.2309, +0.0852, +0.3183, -1.8090, -0.1127, -0.3724,
+0.2288, -0.0874, +0.6633, +0.5991, -0.9262, +0.2167, +0.2694,
-0.0009, -0.3100, +0.1314, -0.6853, -0.6782, -0.1331, -0.5942,
-0.3491, +0.4602, +0.3154, +0.5430, -0.6382, +0.4787, -0.0395,
-0.4122, +0.0822, +0.1106, -0.2653, -0.1207, +0.2503, +0.0988,
-0.1648, +0.3514, +0.2168, -0.4024, -0.0270, +0.0449, -1.0198,
+0.0083, -0.1998, +0.2589, -0.8012, +0.6724, -0.8470, -0.8099,
-0.5403, -0.3611, -0.1546, -0.0373, +0.0279, -0.1776, -0.1399,
-0.2660, +0.2166
],
[
+0.1468, +0.1605, -0.7206, -0.1315, +0.1711, -0.0937, +0.1821,
-0.2529, +0.3596, +0.5021, +0.2899, -0.1337, -0.0562, -0.3750,
-0.0213, -0.3416, +0.1163, -0.3959, -0.1453, -0.0226, -0.2151,
-0.1230, -0.1099, +0.0746, -0.1753, +0.3563, -0.0813, +0.1116,
+0.0483, -0.0490, +0.1264, -0.4237, -0.3931, -0.0208, -0.4030,
-0.0314, -0.3142, +0.4447, -0.6161, -0.4919, -0.0185, +0.4071,
+0.1044, -0.8308, -0.2317, +0.1504, -0.5274, -0.3391, +0.1318,
-0.0589, -0.0422, +0.1590, -0.4159, +0.4971, -0.8429, +0.1896,
+0.4471, +0.5414, +0.2074, -0.1459, +0.1880, +0.3062, -0.2648,
+0.3391, -0.1389, +0.3524, -0.4327, -0.6014, +0.1049, -0.1632,
-0.2835, +0.4386, -0.1937, +0.4618, +0.0392, -0.0116, +0.0426,
+0.5406, -0.3774, +0.1741, +0.7146, -0.1920, +0.2511, -0.4769,
+0.4593, -0.7666, +0.2325, -0.4224, -0.1658, +0.0027, -0.0929,
-0.4411, +0.0945, +0.5415, +0.5139, +0.0461, +0.0246, +0.2496,
-0.0086, -0.0972, -0.1127, +0.5594, -0.3779, +0.3021, +0.7424,
-0.7742, +0.4401, +0.3576, +0.2463, +0.2575, +0.0405, +0.2020,
+0.0751, -0.8001, -0.0496, +0.0415, +0.9000, +0.1513, -1.0452,
-0.5526, +0.0345, +0.4193, -0.0346, -0.4692, +0.3855, +0.3223,
-0.0605, +0.2719
],
[
-0.3304, -0.5461, +0.1105, -0.4784, +0.8587, +0.2012, +0.2441,
+0.3844, -0.0353, +0.2197, -0.4514, -0.1174, -0.3371, +0.6193,
+0.5215, +0.0668, +0.2793, +0.0772, -0.3565, -0.7580, +0.0638,
-0.0688, -0.5527, +0.5006, +0.2147, +0.3336, -0.1883, -1.2293,
-0.3735, +0.2956, -0.8615, -0.1399, -0.5002, +0.2953, -0.4261,
+0.0643, -0.4822, +0.5314, +0.2076, -0.1544, -0.3966, -0.0563,
-0.2849, -0.1433, -0.1702, +0.3132, -0.2281, -0.4330, +0.4116,
+0.5219, -0.1308, -0.7934, +0.2316, +0.3169, +0.0822, -0.0786,
-0.2983, -0.2409, -0.4243, +0.0411, +0.3327, +0.0553, -0.1777,
+0.4661, -0.0095, -0.7174, +0.2346, +0.1966, +0.1853, +0.1936,
-0.1538, +0.0861, +0.1072, +0.4284, -0.4048, +0.2446, +0.0831,
-0.3847, +0.1770, +0.3220, +0.3967, +0.0473, +0.0293, +0.7619,
+0.6648, -0.0809, +0.2496, -0.5963, +0.6097, -0.0942, -0.8157,
+0.0398, +0.1286, -0.3510, -0.0276, -0.4275, +0.2023, +0.1366,
+0.7164, +0.2144, -0.4248, +0.3646, +0.2128, -0.2796, +0.4273,
-0.5062, +0.4408, +0.1130, -0.3957, -0.1383, +0.0493, +0.0144,
+0.2001, +0.1299, -0.1198, +0.2288, -0.3851, +0.2022, -0.8794,
+0.6381, -0.2271, +0.0757, +0.6264, +0.1603, +0.1398, -0.5992,
-0.5076, +0.4124
],
[
-0.4653, +0.2013, -0.6020, +0.2698, -0.1746, -0.0599, -0.8512,
+0.4460, -0.1568, -0.4250, +0.2001, -0.3419, +0.2083, +0.2165,
-0.5031, -0.0974, +0.3348, +0.1919, +0.1043, -0.4256, +0.1499,
+0.4606, -0.4713, +0.0558, -0.1333, -0.1562, +0.2721, -0.0773,
-0.2043, -0.3741, +0.0581, +0.4342, +0.2091, -0.0077, +0.0282,
+0.2841, -1.2537, -1.8891, +0.4342, -0.0602, -0.1319, +0.1746,
+0.2591, +0.1119, +0.3353, -0.0305, -0.3655, +0.1949, -0.1574,
+0.2060, -1.2330, +0.1509, -0.1390, -0.3126, -0.2878, +0.2268,
+0.0015, -1.0281, +0.2765, -1.6093, +0.1884, -0.1080, +0.2171,
+0.1133, -0.3212, +0.0263, +0.4069, -0.3031, +0.2118, +0.2243,
-0.2194, -0.4348, +0.5289, +0.2021, +0.2662, -0.4296, +0.5655,
-0.0506, +0.0327, +0.4667, -0.6388, -0.3863, +0.0791, -1.9748,
-0.7543, +0.1312, +0.3268, +0.0462, -0.3009, +0.1676, +0.6889,
-0.7325, +0.0110, +0.1539, +0.3882, -0.3300, -0.0346, -0.1716,
-0.1278, -0.3229, -0.2927, -0.5614, -0.2496, +0.1966, +0.2029,
-0.0382, -1.7907, +0.1786, +0.1333, +0.4529, +0.4298, +0.0186,
+0.1384, -0.1147, +0.1967, -0.7488, +0.2887, -0.5002, -0.7509,
+0.0944, -0.2133, +0.0350, +0.0661, -0.4390, -0.3448, +0.0407,
+0.4323, -0.2928
],
[
-0.0147, +0.4875, +0.8101, -0.4856, -0.0523, -0.3526, -0.6560,
+0.7529, +0.3859, -0.4544, +0.2600, -0.4286, +0.2637, +0.2107,
-0.1284, -0.1439, -0.1315, -0.8336, -0.1653, -0.3187, -0.2810,
+0.2348, -0.2626, -0.0338, -0.3902, -0.1839, -0.2207, +0.4949,
+0.3045, +0.1014, -0.0807, +0.2200, -0.2967, -0.9202, +0.1546,
-0.4144, -2.4271, -0.0263, +0.4327, -1.0839, -0.1580, +0.1715,
-0.0484, -0.3445, +0.0727, -0.0436, +0.2518, +0.1517, -0.3360,
-0.4268, +0.6681, -0.1279, -0.4185, +0.2538, -0.5804, -0.3471,
-1.3149, +0.2916, -0.2334, -0.3507, -0.2264, +0.2349, -0.0306,
-0.3034, +0.2636, +0.0868, -0.0179, -0.4565, -0.5344, -0.0175,
+0.4892, +0.3090, -0.0237, +0.0411, +0.4259, -0.0677, -2.6061,
+0.2379, +0.2025, -0.1072, +0.1846, -0.0507, -0.2904, +0.1787,
-0.3208, +0.3360, -0.4238, +0.0671, +0.2267, -0.5225, -0.0235,
+0.5066, +0.1909, +0.0754, -0.2492, -0.1392, +0.1230, -0.5220,
-0.1610, +0.4429, -0.4891, +0.1724, -0.4112, -0.1275, +0.1159,
-0.4529, +0.3007, +0.1971, +0.3724, +0.7879, +0.0329, +0.5699,
-0.0586, -0.5128, +0.0962, +0.8331, +0.2197, +0.1847, -0.9229,
+0.4521, -0.6595, -0.0474, +0.4472, -0.4547, +0.2102, -0.4422,
+0.1127, -0.0290
],
[
-0.2493, -0.1717, +0.8604, -0.7542, -0.4952, -0.4122, +0.3039,
-0.4902, -0.2674, -0.0523, -0.2816, +0.0923, +0.1304, -0.1094,
-1.1880, +0.2816, -0.6017, +0.0202, -0.0796, +0.0304, -0.3512,
+0.6504, -0.2099, -0.2257, +0.2124, +0.7719, -0.0650, -0.0281,
-1.1338, -0.5571, +0.1130, -0.1291, -0.7798, -0.2749, -0.5566,
-0.5531, -0.0331, +0.0040, +0.2884, +0.6961, -0.4286, +0.2904,
-0.7414, -0.6173, +0.2576, -0.0848, -0.2355, -0.4300, -0.0720,
-0.4600, -0.2441, -0.7780, +0.1823, +0.1500, +0.4750, -1.0978,
+0.2718, +0.0556, +0.2923, -0.6138, +0.3705, +0.4232, -0.2120,
+0.7972, -0.1092, -0.3228, +0.0125, +0.0879, -0.3939, -0.4144,
-0.3686, -0.6152, -0.3841, +0.0532, +0.0814, -1.0093, +0.1254,
-0.0881, +0.1689, +0.2619, -0.4825, +0.3082, -0.1236, -0.1613,
-0.0734, +0.2569, +0.5777, -0.5805, -0.2631, +0.2626, -0.4563,
+0.2758, +0.5379, +0.4217, +0.3193, -0.2149, +0.2140, -0.0421,
-0.4922, +0.2960, +0.3118, +0.2100, -0.2256, +0.2403, +0.5520,
-0.4115, +0.1593, -0.9762, -0.2154, -0.1241, -0.1680, -0.1556,
-0.1731, +0.1911, -0.1973, -0.4498, -0.3598, -0.5564, -0.5785,
-0.0956, +0.0273, -0.1774, -0.9415, +0.2808, -0.0721, +0.4881,
-0.1166, +0.6181
],
[
-0.0386, +0.1265, +0.0809, +0.3067, +0.2942, +0.6539, +0.3153,
+0.2100, -0.8135, -1.1951, -0.5475, -0.2480, +0.3113, -0.0350,
-0.1093, -0.0782, -0.0263, -0.0035, +0.5323, -0.0407, -0.1539,
-0.1074, +0.0438, +0.6214, -0.4991, +0.4022, +0.1268, +0.1501,
+0.5942, -0.0779, -0.0857, +0.0398, -0.2412, -0.2143, -0.5008,
+0.0085, -0.1958, +0.6470, +0.6039, -0.3274, -0.3645, +0.0357,
+0.0763, -0.0920, -0.1759, +0.1859, +0.5215, -0.1242, +0.0176,
-0.0861, -0.3552, +0.3534, +0.2396, -0.0521, +0.1086, +0.1892,
-0.3544, +0.1131, +0.2045, -0.1855, -0.0832, -1.3791, -0.3627,
+0.0419, -0.6307, -0.2916, +0.1963, -0.1953, -0.2043, +0.2270,
+0.2018, +0.0349, +0.0691, +0.4119, +0.0290, -0.3374, +0.0907,
-0.7810, -0.2323, +0.1644, +0.0718, +0.0805, +0.2973, +0.3208,
-0.1966, +0.1936, +0.1871, +0.3255, +0.1103, -0.0079, -0.3683,
+0.1853, +0.3867, -0.4835, -0.4310, -0.7948, +0.1171, +0.1882,
+0.4107, -0.2627, -0.0889, +0.1034, +0.2893, +0.0155, +0.0468,
-0.1663, +0.0111, +0.5293, +0.3996, -0.0184, +0.4669, -0.8742,
+0.0247, -0.1326, +0.2207, +0.2451, +0.1919, -0.2266, -0.3340,
+0.3490, -0.1676, +0.0994, -0.7257, +0.3689, -0.1808, +0.0645,
-0.1275, -0.5453
],
[
-0.4692, -0.1094, +0.0659, -0.0332, -0.6763, -0.1042, +0.2339,
+0.7994, -0.0650, -0.2682, -0.3786, -0.0333, +0.1062, +0.4097,
+0.2444, +0.0130, +0.0209, -0.1417, -0.1984, -0.2713, -0.0615,
-1.2190, +0.6678, +0.1132, -0.1875, -0.0167, -0.4101, -0.2796,
-0.0204, -0.1975, +0.5072, -0.0493, -0.0868, -0.1605, -0.1213,
+0.1540, +0.2977, +0.0866, -0.5916, -0.6326, +0.2374, +0.1011,
+0.4999, -0.2778, -0.3732, -0.2621, -0.0390, -0.8054, +0.2065,
+0.4601, +0.2876, +0.6231, -0.5389, -0.0494, -0.4927, -0.0118,
+0.3478, -0.0583, +0.4044, -0.6412, -0.0135, -0.2810, +0.3404,
-0.6886, +0.1874, +0.1197, -0.6534, -1.3675, +0.0585, -0.9163,
-0.0759, +0.1681, -0.2062, +0.1995, +0.3509, +0.1485, -0.1827,
-0.0109, -0.0520, -0.6026, -0.0309, +0.1481, +0.0998, -0.2491,
-0.1177, -0.0872, +0.3857, -0.0211, +0.1284, -0.1741, -0.2777,
-0.3062, -0.3271, +0.4802, +0.1352, -0.4759, -0.2406, +0.6412,
+0.5612, -0.1696, +0.2148, +0.1793, +0.0844, -0.0520, -0.0351,
-0.4822, +0.5715, +0.0096, -0.2875, -0.1981, -0.2082, -0.1329,
-0.3028, -0.3576, -0.7867, -0.0568, +0.4579, -0.2249, -0.1251,
-0.5290, -0.4751, +0.2220, -0.7200, +0.4872, +0.2924, +0.0189,
+0.2746, -0.4167
],
[
-0.2192, +0.0070, +0.6801, +0.0934, -0.7869, -0.0606, +0.2998,
+0.3192, -0.6894, +0.1938, -0.5750, -0.3213, +0.2864, -0.3018,
+0.5830, -0.0907, -0.6795, -0.1959, +0.0498, +0.0393, +0.0955,
+0.1378, -0.1448, -0.3244, +0.1325, -0.4948, -0.1462, +0.2279,
-0.1034, +0.5426, +0.2497, -0.4908, -0.7263, -0.6941, -0.2128,
+0.1560, +0.3640, +0.5246, -0.0872, -1.1948, +0.4199, -0.3246,
+0.5204, -0.2219, +0.5650, +0.1194, -0.7113, -0.7679, +0.0195,
-0.0896, +0.1393, +0.2413, -0.2315, -0.1455, -1.3670, +0.3861,
+0.4419, +0.0639, -0.4954, -0.1562, -0.0227, +0.5195, +0.0172,
+0.2498, -0.2056, +0.1355, -0.4583, -0.3596, +0.3332, +0.4399,
-0.5906, -0.0869, +0.3909, +0.2269, -0.1635, -0.5459, -1.0745,
+0.2837, -0.3212, +0.4923, +0.1431, +0.4467, +0.0149, +0.2936,
-0.2570, +0.3208, -0.3007, +0.3253, -1.0951, -0.2871, +0.2600,
+0.1508, -0.0872, +0.0481, -0.2920, +0.5201, -0.5000, -0.0068,
+0.0013, -0.9657, +0.1424, -0.0475, +0.1101, +0.5505, -0.5671,
+0.3893, -0.3366, +0.3850, -0.1474, +0.0822, +0.7856, -0.3526,
-0.0134, -0.2321, +0.0435, -0.8470, -0.2428, +0.4091, -0.3198,
-0.1435, -0.5942, +0.1096, -0.6466, -0.1988, -0.0292, +0.2447,
+0.0265, +0.0018
],
[
-0.0352, -0.2346, +0.0859, +0.1758, -0.3347, +0.1836, +0.0254,
+0.2248, +0.2209, +0.4463, +0.2712, -0.3749, +0.2835, -0.2375,
-0.3635, +0.2696, +0.1382, -0.3179, -0.1828, -0.0968, -0.2109,
+0.3391, +0.1696, -0.3930, +0.2445, +0.3224, +0.0135, +0.0054,
-0.1534, +0.3513, -0.0069, -0.3339, +0.0513, -0.0552, -0.3873,
-0.7594, -0.5288, +0.2727, +0.5225, -0.5971, -0.0917, -0.0256,
+0.1634, +0.1411, +0.0724, -0.0886, -0.2640, -0.4610, -0.2578,
-0.1688, +0.4008, -0.1871, +0.1569, +0.5000, -0.0814, +0.1857,
+0.3914, +0.2859, -0.6269, -0.0063, -0.3616, -0.4331, -0.2693,
+0.0870, +0.0893, -0.2255, +0.2131, +0.4674, +0.5417, +0.4653,
-0.0902, +0.2695, -0.5260, -0.1611, -0.3307, +0.1241, -0.1217,
-0.1343, +0.3153, +0.0153, +0.1986, -0.0012, +0.1353, +0.0304,
+0.1240, -1.3306, -0.3043, -0.1730, +0.2383, +0.1076, -0.4895,
-0.0328, +0.0427, -0.0550, +0.4644, +0.2662, +0.3317, -0.0916,
-0.2966, -0.8270, -0.0122, -0.1229, -0.1763, -0.2821, +0.1425,
-0.5055, +0.1814, -0.4447, -0.0676, -0.0509, -0.3788, -0.2959,
+0.3250, -1.3882, +0.0385, +0.2324, -0.0204, -0.1502, +0.4036,
+0.1506, +0.5967, -0.8639, +0.0580, -0.8099, +0.3450, -0.1774,
+0.0239, -0.1997
],
[
+0.5684, -0.0216, -0.5538, +0.2260, -0.2572, +0.4158, +0.0583,
-0.2957, -0.3397, +0.2123, -0.4795, -0.6243, -0.0550, -0.1616,
-0.2777, +0.4278, +0.1357, -0.5469, -0.8075, -0.0971, +0.5367,
+0.4817, +0.0394, +0.5311, -0.9895, -0.6751, +0.4951, +0.2023,
+0.3504, -0.1906, -0.0098, -0.3285, -0.3039, -0.0281, -0.1405,
-0.3270, +0.6313, -0.0546, -0.5070, -0.4210, -0.6010, +0.0226,
-1.0871, -0.3306, -0.0799, -0.2131, +0.1067, -0.1444, +0.5036,
+0.0780, -0.2379, +0.1027, +0.0106, +0.0445, +0.4597, -0.1717,
+0.0732, -0.1699, +0.0170, +0.3763, +0.1521, -0.1140, -0.0989,
+0.3150, -0.2521, -0.2231, +0.2822, +0.4532, -0.2959, +0.0680,
+0.0628, +0.5487, -0.4580, -0.2850, -0.8598, +0.2140, +0.0685,
-0.0025, -0.0184, -0.6549, -0.4093, -0.1550, +0.1699, -0.2195,
-0.3559, -0.0794, -0.2342, -0.8754, -0.0657, +0.1804, -0.7524,
-0.0975, +0.3923, -0.0837, -0.2477, +0.3972, -0.1719, -0.1009,
+0.4203, +0.1686, -0.0107, +0.0231, -0.4684, +0.8655, +0.3072,
-0.6401, -0.0229, +0.1848, +0.3838, +0.1357, -0.3682, -0.0991,
-0.0411, -0.3113, +0.0033, +0.5014, +0.6548, -0.5687, -0.8568,
+0.3984, -0.2581, -0.7770, +0.2297, -0.1984, -0.2011, -0.1527,
+0.2363, +0.5539
],
[
-0.1108, +0.0564, +0.4279, -0.2463, -0.0422, +0.0309, +0.4093,
-0.2905, +0.1476, +0.8005, -0.0774, +0.1371, -0.0529, -0.1442,
-0.0326, +0.5159, -0.1651, +0.1366, +0.1476, +0.2852, -0.4420,
-0.3403, -1.1074, +0.2820, -0.4842, -0.2464, -0.9850, +0.2316,
-0.7857, +0.5891, -0.1194, -0.3680, -0.6987, +0.4992, +0.5802,
-0.2089, -2.3145, -0.8983, +0.2000, +0.2328, +0.1504, -0.9694,
-0.0397, +0.5221, +0.2330, -0.5433, -0.5676, -1.0922, +0.3621,
-0.4841, +0.2218, -0.1468, -0.2117, -0.1497, -0.0331, -0.4497,
-0.3092, -0.4888, +0.4701, -0.6076, +0.2927, +0.2463, +0.0962,
+0.4161, +0.2268, -0.1202, +0.7343, +0.3379, +0.3461, -0.0830,
+0.0510, -0.1624, -0.2082, +0.5592, -0.1132, -0.0248, -0.3746,
+0.2488, -0.2618, -0.7138, -0.8501, -0.6333, +0.1322, -0.7100,
+0.1321, -0.7706, +0.2088, +0.0503, -0.0784, +0.2525, -1.4688,
+0.1988, -0.1214, -0.7245, +0.0321, +0.0732, -0.0386, +0.0683,
+0.3169, +0.1711, +0.1284, -0.3320, +0.0664, -0.2360, +0.2742,
+0.0370, +0.1123, +0.3008, +0.0063, -0.9690, -0.1391, -1.2992,
+0.2320, -0.8184, -0.0203, -1.3783, +0.1210, -0.0003, -0.5927,
+0.1050, -1.0701, -0.0294, +0.4847, -0.3004, +0.3503, -0.3461,
-0.0561, +0.2380
],
[
+0.3711, -0.5122, +0.3078, +0.0482, +0.6545, -0.8755, +0.6540,
+0.0361, -0.1457, +1.1641, +0.0596, -0.1890, +0.3317, +0.1001,
-0.2053, -0.4404, -1.2854, -1.4911, +0.2391, -1.8613, -0.0210,
+0.3602, -1.2691, -0.5337, -0.3914, +0.1565, -0.1174, -0.0831,
-0.4771, -0.0826, -0.0608, -0.3441, -0.1007, -0.8418, -0.5199,
-0.1941, -2.3056, +0.0221, +0.4960, -0.5980, -1.6187, -0.4539,
-0.0842, -1.0271, +0.5722, +0.1285, +0.0500, +0.3821, +0.2063,
+0.1320, -0.0600, -0.1402, -2.6885, +0.1343, +0.3053, -0.8118,
+0.2949, -0.2179, +0.0871, -0.2730, +0.5062, -1.1280, +0.0665,
-1.2273, -0.5753, -0.4810, +0.1590, +0.6403, -0.8059, +0.4987,
-1.2487, +0.1267, -0.1181, -1.5417, -0.2113, +0.0210, +0.4132,
-1.4785, -0.5953, +0.2741, +0.6321, +0.4405, -0.3417, +0.1373,
-0.2862, -0.5855, +1.0328, +0.2625, -0.2791, -0.8597, -0.3383,
+1.1336, -0.7141, +0.0972, -1.1505, -0.1554, +0.4601, -0.3757,
-0.1693, -0.5801, -0.1438, -1.8935, -0.0646, -0.5465, -0.4190,
-0.0872, +0.1621, -1.1848, -0.4613, -1.3268, +0.1992, -0.3329,
+0.3877, +0.4914, -0.2977, +0.0102, -0.0888, -1.0388, -0.3733,
-1.0903, -0.2616, +0.4798, -0.0164, -1.3258, -0.2859, -0.7703,
-0.0513, +0.1921
],
[
-0.4288, +0.4251, -0.8418, +0.2533, -0.2653, -0.3997, -1.4284,
+0.1745, -0.1872, -0.2045, +0.1453, +0.0717, -0.1374, +0.3480,
-0.1474, -0.0239, +0.3027, +0.1502, -0.0728, +0.1668, -0.1015,
-0.2752, -0.8042, -0.9138, +0.2101, -0.4770, +0.2333, -0.7844,
+0.1624, -0.2283, +0.4549, -0.1719, -0.6743, -0.2464, -0.0733,
+0.1715, -0.3638, -2.5636, +0.0812, -0.1348, +0.2581, +0.1219,
-0.0507, -0.4468, +0.7635, -0.3612, +0.3942, +0.2470, +0.2363,
+0.2047, -0.4177, +0.2005, -0.3347, -0.3090, -0.0926, -0.1653,
+0.1673, -0.7612, +0.5780, +0.0328, -0.2601, -0.0094, +0.2529,
+0.3364, -0.2499, +0.1589, +0.3585, -0.0452, -0.3698, +0.2585,
-0.5148, -0.0327, -0.3892, -0.6538, +0.0538, +0.0272, -0.0580,
+0.0795, -0.0597, -0.5941, +0.0464, -0.1051, -0.2354, -0.4521,
-0.2243, -0.2938, +0.0795, +0.0080, -0.2831, +0.0473, +0.2404,
+0.3361, +0.0059, +0.5983, +0.2747, +0.7289, +0.2341, -0.4635,
-0.4206, +0.4859, +0.0844, -0.9279, +0.4085, +0.2349, -0.0769,
+0.1156, -1.2277, +0.1506, -0.5583, +0.2217, -0.0559, +0.6840,
+0.1522, -0.2177, -0.2074, -1.2565, +0.4764, -0.2736, -0.8419,
+0.4239, -0.8196, -0.1434, +0.1341, -1.6368, -0.1080, -0.1508,
+0.3498, +0.0597
],
[
-0.3368, +0.2092, +0.5426, +0.2464, +0.4091, +0.1462, -0.2139,
-0.0042, +0.2487, -0.3703, +0.6684, +0.2895, -0.2406, -0.0249,
+0.1861, +0.8212, -0.3994, +0.2464, +0.3857, -0.2585, +0.5531,
-0.4998, -0.3836, +0.2367, +0.3707, +0.1389, -0.5142, +0.1372,
+0.4955, -0.4030, -0.2420, +0.0510, +0.0492, +0.1571, +0.0938,
+0.3084, -0.8010, -1.0584, +0.4347, -0.2451, -0.2941, -0.6722,
+0.1788, -0.1841, +0.1940, -0.1804, +0.1675, +0.4239, -0.0414,
-0.1289, +0.1509, -0.0777, +0.1962, +0.0061, +0.5778, -0.5213,
-1.1370, -0.3478, +0.7225, -0.5497, +0.1487, +0.5139, -0.1376,
-0.4082, -0.6474, +0.2995, +0.2718, -1.0549, -0.5648, +0.2498,
-0.0656, -0.6016, -0.0853, -0.0022, -0.9858, +0.5829, -0.2664,
-1.0040, -0.1400, -0.9499, -0.3189, -0.9519, +0.2420, -0.4990,
-0.1585, -0.2386, -0.3771, -0.4614, +0.3374, +0.9149, -0.1293,
+0.1826, +0.1705, -0.5659, +0.0785, -0.2180, +0.3528, -0.4823,
+0.4564, +0.4903, -0.4538, +0.4069, -0.1866, +0.0438, +0.0104,
+0.0877, -0.0139, +0.0428, +0.7354, -0.0088, -0.1079, -0.3406,
+0.0882, -0.2470, -0.3901, +0.0013, +0.3204, +0.0920, +0.0433,
-0.0745, -1.8448, +0.1017, -0.2950, -0.3807, -0.4693, -0.1578,
-0.0062, -0.9509
],
[
+0.0735, +0.1321, +0.3943, -0.1060, -0.2087, +0.0374, +0.2031,
+0.3275, -0.2623, -0.4766, -0.5651, -0.3270, +0.5977, -0.3862,
-1.0126, +0.8957, -0.4264, -0.2896, +0.3246, -0.1566, +0.2141,
-1.0416, +0.0665, +0.3242, -0.6463, +0.2869, -0.2468, +0.5867,
-0.3243, -0.6057, +0.1175, -0.3758, -0.8298, +0.2636, -0.3871,
+0.1890, +0.0788, -0.0283, -0.2311, -0.8094, -0.1637, -0.0869,
-0.6365, -0.0403, -0.0636, +0.4252, +0.0832, -0.4041, +0.2266,
+0.5261, -0.2208, +0.1190, +0.3236, -0.3321, +0.6679, +0.1548,
-0.2739, -0.2599, +0.8133, -0.5076, -0.2133, +0.3947, +0.2446,
+0.1589, -1.2893, -0.3493, +0.5011, -0.3098, -0.0777, -0.1097,
+0.1047, -0.5573, +0.4166, -0.0184, +0.0405, -0.1612, -0.0876,
+0.5153, +0.2051, +0.4426, +0.0784, +0.1342, +0.3865, -0.2819,
+0.2431, -1.7776, -0.0231, -0.2412, -0.1342, -0.4856, +0.2141,
+0.7703, +0.2504, +0.1393, -0.0915, +0.1368, -0.0667, +0.0242,
+0.3334, +0.0918, -0.2404, +0.3281, -0.4851, +0.2643, +0.1716,
+0.2915, -0.6321, -1.5506, +0.4295, +0.1463, +0.9801, -0.1069,
+0.4641, -0.1582, +0.0235, -0.0070, -0.3506, -0.1147, -0.1624,
+0.3232, -0.2713, +0.0645, -0.0997, +0.1111, -0.0303, -0.0156,
-0.3195, +0.1981
],
[
-0.3898, +0.4090, -0.2097, +0.1600, +0.9530, -0.0467, -0.5210,
-0.6534, +0.2393, -0.5750, +0.0579, +0.0129, +0.0012, -0.3364,
-0.6190, +0.4100, +0.4866, -0.1644, +0.3908, -0.2042, +0.6471,
+0.3077, -0.4285, -0.1191, -0.3428, +0.3255, +0.1868, +0.3541,
-0.3422, +0.0310, +0.0242, +0.3750, -0.6444, -0.1468, +0.5030,
+0.3276, +0.3054, -0.0126, -0.5548, +0.3717, +0.1292, +0.1356,
+0.0733, -1.4465, -0.2176, -0.1912, -0.4031, -0.1692, -0.8686,
+0.2757, -0.1966, +0.3493, -0.6788, -0.3001, -0.7071, +0.1474,
+0.1610, -0.5100, +0.1408, +0.0447, -1.2487, -1.2027, +0.5734,
-0.8365, +0.8718, -0.0544, +0.3266, -1.3697, +0.1424, +0.2102,
-0.4280, +0.2247, -0.2800, +0.4358, +0.2002, +0.3223, +0.0825,
+0.2227, +0.2776, -0.7352, +0.0881, +0.1030, +0.1458, -0.9093,
-0.4533, +0.3908, +0.2786, +0.0234, +0.0198, +0.1295, +0.1039,
-0.1075, -0.7748, +0.2882, -0.7246, +0.6064, +0.4672, -0.3293,
-0.8341, +0.4853, +0.1837, -0.0405, -0.2606, +0.8020, +0.5313,
+0.0470, -0.1615, +0.3864, -0.5154, +0.3749, -0.2012, -0.2818,
+0.4120, -0.3524, -0.3061, -0.0957, -0.2543, +0.1877, -0.9297,
-0.3793, -0.4868, +0.0211, -0.2999, +0.4195, +0.3744, -0.4142,
-0.1698, +0.4015
],
[
-0.0032, -0.1157, -0.7626, -0.0068, -0.1563, -0.2504, +0.2543,
+0.2672, -0.6115, -0.1193, -0.0808, +0.1192, -0.2597, +0.1511,
+0.3637, -0.3866, +0.5929, +0.4882, -0.2581, +0.1825, -0.4854,
-0.0445, +0.2782, +0.1469, -0.1412, -0.3135, -0.6042, -0.1404,
-0.0105, +0.0360, +0.3121, -0.8642, +0.6442, +0.7403, -0.1205,
-0.5401, +0.0028, -0.0572, +0.2258, -0.0124, +0.0772, +0.0219,
+0.3767, +0.4778, +0.2339, -0.0446, -0.1991, +0.4794, +0.1058,
-0.2104, -0.4589, +0.0639, +0.5277, -0.6294, -0.3231, +0.3277,
-0.0388, +0.1474, -0.5446, +0.3626, +0.7244, -0.2283, -1.0641,
-0.1651, +0.3294, -0.3860, +0.3972, +0.5262, +0.2624, +0.8364,
+0.0352, -0.0869, +0.0968, -0.2432, +0.3734, +0.2818, +0.1536,
-0.0855, +0.7703, -0.2547, -0.0042, +0.4066, +0.3864, -0.0546,
-0.1226, +0.4063, +0.0661, -0.0131, -0.9015, -0.3502, -0.3724,
+0.2265, -0.3302, -0.4852, +0.0126, +0.4551, -0.9050, +0.4911,
-0.2053, -0.0778, +0.1277, -0.5993, -0.0439, -0.0805, -0.2957,
+0.0837, -0.2367, -0.2302, -0.3742, +0.2723, -0.4829, -0.6283,
-0.0074, -0.1597, +0.7585, -0.1683, +0.1012, -0.5039, +0.1081,
+0.4095, -0.0830, +0.2724, -0.1499, +0.1271, -0.1143, +0.4757,
+0.7004, -0.0638
],
[
+0.5604, +0.1489, +0.7939, +0.0495, -1.6573, -0.0948, +0.0507,
-0.2181, -0.8301, +0.2946, -0.8348, +0.0103, -0.0534, -1.9317,
+0.3094, +0.4461, -0.5899, +0.0193, +0.8027, +0.2224, +0.3142,
-0.0909, +0.1901, +0.3415, -0.7243, +0.1823, +0.1726, +0.4332,
-0.1833, +0.0276, +0.4527, -0.4807, -0.2075, +0.1518, +0.4846,
+0.0998, +0.0544, -0.0686, -0.2238, -0.6958, -0.3039, -0.0180,
-0.8097, -0.0506, -0.0706, +0.0831, +0.1330, -0.4893, +0.3243,
+0.3210, +0.1427, -0.1627, +0.1470, -0.4862, -0.2567, +0.3008,
+0.0347, -0.1611, -0.7610, -0.2009, +0.1149, +0.2351, +0.4647,
-0.5399, -0.3643, +0.2018, -0.0247, +0.0455, +0.0261, +0.2084,
+0.2071, -0.0061, +0.5638, +0.4168, +0.1248, -0.0494, -0.5499,
+0.1191, -0.3897, -0.8652, -0.0281, -0.0855, -0.0485, -0.0882,
-0.3547, -0.2732, +0.5744, +0.3363, +0.2781, +0.1670, +0.2282,
+0.1124, -0.0644, +0.2288, -0.0276, -0.4266, +0.1707, -0.0295,
+0.0060, -0.0297, +0.0298, -0.0614, +0.0064, -0.0780, -0.1962,
-0.3330, +0.1126, -0.1496, -1.5588, -0.1446, +1.0634, +0.0322,
+0.0421, +0.2496, +0.0028, -0.3989, +0.2559, -1.1919, +0.4706,
+0.1022, +0.6683, -0.5075, +0.3810, +0.0101, -0.0736, +0.3099,
-0.1884, -0.0315
],
[
-0.1955, -0.5094, +0.1636, -0.1839, -0.8500, +0.0869, -0.3795,
-0.0824, +0.1572, +0.5188, +0.1795, -0.1984, +0.1677, +0.3621,
+0.1295, -0.3444, +0.6014, +0.1478, -0.0711, -0.2961, +0.0910,
+0.1848, +0.2503, -0.2125, -0.3327, +0.2299, -0.5543, -0.7245,
-0.2514, +0.3123, +0.2954, +0.0990, +0.2281, -0.0702, +0.2690,
-0.2279, -0.3561, -0.2342, -0.2415, +0.2229, +0.4394, -0.1579,
+0.2719, -0.1974, -0.0336, +0.2625, -0.5710, -0.0972, -0.3365,
+0.0324, +0.3265, +0.1474, -0.3497, +0.1704, -0.0507, -0.0087,
-0.8585, -0.0940, -0.6150, +0.2710, +0.6073, -0.6070, -0.2289,
-0.5282, +0.3340, -0.3444, -0.0644, -0.8340, -0.3540, -0.1502,
+0.2084, -0.3327, -0.0523, +0.1642, -0.7461, -0.1801, -1.0584,
-0.4991, -0.3224, +0.1219, -1.1949, -0.5131, -0.0919, -0.5761,
+0.0495, -0.2821, +0.1686, +0.0202, +0.2129, +0.2013, -0.9683,
-0.1908, -0.1073, +0.3261, +0.1326, -0.1326, -0.5122, -0.1264,
+0.0474, +0.0922, -0.2146, +0.1663, +0.1091, +0.0349, +0.1768,
+0.1695, -0.1796, +0.3195, -0.3877, -0.2224, -0.2449, +0.3745,
+0.1159, -1.2097, -0.0515, -0.3228, +0.2604, +0.2458, -0.5800,
-0.3306, -0.6286, -0.3607, -0.4681, -0.8831, -0.2818, -0.2989,
-0.3671, -1.1921
],
[
+0.1324, -0.4846, +0.3543, -0.4279, +0.0218, +0.2834, +0.1271,
-0.2439, -0.5697, +1.1672, +0.1293, -0.1005, +0.3387, -0.7602,
-0.4949, -0.1251, +0.2813, -0.1402, +0.0073, +0.4846, -2.0272,
-0.0837, -0.1081, -0.1338, -0.0291, -0.0520, +0.1935, +0.1562,
-0.4019, -0.2690, +0.1373, -0.2031, +0.1697, -0.0621, +0.1849,
-0.8227, +0.3108, +0.0454, +0.2020, -0.3821, -0.2381, -0.1363,
+0.0512, -0.5040, +0.1104, -0.3400, -0.5262, +0.5317, -0.4527,
-0.8015, -0.0089, +0.0263, +0.1689, +0.0380, -0.1424, +0.3908,
-0.0995, -0.1746, +0.4555, +0.2764, +0.2897, -0.1100, -0.4770,
-1.0901, +0.3817, +0.3746, -0.6783, +0.2379, +0.9173, -0.1580,
+0.1852, +0.6157, -0.1961, +0.1422, -0.9707, -0.4440, -0.0112,
-0.3850, -0.2027, -0.0332, -0.7040, -0.4630, +0.6232, -0.4129,
-0.3921, +0.2035, -0.0832, +0.5634, +0.0469, +0.0092, -0.1565,
-0.0481, -0.2951, -0.0111, -1.7074, +0.1362, +0.1502, +0.2415,
+0.0590, -0.0583, +0.3007, +0.2916, -1.0366, -0.1694, -0.5418,
-1.1429, +0.1295, +0.1362, +0.0207, +0.0179, -0.5855, -0.1739,
-1.1501, -0.1732, +0.6585, +0.3460, -0.4667, +0.0654, -0.3989,
-0.0483, -0.3596, -0.4865, -0.1731, +0.3521, +0.3019, -1.1546,
+0.1897, -0.0856
],
[
+0.0735, -0.0567, -0.8032, -0.4114, -0.6058, -0.2193, +0.6009,
+0.6099, +0.1714, +0.1384, +0.1317, +0.5786, +0.1662, -0.1301,
+0.6285, +0.2156, +0.0117, -0.5567, +0.0561, -1.0620, -0.1751,
+0.1477, +0.3508, +0.0661, -0.0849, +0.2782, -0.0713, -0.0370,
-0.3821, +0.1062, -0.0948, +0.0361, +0.3888, +0.4089, -0.4191,
-1.8165, -0.2541, -0.0063, -0.5197, +0.4507, +0.7076, +0.2521,
-0.1259, -0.1574, +0.0586, +0.3146, +0.1439, -0.4246, +0.4805,
-0.1064, +0.3108, -0.1860, -0.0510, -1.1670, +0.3645, +0.0356,
-0.2300, +0.2261, -0.4346, +0.3613, -0.8356, -0.4161, +0.0780,
+0.1070, +0.1950, -0.8652, -0.1801, -0.2838, -0.5130, -0.7402,
-0.4372, -0.3082, -0.1407, +0.0451, +0.4333, +0.5375, -1.4056,
-0.1902, -0.0910, -0.1582, -0.1891, -0.2806, -0.6402, -0.4763,
+0.2221, -0.2245, -0.5862, +0.2639, +0.3936, -0.3819, +0.3230,
-0.8123, -0.5992, -0.1648, +0.8798, +0.2127, -0.5322, -1.0589,
-1.3485, -0.6906, -0.0448, -1.2024, +0.1072, +0.2755, -0.5748,
+0.0877, +0.2745, +0.0640, +0.2661, -0.3139, +0.2355, +0.4331,
+0.3440, +0.4860, -0.3379, +0.5891, +0.2155, -0.3365, +0.3805,
+0.2537, -0.6251, -0.0326, +0.1532, -0.0125, +0.1749, -0.3645,
-0.9793, +0.0434
],
[
-1.1930, +0.5646, -0.9985, -0.2640, -0.1287, -1.0951, +0.0648,
+0.2532, -0.4144, -0.4100, -0.0835, +0.2912, -1.2506, +0.1132,
-1.2571, -0.0902, +0.6812, -0.3145, +0.0854, -0.5571, +0.0128,
+0.2900, -0.5539, +0.0010, -0.4862, +0.2361, -1.2377, -0.6221,
+0.1594, +0.1456, -0.0369, -0.6333, +0.1119, -0.1009, -0.0694,
+0.1564, +0.5584, -0.0073, -0.3532, +0.1511, -0.2448, +0.3191,
+0.0510, -0.7811, +0.0116, -1.3001, +0.0963, -0.2932, +0.2152,
+0.6290, -0.3466, -0.2318, -0.4833, -0.9795, +0.1221, -0.1789,
-0.4434, +0.3625, +0.1297, -0.2875, +0.1629, +0.2148, +0.3416,
+0.3896, +0.4758, -0.2505, +0.0288, +0.5394, +0.1399, -0.0757,
+0.1861, -0.4772, -0.2275, -0.4032, +0.2681, -0.2982, +1.1032,
-0.2331, -0.1047, +0.3253, -0.1822, -0.1980, +0.0089, +0.3273,
+0.0098, -1.1442, +0.0552, -0.8381, -0.4844, -1.0339, -0.0695,
+0.3731, +0.3991, -0.7210, -0.7972, +0.0382, -0.1339, -0.0447,
+0.0631, -0.0759, +0.3365, +0.2942, -0.5237, +0.1108, +0.2281,
+0.4007, -0.1869, -0.0529, -1.0671, -0.0356, -0.7960, +0.0457,
+0.2884, -1.2189, -0.0712, +0.6715, -0.1247, -0.5548, +0.0852,
+0.2519, +0.1606, +0.1365, +0.4946, -0.6133, +0.0761, -1.3449,
+0.5933, -1.1779
],
[
-0.2804, -0.5471, +0.0551, -0.9886, -0.3381, +0.0967, -0.1828,
+0.3549, +0.8853, -0.4441, -0.3885, -0.3878, -0.1378, -1.2579,
-0.5911, -0.0165, +0.4075, -0.2342, +0.1255, -0.4637, -1.2356,
+0.0905, +0.5726, -1.2384, +0.3039, -0.1166, +0.9449, -0.1286,
-0.1615, -0.6373, -0.6342, -0.7324, -0.4217, -0.2331, -0.1038,
-1.9344, +0.2348, -0.1012, +0.1137, -0.0832, -0.7858, -0.3082,
-0.1527, +0.0916, +0.0603, -0.5602, -0.3892, +0.4406, +0.0072,
+0.5934, -0.2114, -0.5871, +0.7619, +0.4133, -0.8720, -0.3605,
+0.0495, -0.0443, +0.0407, +0.0590, +0.0571, -0.9888, +0.5016,
+0.0917, -1.0617, -0.1703, +0.3349, -0.3760, -0.4340, +0.6685,
+0.3912, +0.2040, -0.1057, -0.1511, +0.2123, +0.4670, +0.1919,
-0.3246, -0.4883, -0.0083, -0.8998, -0.7696, +0.5723, +0.2926,
+0.2666, -0.2535, +0.2142, +0.2284, +0.8592, +0.1695, +0.1796,
+0.3706, +0.5187, +1.1045, -0.3000, +0.3547, -0.7726, +0.2280,
-1.3668, +1.2893, -0.2239, -0.2915, -0.9778, -0.0967, -0.2130,
+0.3331, +0.1535, +0.3457, -0.6926, +0.8179, -0.4072, +1.5560,
+0.2476, -0.0502, +0.5784, +0.6010, +0.2252, -1.1767, +0.0105,
-0.9442, -0.8745, +0.1999, -1.4982, +0.2176, -0.0420, -0.7675,
+0.0954, +0.8451
],
[
-0.4607, +0.9224, -0.0666, +0.2874, +0.0829, -0.2010, -0.0345,
-0.9402, +0.4678, -0.1414, -0.6691, -0.3298, -0.8264, -0.1855,
+0.1957, +0.0315, +0.3370, -0.0251, +0.2687, +0.2016, -0.8409,
+0.4957, -0.0033, -0.3533, -0.0471, +0.1057, +0.4238, -0.7189,
+0.2092, -0.5275, -0.1519, +0.8259, +0.1368, -0.0003, +0.6165,
+0.0160, +0.3227, -0.0074, -0.4793, -0.4458, -0.6210, +0.1793,
+0.0263, +0.6392, +0.0782, +0.1148, +0.1563, +0.6285, -0.0923,
-0.1974, -0.1243, -0.0663, -1.0275, +0.1073, -1.0154, -0.3729,
-0.0186, +0.1367, +0.3499, -0.1006, +0.3770, -0.4031, -0.6277,
-0.2577, -0.5225, -0.4877, +0.0720, -0.5231, -0.5414, +0.0301,
+0.1047, -0.2830, +0.5737, -0.2476, -0.5488, -0.5124, -0.1490,
+0.1823, +0.2839, -0.1406, -0.5689, -0.3808, -0.1889, -0.4013,
-0.4741, -0.3185, -0.1529, -0.7650, -0.2723, -0.5697, +0.4438,
-0.1550, +0.3837, +0.1311, -0.5736, +0.1836, -0.1597, +0.1093,
-0.4329, -0.6388, -0.4136, -0.0996, -0.5276, +0.1312, -0.1520,
+0.0417, -0.1222, +0.1583, -0.2679, -0.7778, -1.3841, +0.0280,
-0.4868, -0.3024, -0.0940, +0.2621, +0.1358, +0.3894, +0.1022,
-0.8691, -0.2127, +0.3902, -0.8880, +0.2733, -0.3573, -0.1029,
-0.2447, +0.1657
],
[
+0.1771, -1.0649, +0.1922, -0.4930, +0.2546, -1.6105, -0.1822,
+0.4909, -1.8516, -1.1523, -0.4807, +0.1944, -0.5609, -0.5628,
+0.0320, -0.8651, +0.5125, -0.1112, -0.7384, +0.4523, +0.0384,
+0.2800, +0.1029, -0.3131, +0.2663, +0.0665, +0.3445, +0.3239,
-2.2262, -0.7737, -0.4730, +0.1132, -0.2555, -0.4740, -0.8098,
-0.5930, +0.1079, -0.0022, -0.0244, +0.0345, -0.4886, -0.1646,
-0.7892, +0.2974, -0.0426, -0.3042, -0.9469, -0.8653, -0.2788,
-0.3556, -0.5344, +0.1168, -0.8683, +0.1483, +0.5287, +0.1890,
+0.0854, +0.0945, -1.1579, -0.1664, -0.0890, +0.1049, -0.7604,
-0.3063, +0.7497, -0.9224, +0.0295, -1.3473, -0.8716, +0.4510,
-0.4206, -0.0661, -0.6026, -0.0489, +0.1988, -0.9539, -0.0979,
+0.1267, +0.1407, -2.0032, +0.2541, +0.4513, +0.2627, -0.4517,
-0.1051, -0.5941, -0.5248, +0.0565, -0.0243, -0.9744, +0.5845,
+0.0379, +0.4668, -0.3622, +0.3820, +0.4215, -0.6691, +0.0417,
-1.6526, -0.4631, -0.4563, -0.2545, -0.2057, -0.0443, +0.4281,
-0.9920, -0.5481, -0.8264, +0.6692, +0.0857, -1.2917, +0.1586,
+0.0600, -1.4808, -0.1486, -0.1216, -1.2193, +0.4264, -0.8464,
-0.6857, -0.0517, +0.0287, -0.1867, -0.6775, -0.3675, +0.5128,
-1.5418, +0.2499
],
[
-0.1612, -0.4239, -0.3609, -0.4407, -1.8796, +0.1811, +0.1288,
-0.0542, -0.9194, +0.3582, +0.1502, +0.1751, -0.0533, -0.0531,
-0.0382, -1.2325, -1.2668, +0.6905, -0.0798, +0.1129, -0.3606,
-0.5942, +0.2636, -0.1080, -0.2369, -0.0980, -0.3586, +0.0497,
-0.6267, -0.2695, +0.7429, -0.0488, +0.5714, -0.6892, -0.8693,
+0.1249, -0.7356, -0.1495, +0.1155, +0.4590, +0.5223, -0.5127,
+0.1505, +0.2809, +0.2153, +0.7752, -0.1908, -0.6606, +0.2257,
+0.0174, +0.0978, -0.5091, -0.6323, +0.1803, -0.3168, -0.3116,
+0.0962, -0.0341, -0.0762, +0.6551, -0.3391, +0.4467, +0.3393,
-0.4190, +0.5773, -0.4536, -0.6370, +0.0585, +0.0999, -0.1559,
+0.5472, -1.2403, -0.1902, -0.0791, -0.3356, -0.0231, -0.3575,
-0.0715, +0.3344, +0.1620, -0.4599, -1.0161, -0.0755, +0.3601,
-0.6676, +0.2324, -0.4592, -0.2415, -0.4137, +0.0879, -0.0214,
-0.6427, -0.7533, -0.3647, -1.3776, -0.3563, +0.1488, +0.5039,
-0.7781, -0.0703, -0.6032, +0.0771, -0.1891, -0.5058, +0.1018,
+0.4503, -0.7568, -0.2828, -0.4956, +0.4676, -1.1091, +0.5466,
-0.5813, -0.6780, +0.3361, -0.6156, +0.2207, +0.3111, +0.0996,
-0.6625, +0.3951, -0.6766, +0.1181, +0.9436, +0.0373, -0.3188,
-0.2978, -0.5654
],
[
+0.0668, -0.3408, -0.1976, +0.4520, -0.7920, -0.0620, -0.5180,
+0.3705, +0.3587, -0.0448, -0.0541, -0.4324, -0.2409, +0.2633,
-0.1210, -0.5174, -0.0161, -0.2320, -0.5553, -0.0265, -0.0198,
+0.0403, -0.1374, -0.2721, -0.4634, -0.1764, -0.4010, -0.8389,
+0.0032, -0.2553, -0.0130, -0.3559, -0.3330, +0.4345, +0.0551,
-0.4100, +0.2560, +0.1938, -0.1803, -0.6254, +0.4951, -0.6375,
-0.0450, +0.2914, +0.2987, -0.4416, -0.6367, -0.0493, -0.2898,
-0.2539, -0.5301, -0.2232, -0.4586, -0.2803, +0.3026, +0.2851,
-0.3451, +0.3517, -0.6863, -0.1376, -0.6062, -0.7945, -0.0853,
-0.1367, +0.3939, +0.0591, +0.1730, +0.6343, +0.5698, +0.1005,
-0.3372, +0.3566, +0.2433, -0.4498, +0.1536, -0.3045, -0.4275,
+0.1514, +0.1296, -1.0830, -0.2265, +0.7224, -0.0265, -0.0444,
-0.4194, -1.2595, +0.0165, +0.3162, -0.3852, -0.3603, -0.3801,
+0.0952, -0.0012, +0.0985, -0.5196, +0.0584, +0.2641, -0.0625,
+0.1562, -0.6879, -0.5486, -0.6414, -0.3643, +0.0262, +0.0134,
-0.5337, -0.3427, -0.6458, -0.4454, +0.2579, -0.3257, -0.6284,
+0.2397, -0.2731, -0.0075, +0.1768, +0.5291, -0.1539, +0.1083,
+0.1832, -1.1263, +0.0974, +0.3043, -1.4778, -0.1009, +0.0417,
-0.5630, +0.4801
],
[
-0.6454, -0.7756, -0.4977, -0.8220, -1.1194, +0.0238, -0.4573,
-0.0432, +0.6942, -0.1904, -0.0429, -0.5213, -0.3837, -0.1483,
+0.1033, +0.3135, +0.4299, -0.1421, -0.6400, -0.0736, -0.4123,
+0.0786, -0.0591, +0.0877, -0.2312, +0.4058, +0.0759, -0.5178,
-0.7740, -0.4119, -0.2352, -0.6448, -0.2426, +0.6444, -0.0683,
-0.9792, -1.3938, -1.5029, -0.7095, -0.2758, +0.0554, +0.1728,
-0.1377, -0.2068, -0.3909, -0.4126, +0.5882, -0.2119, -0.2786,
+0.3562, -1.0845, -0.5862, +0.0659, -1.8884, -0.0195, -0.0630,
-0.0171, +0.3654, +0.2821, -1.0086, +0.3942, -0.0654, +0.1168,
+0.4569, +0.3138, -0.1170, +0.3250, -0.5120, -0.6833, -0.9898,
-0.2593, -0.2547, -0.4772, -0.4333, +0.0239, -0.1653, +0.2902,
-0.1610, +0.1376, +0.1103, -0.3598, -1.0606, -0.3341, -0.1726,
+0.2773, +0.5422, -0.1895, -0.4480, -0.2850, -0.3535, -0.6584,
+0.2745, +0.0285, -0.2066, +0.4448, -0.9888, -0.1846, -0.1153,
-0.1576, -0.1381, +0.3401, -0.7143, -0.3209, -0.0279, -0.3679,
+0.2760, -0.4931, -0.3817, +0.7612, +0.2598, -0.0127, -0.0994,
+0.4239, -0.8268, -0.5012, +0.2769, -0.6953, +0.1870, +0.2750,
-0.8051, +0.2195, +0.6554, -0.9753, -2.0387, -0.1231, -0.4834,
-0.4421, +0.5339
],
[
+0.4335, +0.1656, -0.9974, -0.1176, -0.3960, -0.0638, +0.2079,
+0.1231, -0.0854, -0.0827, +0.2503, -0.2978, -0.3204, +0.4953,
+0.1134, +0.5835, +0.3386, -0.1330, +0.2271, -0.2615, -0.2089,
-0.0507, -0.2240, +0.1935, +0.3242, -0.1012, -0.1488, -0.0782,
-1.1263, +0.1241, +0.3369, -0.3031, +0.1686, -0.3965, -0.3629,
+0.3934, +0.2049, -0.2970, +0.2818, -0.1164, -0.0679, -0.4821,
-0.1474, +0.3424, -0.5797, +0.1040, -0.3827, +0.0046, +0.3052,
-0.3735, +0.0795, +0.1228, -0.1774, -0.5957, -0.1294, +0.3470,
+0.2718, +0.0017, +0.2303, +0.1612, +0.2331, +0.1641, -0.1593,
-0.0877, -0.5581, +0.2669, -0.6158, +0.3496, +0.2728, -0.0912,
-0.5599, -0.1258, -0.0708, +0.0594, +0.1851, -0.1625, -0.1608,
+0.3045, +0.0633, -0.8487, +0.5131, +0.1964, +0.0798, -0.1822,
-0.1556, -0.2373, -0.0818, -0.1272, -0.7765, +0.0968, -1.2447,
-0.3894, +0.2402, +0.2558, -0.0405, -0.3095, +0.2425, +0.0121,
+0.2948, -0.7793, +0.2155, +0.3195, +0.0075, -0.6360, -0.2475,
+0.1176, +0.2035, -0.5655, -0.3994, -0.6486, -0.3726, -0.4562,
-0.7920, +0.9645, -0.3052, +0.5184, +0.2414, -0.7075, -0.0663,
+0.1578, +0.8090, +0.1994, +0.2978, +0.1654, +0.0228, +0.1505,
+0.1746, -0.3233
],
[
+0.3193, +0.0939, -0.2118, +0.6163, -0.0063, -1.3777, +0.0669,
-0.1254, +0.4310, +0.0861, -0.1759, +0.2185, -0.1819, -0.3422,
+0.1034, +0.2122, +0.1399, +0.1688, -0.4949, +0.0118, +0.6199,
+0.0669, +0.1229, +0.3465, +0.1135, -0.1835, +0.0139, +0.0407,
+0.3231, +0.2393, -0.0652, +0.6749, +0.0268, +0.1387, -0.2595,
+0.2993, -0.0990, -0.4262, -0.2514, +0.3848, -0.2463, -0.1556,
+0.1515, +0.1848, -0.4442, -0.2183, -1.3425, -0.2220, +0.4470,
+0.0889, -0.1207, -0.0777, +0.5244, -0.2950, -0.1844, -0.0176,
-0.0992, +0.0521, +0.3979, -0.9022, -0.0738, +0.5636, +0.2176,
+0.0760, +0.2633, -0.0544, +0.1360, -0.0387, -0.1706, -0.3108,
-0.8670, -0.3537, -0.3533, -0.6687, -0.2000, -0.3473, +0.1711,
+0.4867, +0.1795, -0.3960, +0.1990, -0.1117, -0.2524, +0.4683,
+0.7174, +0.1119, +0.5589, -1.3097, +0.8495, +0.1438, -0.4314,
-0.3728, -0.6224, -0.3955, +0.7276, +0.7996, -0.4632, -0.5398,
+0.3639, +0.3985, -0.0895, -0.3919, -0.2611, +0.1941, -0.0824,
+0.0592, +0.8058, -0.4699, -0.3778, -0.2650, +0.2874, -0.3255,
-0.2280, -0.1563, +0.4553, -0.1038, +0.7102, +0.2857, -0.6250,
-0.5684, +0.1135, -0.0182, +0.0845, -0.0283, -0.3868, -0.5412,
-0.2434, -0.1659
],
[
-0.0464, +0.5243, -0.4781, +0.2690, +0.1849, -0.3429, +0.3832,
-0.3084, -0.1094, -0.0967, +0.1260, -0.1243, -0.1423, -0.1049,
-0.2899, -0.3276, +0.6179, -0.1143, +0.2727, +0.4459, -0.0010,
+0.1593, +0.1067, -0.7444, +0.1892, -0.3649, +0.3654, +0.1727,
-0.3584, -0.4054, -0.0355, +0.0589, +0.3136, -0.6080, -0.4424,
-1.1500, -0.2929, -0.1218, +0.0851, +0.1286, -0.1481, -0.0944,
+0.0483, +0.3923, -0.6804, -0.2047, -0.1822, -0.3020, +0.4498,
-0.1132, +0.1351, -0.2379, +0.3005, -0.2661, -0.0353, -0.3425,
+0.1234, -0.3196, +0.3445, -0.6262, +0.2050, -0.4626, +0.2234,
+0.0922, +0.4025, +0.2875, -0.4980, +0.2534, +0.4041, -0.5816,
-0.3658, -0.3838, -0.2730, -0.8847, -0.5349, +0.4322, -0.2092,
+0.2194, -0.5464, -0.0687, -0.7599, -0.0938, -0.5080, -1.0531,
+0.5433, -0.4617, +0.3173, +0.4012, -0.0094, +0.3008, -0.1310,
+0.2291, -0.3119, -0.0245, -0.1378, -0.5103, +0.0308, -0.5093,
-0.0109, -0.8061, -0.5017, -0.3643, +0.5121, -0.8432, -0.7076,
+0.5774, +0.6549, -0.7813, -0.0083, -0.2012, +0.1741, -0.3110,
-0.2896, +0.2991, -0.6618, -0.0971, +0.1034, +0.1374, +0.8942,
+0.0567, -0.1838, +0.7772, -0.0397, -0.0103, -0.4255, -0.1541,
+0.3253, +0.1915
],
[
-0.2430, -0.4043, +0.1766, +0.0398, +0.0914, -0.3983, +0.1439,
+0.1795, +0.2706, +0.3679, +0.1778, -0.2966, +0.0740, -0.1158,
-0.1662, -1.3763, +0.0125, +0.1487, -0.3066, -0.2777, +0.0985,
+0.1373, +0.2813, +0.1631, +0.0871, -0.0837, -0.0411, -0.4317,
+0.5459, -0.2093, -0.5389, +0.1417, -0.4010, +0.0224, +0.1920,
+0.2597, -1.4467, +0.4732, -0.0835, -0.3698, +0.1804, +0.3861,
-0.8786, +0.0130, +0.0713, +0.1911, +0.0763, +0.1476, -0.3351,
+0.3152, +0.1109, +0.3021, +0.0581, +0.4585, -0.1141, +0.0913,
-0.3624, +0.2428, -0.1251, +0.0972, +0.1378, +0.1701, -0.3600,
-0.3366, +0.0519, +0.2137, -0.0586, -0.2494, -0.2853, +0.4357,
+0.0171, -0.2218, -0.7087, +0.2661, -0.3040, -0.1467, -0.4746,
-0.3447, -0.8009, +0.0269, -0.1932, -0.0666, -0.1277, -0.0639,
+0.4825, -0.8051, -0.0217, +0.1795, +0.0144, -0.4030, -0.2109,
-0.1861, +0.1756, +0.4672, -0.1331, +0.2954, +0.0176, +0.2001,
+0.0948, +0.4184, -0.9002, +0.3168, -0.3742, +0.2870, -0.1890,
-0.2389, +0.6784, -0.4299, -0.5234, -0.2825, +0.0168, -0.3289,
+0.5472, +0.0703, +0.2210, +0.3631, +0.4255, +0.2899, -0.3018,
-0.1029, +0.2479, +0.1401, -0.3799, -0.1869, +0.2482, -0.0682,
-0.3796, -0.0130
],
[
-0.4788, +0.2948, -0.0242, +0.0233, +0.2287, +0.0991, -0.4801,
+0.7143, +0.7220, -0.4549, +0.3487, -0.0855, -0.0851, -0.6122,
-1.1795, -0.0827, -0.0839, -0.6036, -0.9077, +0.0458, +0.2859,
+0.0758, -0.2018, -0.0300, +0.0544, +0.4906, +0.3798, -0.4791,
-0.1694, -0.0224, -0.4815, -0.1661, -0.5559, +0.3484, -0.0509,
+0.1270, +0.4359, -0.0170, +0.3239, +0.1996, -0.0626, -0.5734,
+0.0043, -0.0116, +0.2306, -0.2151, -0.8066, -0.6628, -0.2044,
-0.1113, +0.2094, +0.2780, -1.2293, +0.1742, -0.6659, +0.2707,
+0.2288, +0.5875, +0.0937, -0.0826, +0.1510, -0.0347, -0.2975,
-0.9115, +0.0580, +0.2950, +0.1988, +0.1708, +0.1293, +0.1360,
-0.2675, -0.3640, -0.0489, -0.3411, -0.0149, +0.0747, +0.4562,
-0.0167, -0.2461, +0.5418, +0.0244, -0.5008, +0.3172, +0.3555,
-0.0258, -1.2760, +0.0538, -0.5143, -0.1031, +0.2422, -0.0139,
+0.2600, +0.3610, +0.3186, +0.1357, -0.5968, +0.0320, +0.0657,
-0.1833, -0.0577, -0.0921, +0.1382, -0.0592, +0.0876, -0.0436,
-0.2665, -0.1702, -0.0775, -0.1017, -0.1287, +0.4811, -0.1845,
+0.3371, -0.7259, -0.0593, +0.2927, -0.1583, +0.1932, +0.1477,
+0.0772, -0.9719, -0.1581, +0.2555, -0.0088, +0.2792, -0.2661,
+0.0771, -0.0628
],
[
-0.1205, +0.0738, +0.5668, +0.3952, +0.2991, -0.2343, -0.2425,
+0.3841, +0.1064, -0.7505, +0.1889, +0.2206, +0.4329, -0.0102,
+0.5949, +0.1324, +0.1983, +0.5530, -0.0399, +0.0467, +0.3105,
+0.0241, +0.0438, -0.1896, -0.5775, +0.2109, +0.2776, -0.3537,
-0.6479, +0.2617, +0.5693, -0.0925, +0.3954, -0.1472, +0.4712,
+0.1076, +0.1822, +0.5363, -0.2732, -0.0041, +0.0766, +0.2539,
+0.2578, -0.0138, +0.2948, -0.0362, -0.0785, -0.2411, +0.2752,
-0.9657, +0.0666, +0.2308, -0.2310, +0.0951, -0.4527, -0.0697,
+0.0239, -0.0487, -0.4776, +0.3036, -0.0820, -0.4492, +0.3268,
-0.6473, -0.1562, -0.1372, -0.4948, -1.4579, +0.1958, +0.0605,
+0.2382, +0.0434, -0.3164, +0.0126, -0.1310, +0.1430, +0.1571,
-0.7431, +0.1440, -0.1454, -0.2756, +0.1328, -0.3722, -0.5365,
+0.1636, +0.4522, +0.6084, +0.2015, -0.7049, -0.8863, -0.8337,
-0.0482, +0.1404, -0.0012, -0.5127, -0.3220, -0.7337, -0.4108,
-0.0388, -0.7985, +0.3413, -0.8897, -0.0404, -0.3497, -0.1888,
-0.0018, -0.3787, -0.5566, -0.6240, +0.0503, +0.2438, -0.4712,
-0.3124, -1.2848, -0.2774, -0.3625, -0.0763, +0.0441, +0.5113,
-1.0048, +0.1605, +0.0592, -0.0083, +0.2849, -0.0818, -0.4153,
+0.4741, +0.0249
],
[
-0.3259, -0.0924, +0.2141, -0.6682, +0.0653, -0.3126, +0.2141,
-0.5372, +0.0619, +0.4928, +0.6802, -0.0303, -0.1073, -0.0185,
-0.1252, +0.4509, +0.0839, -0.1281, -0.3127, +0.2805, +0.2024,
-0.5651, +0.0838, +0.3462, -0.2815, -0.2832, -0.2357, -0.1275,
+0.6727, -0.1054, -0.2455, +0.1086, -0.2943, -0.2398, -0.4691,
+0.0847, +0.1380, -0.1887, +0.4670, -0.9059, +0.4327, -0.4259,
-0.2343, +0.2994, -0.0059, -0.3699, -0.4257, -0.2617, -0.0120,
-0.3947, +0.1851, -0.3750, -0.1081, -0.6665, +0.3328, -0.0758,
-0.5060, +0.2679, +0.3259, -0.4441, +0.0263, -0.9864, -0.3219,
+0.2595, -0.6472, +0.8043, -0.3170, -0.8339, -0.3836, -0.0252,
-0.0801, -0.1906, +0.2606, +0.8412, -0.1107, +0.0706, -0.0642,
-0.1641, +0.2282, -0.1386, -0.5176, +0.3356, +0.3406, -0.4221,
+0.0099, -0.3131, -0.1490, -0.2464, +0.9962, -0.4005, -0.2326,
-0.0511, +0.2822, -0.3473, -0.3058, -0.8731, +0.7910, +0.1322,
-0.8155, -0.0386, +0.2808, -0.1292, -0.3470, -0.5239, +1.1692,
-0.5421, -0.0557, -0.7708, -0.4259, +0.0206, -0.0570, -0.4799,
-0.1320, +0.5666, -0.0914, -0.3411, -0.2026, +0.0413, -0.3290,
+0.1876, -0.1087, -0.4253, -1.4439, -0.1898, -0.2367, +0.5706,
-0.0462, -0.2875
],
[
-0.5263, -0.5064, -0.4516, +0.2607, +0.0497, +0.4931, +0.2818,
+0.1744, -0.4092, -0.1174, +0.5896, +0.1405, -0.3963, +0.1237,
+0.2961, +0.2657, -0.0256, +0.3371, +0.0303, -0.4256, -0.3957,
-1.3833, +0.2552, +0.5481, -0.0588, -0.1626, -1.3412, +0.2287,
-0.0006, +0.3928, -0.4100, +0.2736, +0.4837, +0.2450, -0.6648,
+0.3226, -0.4540, +0.1576, +0.0255, +0.4007, +0.1668, -0.5768,
-0.5063, -0.7843, -0.2192, -0.1426, +0.0226, +0.0908, +0.3442,
-0.5859, -0.0460, -0.1415, +0.1459, +0.2539, +0.0692, +0.1603,
-0.9199, +0.1091, -0.5698, +0.3860, -0.2066, -0.2292, -0.0759,
+0.4949, +0.1564, +0.1298, +0.1323, -0.4606, +0.4239, -0.0202,
+0.4525, +0.0676, -0.0940, +0.7246, +0.1034, +0.4973, -0.0566,
+0.2889, +0.1821, -0.8860, -0.2605, +0.1120, +0.1359, -1.6559,
-0.0994, +0.1441, -0.2562, -0.0352, +0.2069, -0.2465, +0.7558,
+0.5875, -0.2769, -0.4353, -0.8551, +0.0189, -0.3636, +0.5614,
-0.1159, +0.6795, +0.2753, +0.1151, -0.0356, -0.3354, -0.8142,
+0.3002, -0.0226, -0.1470, -0.3682, -0.5422, -0.3524, -0.0762,
+0.3675, -0.1527, -0.0612, +0.2292, +0.5106, -0.1995, +0.2385,
-0.0520, +0.7068, -0.4076, -0.3832, +0.0323, +0.5738, -0.1864,
-0.4493, +0.1467
],
[
-0.4268, -0.5872, +0.1827, -0.0295, -0.5556, -0.7391, +0.7044,
+0.0806, +0.2893, -0.4276, +0.2567, -0.8649, +0.3852, +0.0466,
+0.4104, -0.3009, -0.2069, -0.4436, +0.3243, +0.4523, +0.1035,
-0.0934, +0.1396, -0.2971, +0.2776, +0.0283, -0.1412, -0.5667,
-0.7793, +0.3937, -0.0179, +0.2908, -0.6341, -0.4567, +0.2814,
-0.3609, -1.2294, -0.3265, -0.0420, +0.2264, +0.0654, -0.5900,
+0.1255, +0.0563, +0.2543, -0.1010, +0.3539, -0.0393, +0.1338,
-0.3963, +0.0427, -0.3492, -0.3314, -0.3047, -0.1329, +0.1481,
+0.1273, +0.1482, -0.1715, -0.4490, -0.6341, -0.1411, -0.1587,
-0.0418, +0.3699, -0.1815, +0.0967, -0.2825, -0.2781, +0.0213,
+0.0603, +0.1463, -0.1261, +0.0788, -0.2587, -0.0294, +0.1550,
+0.1917, -0.1985, +0.2772, -2.3317, -0.4041, -1.1275, +0.1741,
-0.3849, +0.1296, -0.0309, +0.1997, -0.4544, +0.0789, +0.4179,
+0.3871, +0.1782, +0.2553, -0.9304, -0.1356, -0.7131, -0.3142,
+0.3308, -0.2094, -0.1491, +0.6618, +0.1643, +0.3704, -0.1532,
+0.3060, -0.6095, +0.1046, -0.6198, -0.8268, -0.0314, +0.4699,
-0.0570, +0.0696, -0.2731, -0.6359, -0.3539, -0.0665, -1.4877,
-0.0778, -0.5318, -0.2568, -0.2785, +0.4667, -0.3807, +0.1290,
+0.4132, -0.2281
],
[
+0.4144, -0.2453, -0.6352, +0.0797, +0.4837, +0.1756, -0.1689,
-0.0407, -0.1641, +0.3902, +0.1434, -0.0858, -0.3018, -0.3788,
-0.4070, -0.4580, +0.5782, +0.1014, +0.0483, -0.4649, -0.8623,
-0.2727, -0.0816, -0.0794, +0.3690, +0.2358, +0.1840, -0.5061,
+0.4956, -0.0459, +0.1033, +0.1466, +0.4621, -0.2750, -0.5645,
-0.7397, +0.5092, -0.2278, -0.3908, -0.8516, +0.0498, -0.1350,
+0.0435, -0.7773, -0.0719, -0.3933, +0.3576, -1.1889, -0.1261,
-0.3057, -1.5315, -0.0822, -1.0068, -0.1458, +0.1108, +0.1728,
+0.6613, -0.0482, +0.1267, -1.9655, +0.2621, +0.6660, +0.1185,
-0.2526, -0.8080, -0.2140, -0.1064, -0.8375, +0.0504, -0.3653,
-0.4404, +0.1181, +0.4609, -0.3721, -0.6912, +0.4146, +0.6302,
+0.0670, +0.3416, -0.0533, +0.2287, -0.1202, -0.0853, -0.4180,
+0.6715, +0.0844, -0.2472, -0.6553, +0.0278, -0.1189, -0.5933,
-0.8400, -0.7732, -0.4495, -0.1789, +0.4419, +0.3583, +0.2423,
-0.3636, -0.5046, +0.0133, +0.1114, -0.2747, +0.4451, -0.2000,
-0.3484, -0.2960, +0.3360, -0.5738, +0.0549, +0.0452, -0.0862,
+0.0327, +0.9143, -0.0679, -0.5630, -0.3637, -0.5268, -0.2273,
+0.3010, -0.2710, -0.2692, +0.1983, -1.2048, -0.1386, +0.0690,
-0.0446, -0.4564
],
[
+0.1738, +0.1187, -0.3066, +0.4367, +0.1442, -0.3168, -0.5278,
+0.4796, -0.5142, +0.4568, -0.2377, -2.1440, +0.2173, -0.0282,
-1.2387, -0.2722, -0.2450, -0.9980, -0.1381, +0.3996, -0.3626,
-0.2866, -0.7293, +0.1695, -0.0305, -0.3145, +0.2440, -0.0217,
+0.2407, +0.1341, +0.4644, +0.0791, -0.0324, +0.8154, +0.2003,
-0.8524, -0.7770, -1.6000, -0.1595, -0.2486, +0.2989, +0.3587,
+0.1625, -0.0403, +0.4665, -1.1816, -0.7108, -0.1961, -0.3279,
+0.2137, -0.2674, -0.4244, -0.1972, +0.0484, -0.5213, +0.0114,
-0.5583, -1.1815, -0.2730, -2.4739, +0.0351, -0.9367, +0.4868,
-1.1313, -0.2428, -0.1098, +0.1168, +0.0097, -0.0091, -0.0988,
+0.6567, +0.2903, -1.1361, -0.0712, +0.5666, +0.5488, +0.0187,
+0.4147, -0.4554, -0.0554, -0.1015, -0.3783, -0.2677, +0.2319,
-0.0162, -0.3072, -0.0450, -1.5145, -0.3145, +0.2317, +0.0531,
+0.0717, -0.4312, -0.0905, +0.2989, +0.2484, +0.5011, -0.5255,
+0.2265, -1.6048, +0.0203, -0.3383, +0.2683, +0.3878, -1.3090,
+0.0639, +0.6891, -0.1080, -0.3280, -0.0528, -0.4217, +0.0452,
-0.4198, -0.1554, -0.0454, -1.4473, +0.6277, -0.1168, -1.7838,
+0.5812, -0.5257, -0.3076, -1.3098, -0.3146, +0.6357, +0.2521,
+0.3271, -0.3130
],
[
+0.0589, -0.6008, +0.6131, -0.0561, +0.1542, +0.2693, -0.6580,
-0.4314, +0.3777, +0.6637, +0.0180, -0.2727, -0.2689, -0.0286,
-0.1122, -1.1123, +0.4082, -0.6998, +0.3442, -0.5004, +0.7137,
-0.0825, +0.1974, +0.0429, +0.0050, -0.9253, +0.1016, +0.4778,
-0.1797, +0.1576, -0.4610, +0.1361, -0.2568, -0.3893, -0.5261,
-0.2596, -0.3747, -0.9947, +0.9117, +0.0450, -0.0129, -0.0390,
-0.2002, -0.6058, +0.4297, +0.8271, -0.3855, -0.3747, -0.1476,
+0.2770, +0.1003, +0.1974, +0.4767, -0.1774, -0.1524, -0.4341,
+0.3936, +0.3613, -0.1879, -0.5588, +0.1126, +0.0162, -0.0249,
-0.2132, +0.1565, +0.2099, +0.2711, +0.6800, +0.3956, +0.1056,
-0.1320, +0.3389, +0.6546, -0.2477, -0.7566, -0.6385, -0.8820,
-0.7212, +0.0082, -0.0972, -0.5484, -0.6205, +0.2054, -0.6334,
-0.1074, -0.4796, -0.2095, -0.3918, -0.0249, +0.0496, +0.6946,
-0.8485, -0.7488, +0.1659, +0.1668, -0.6838, +0.4973, +0.0243,
+0.3332, -0.6033, -0.0636, -0.2762, +0.3183, +0.0333, +0.0461,
-0.5229, +0.1616, +0.2639, -0.7256, +0.3384, +0.4716, -0.1737,
+0.2796, -1.0387, +0.5968, -0.3691, -0.2509, +0.1983, -0.5982,
-0.2095, -0.3888, -0.0593, -0.4660, +0.2009, -0.2723, -0.3105,
-0.0894, +0.0340
],
[
+0.0328, -0.4288, -0.4086, -0.0870, +0.0050, +0.2962, -0.5815,
-1.1317, +0.2684, -0.3838, +0.3732, -0.0419, +0.1006, -0.0332,
-0.4590, -0.0034, -0.1430, -0.0152, -0.4190, -0.2460, -0.1689,
+0.2852, -0.3491, -0.1469, +0.5112, -0.2610, +0.5671, +0.3549,
-0.1863, +0.1151, +0.0004, +0.1777, +0.0849, -0.0509, -0.0241,
-0.2225, +0.5380, -0.1528, +0.1736, -0.5529, -0.3402, -0.4931,
+0.1928, +0.2212, -0.1292, -0.1949, -0.6881, +0.2509, -0.4402,
-0.1828, +0.4146, +0.0894, +0.2925, -0.3445, -0.3955, +0.2638,
-0.4357, -0.4719, +0.3477, +0.2992, -0.1546, -0.5620, +0.0869,
+0.3226, +0.1604, +0.7277, -0.9342, -0.3394, -0.1522, -0.8674,
+0.1367, -0.1647, -0.2419, -0.9182, -0.0594, -0.7259, +0.3389,
+0.2066, +0.4865, +0.2865, -0.3317, -0.3533, +0.1510, -0.0408,
-0.2310, -0.1214, -0.7874, +0.0594, +0.0637, +0.1704, -0.0509,
+0.4279, +0.1456, +0.0183, +0.0084, -0.3207, +0.3121, -0.4553,
+0.3080, -0.0612, +0.2902, -0.4538, -0.2972, -0.4140, +0.3137,
-0.1422, -0.8175, -0.1954, +0.5369, -0.7929, +0.2202, -1.4705,
-0.1834, -0.1199, -0.1593, +0.2240, -0.3757, -0.3017, -0.0894,
-0.1868, +0.6420, -0.0309, +0.1543, -0.5357, -0.2081, -0.1984,
+0.0857, +0.3100
],
[
-0.2785, +0.1105, +0.4149, -1.5879, +0.0800, +0.2271, +0.2915,
-0.3843, -0.4893, -0.7819, +0.0453, +0.0527, -0.0760, -0.2994,
-0.2641, +0.3183, -0.6400, -0.1805, +0.4145, +0.2248, +0.0725,
+0.1895, -0.0617, -0.2392, -0.0425, -0.0769, -0.0557, +0.2105,
+0.0059, +0.0436, +0.0629, +0.1110, +0.1154, +0.0762, +0.4204,
+0.1830, +0.0864, -0.4372, -0.3877, +0.0984, +0.2884, -0.1137,
-0.2281, +0.4976, +0.0322, -0.3907, +0.1984, +0.4247, +0.4307,
+0.2986, +0.1207, -0.1555, +0.5228, -0.0764, -0.7537, -0.4695,
+0.1507, -0.2460, +0.6236, +0.0654, -0.3903, -0.1480, -0.2316,
-0.1298, -0.0985, -0.6067, +0.0898, -0.9388, -0.0253, -0.6018,
-0.1985, +0.3939, +0.0958, +0.4344, +0.0901, -0.1734, +0.0954,
-0.5844, -0.5133, +0.0663, +0.1024, +0.1432, +0.1262, +0.5046,
-0.8934, -0.0737, -0.2976, -0.0891, -0.1219, -0.4012, +0.4510,
-0.4059, -0.0357, +0.3248, -0.6301, -0.1376, +0.3172, -0.0587,
+0.8964, -0.1738, +0.1822, +0.4807, -0.3443, -0.2458, -0.2195,
+0.1496, +0.6157, -1.0314, +0.1581, +0.5302, +0.0455, +0.3477,
+0.0962, +0.5732, -0.0013, -0.4262, -0.1007, -0.5615, -0.9545,
+0.1694, -0.4215, +0.2925, -0.3688, +0.3731, -0.0066, +0.6691,
-0.4870, -0.3537
],
[
+0.4068, +0.5845, -0.8742, -0.0664, -0.3680, +0.4151, -0.7328,
+0.0857, -0.2333, +0.0135, -0.0799, +0.1949, -0.1770, -0.3081,
+0.3287, +0.2236, +0.1515, +0.1368, +0.2567, -0.0252, +0.0880,
-0.2675, -0.2043, -0.6968, +0.0936, -0.4986, +0.7414, +0.3489,
+0.6269, -0.0687, +0.1289, +0.2688, -0.4887, -0.3381, -0.4586,
-0.0715, -0.4161, -0.4189, +0.1545, +0.2416, +0.4395, -0.1713,
+0.2378, +0.1061, -0.0685, +0.2889, -0.2323, -0.4748, +0.0775,
-0.0366, -0.2849, +0.3293, +0.3429, -0.0311, -0.0716, -0.1996,
+0.2068, -0.0579, +0.0055, -0.4996, +0.5267, -0.2290, -0.0476,
+0.0717, -0.1189, +0.5376, -1.0262, +0.0645, -0.0771, -0.1288,
+0.1105, -0.1917, -0.1174, -0.9960, -0.0879, -0.2593, +0.1000,
-1.1075, -0.3571, -0.0944, +0.0719, -0.1795, +0.0898, +0.1515,
-0.1497, -0.0154, +0.6563, -0.3794, +0.5367, -0.0372, +0.2813,
-0.1864, +0.1888, +0.3343, +0.1813, +0.1186, -0.3605, -0.3599,
+0.0245, -0.1951, -0.2185, -0.3295, +0.4275, +0.5862, -0.3230,
+0.2030, -0.2806, +0.2977, -0.2020, -0.2393, +0.7133, +0.5259,
-0.2807, -0.0618, -0.2883, -0.7191, -0.0670, +0.2811, -0.5409,
-0.3810, -0.6504, +0.3010, -0.0262, -0.4714, -0.6627, +0.2988,
-0.0401, -0.4720
],
[
-0.2626, -1.0124, +0.5541, -0.4718, -0.4002, +1.0249, +0.7575,
-0.9172, +0.3645, +0.4410, -1.9788, -0.3823, -0.4584, -0.1375,
+0.1245, -1.9877, -0.0296, -0.7269, -0.3599, -0.1202, -0.1364,
-0.2387, -0.2567, -0.0231, -0.1633, -0.7615, -0.0839, +0.1308,
+0.3210, -0.0354, +0.1021, -0.5398, -1.2572, -0.3326, -1.0425,
-1.1440, -1.5605, +0.3467, -0.0568, +0.3251, +0.2199, -0.1133,
-1.1209, -0.7836, -0.1683, +0.1539, -0.6069, +0.1850, +0.3215,
+0.0858, -0.3703, -0.1263, -0.1525, -0.4650, -0.2361, -0.0552,
+0.0358, -0.1606, +0.3956, -0.6136, -0.3105, +1.1259, +0.2709,
-0.4181, +0.3087, -0.0557, +0.3173, -1.1064, +0.0733, +0.9819,
-0.6309, +0.0837, -0.1080, -1.0399, -0.5285, -0.1462, -0.5661,
-0.5493, -0.2285, +0.2912, -0.1998, +0.2417, +0.2161, -0.2585,
-0.2857, +0.6020, +0.2718, -0.8665, -0.0585, +0.0802, -0.4464,
-0.7278, -0.3143, -0.4762, +0.6787, +0.1115, +0.3620, -1.2068,
-0.0188, -1.3203, +0.1745, -0.5332, +0.3937, -0.0463, -1.4745,
-0.0737, +0.5788, -0.1706, -0.2145, +0.3874, -0.1163, +0.4487,
-0.0749, -1.4194, -0.0450, -0.4730, +0.2539, -0.4686, -0.3258,
-0.2435, -0.0529, +0.0138, -0.3011, -1.7579, -0.0738, -0.5660,
-0.2687, -0.1031
],
[
-0.1890, +0.1846, +0.1528, +0.1868, -1.0463, +0.1361, -0.6526,
+0.3472, +0.9501, +0.1137, -0.1338, -0.3032, -0.1089, -0.3156,
+0.4740, -0.1239, +0.0710, -0.3375, -0.3874, -0.0470, -0.0551,
+0.5221, +0.5352, +0.0590, +0.0157, -0.1133, +0.2564, +0.0972,
-0.1763, -0.0172, +0.2382, +0.3805, +0.3693, -0.1347, -0.4978,
-0.4696, +0.1159, +0.5663, +0.0888, +0.0899, -0.1063, -0.4584,
+0.3321, +0.4006, +0.1525, -0.2469, -0.2116, -0.4201, +0.1384,
+0.6111, +0.1410, -0.1235, +0.1201, -0.1482, +0.1087, +0.4535,
-0.0998, -0.0626, -0.3032, +0.4636, -0.6396, +0.3708, -0.0050,
+0.4596, +0.5721, +0.2259, +0.2125, +0.0627, +0.3765, -0.7187,
-0.7086, +0.2721, -0.0764, -0.0565, +0.0235, -0.3755, -0.2753,
+0.4597, +0.6486, +0.2965, +0.5531, -0.3629, +0.0782, -0.1587,
+0.0848, -0.1989, -0.3563, -1.4503, +0.2558, -0.2762, +0.5357,
+0.0858, +0.1913, -0.2766, +0.1420, -0.4243, -0.1676, -0.3365,
+0.0941, +0.0516, -1.1257, -0.4089, -0.8515, -0.2830, +0.4711,
+0.3335, -0.1440, -0.0064, +0.4255, -0.2008, +0.4499, -0.5755,
+0.1673, +0.2069, -0.2422, -0.0123, -0.2220, +0.1374, -0.0896,
+0.0227, +0.3576, -0.5969, +0.4685, +0.0131, +0.2259, +0.3857,
+0.2969, +0.5092
],
[
+0.2385, -0.0237, -0.2371, -0.9950, -0.7939, +0.1525, -0.6166,
-0.0463, +0.0723, +0.2182, +0.3742, +0.1939, -0.3690, -0.5012,
-1.2359, +0.2250, -0.2047, -0.7313, -0.3548, +0.0486, +0.3503,
+0.3015, +0.2342, +0.5576, +0.0184, -0.5800, +0.0429, +0.2196,
+0.2226, -0.1715, -0.1384, +0.3288, -0.1334, +0.3295, +0.6067,
-0.0384, +0.0176, +0.0138, -0.1203, +0.0863, -0.0909, -0.3321,
-0.0272, -0.0426, -0.1637, +0.1722, -0.6915, +0.0731, +0.2473,
+0.0653, -0.1243, -0.3751, -0.6515, +0.0765, +0.3489, +0.0208,
+0.3015, +0.0442, +0.1349, +0.1498, +0.6269, +0.8148, -0.4758,
+0.2989, -0.7103, -0.2501, -0.5741, -0.0260, -0.4906, -0.2687,
+0.5507, -1.0255, +0.1971, -0.4078, -0.1656, +0.7028, +0.3517,
-0.8417, +0.2221, -0.1728, -0.0505, -1.1888, +0.3255, +0.0383,
-0.8892, +0.0544, +0.3391, -0.1401, -0.9512, -0.0529, +0.2119,
-0.3345, -0.2999, -0.4352, -0.1427, -0.0157, -0.2630, +0.1494,
+0.3705, +0.4497, -0.4527, -0.1771, -0.3700, -0.2584, -0.5270,
+0.3196, -0.5018, -0.0420, +0.4243, +0.0048, -0.3724, -0.1658,
-0.0970, -0.5838, +0.2219, -0.2543, -0.3992, -0.0216, +0.5249,
+0.1735, +0.5442, -0.0929, +0.1333, +0.1425, -0.1071, -0.1370,
+0.1609, -0.3632
],
[
-0.6062, -0.0107, -0.5037, +0.0280, -0.7645, -0.3818, +0.0495,
-0.6663, -0.1796, +0.0424, -0.7120, -0.2048, +0.1505, +1.1172,
+0.1182, +0.7021, +0.0447, +0.2636, -0.0117, -0.6400, +0.1739,
-2.0563, +0.2643, +0.2216, -0.6307, -0.3034, -1.6683, +0.2710,
+0.5509, +0.0906, +0.1050, -0.6639, -0.1404, +0.0301, +0.8906,
+0.1874, +0.4052, -0.0841, -0.5191, -1.3543, +0.3044, +0.6129,
-0.1700, +0.0759, -1.0354, -0.7217, +0.2234, +0.8004, -0.4027,
-0.1250, +0.2738, -0.9314, -0.0443, -0.1532, -0.4836, +0.5214,
-1.4886, +0.0576, -0.3373, -0.5356, -0.2243, -0.1056, +0.5262,
+0.5279, -0.9975, -1.0158, -0.0009, -2.1319, -0.6019, -0.5193,
+0.8604, +0.5502, -0.1540, +0.2157, +0.2840, -0.1492, +0.0043,
+0.1732, -0.0667, +0.7070, +0.1044, +0.0107, +0.4892, -1.3223,
-0.1142, -1.0157, -0.0481, +0.5053, +0.2333, -0.1292, +0.2140,
+0.1220, -0.3557, -1.2828, -0.1046, -0.1504, -0.0380, -0.0526,
-0.7740, +0.1770, +0.4862, -0.7069, -0.6501, -0.2251, +0.6397,
+0.5812, -0.0295, -0.3300, -0.1545, -0.2888, -0.0862, -0.2176,
-0.1580, -0.9327, -0.3072, -0.2631, +0.3495, -0.8641, +0.4137,
+0.3080, +0.4942, -0.7215, -0.4081, -0.4022, +0.0381, +0.6014,
-0.9209, -0.0248
],
[
-0.0358, -0.2476, +0.0637, -0.2123, -0.9081, -0.0865, +0.3561,
-0.5554, +0.1656, +0.1798, -0.0769, -0.3535, -0.2186, +0.2111,
-0.2864, -0.1337, -0.4597, +0.3778, -0.2488, -0.0923, -0.1262,
-0.0262, +0.2277, -0.3163, -0.0397, +0.0646, -0.4947, -0.2864,
-0.5216, -0.7771, -0.5574, +0.1186, -0.0542, +0.0515, -0.2259,
+0.4735, -0.1975, -0.0630, +0.0789, -0.1683, +0.5125, -0.4719,
+0.6143, +0.0867, +0.6118, -0.0284, -0.2530, +0.3255, +0.2878,
-0.3443, -0.7612, +0.1266, +0.1155, -0.0903, +0.3597, -0.3959,
+0.0233, +0.0967, -0.7554, +0.2049, -0.4178, -0.1526, -0.0914,
+0.2557, +0.3241, +0.3012, +0.1893, +0.2647, +0.2142, -0.1623,
-1.2681, +0.4970, -0.2651, +0.2166, -0.1153, +0.0663, -0.7682,
+0.4426, +0.3446, +0.3132, +0.6882, +0.7522, -0.1070, -0.3160,
+0.1417, -0.4353, -0.3498, -0.2256, -0.1062, -0.4434, -0.3921,
-0.5221, +0.3964, -0.4258, +0.1572, +0.1609, -0.3845, -0.5640,
-0.3708, +0.4984, -1.0621, -0.2547, -0.1994, -0.0205, +0.3775,
-0.4878, +0.0006, -0.0061, +0.0959, +0.0843, -0.1139, -0.2873,
+0.3424, -0.1215, -0.8438, -0.6846, -0.0152, +0.2998, +0.4343,
-0.8696, +0.2929, +0.1009, -0.8495, -0.7728, -0.0976, +0.0524,
-0.2486, +0.0719
],
[
-0.3571, -0.4067, +0.5145, -0.6866, -1.0174, +0.0091, +0.1718,
-0.1745, -0.2513, +0.0362, +0.1269, +0.2780, -0.4117, +0.4818,
+0.3668, -0.1561, +0.1516, -0.8762, +0.1162, -0.0460, +0.6237,
-0.0380, -0.0423, +0.1929, +0.0457, -0.8398, -0.2828, +0.3374,
-0.4903, +0.1736, +0.5767, -1.4534, +0.3863, -0.9543, +0.2292,
-0.2237, +0.4655, -0.0044, +0.0677, -0.0377, +0.2746, -0.5696,
-0.0749, +0.3972, +0.1447, +0.1636, -0.0700, -0.2671, +0.2902,
+0.0804, +0.3041, -0.2553, -0.7488, -0.7124, +0.7484, -0.7365,
+0.3321, +0.4087, -0.4259, -0.2603, +0.5249, +0.2184, +0.3844,
-0.0441, +0.1161, +0.1279, -0.2196, -0.1223, -0.0356, -0.0484,
-0.3438, -0.1186, -0.3836, +0.0660, +0.3879, +0.2712, -0.2012,
+0.1470, +0.3385, -0.1620, -0.3034, -0.5461, -0.0432, +0.2796,
-0.8137, -1.3743, +0.4925, +0.2108, -0.1027, -0.2138, -0.7378,
-0.1209, -0.4294, -0.0851, -0.2624, -0.3597, -0.8611, +0.4464,
+0.0393, -0.9927, -0.0209, +0.1669, -0.1943, -0.0069, +0.2535,
-0.2320, +0.0523, -0.0387, +0.0736, +0.6659, +0.0430, -0.8271,
-0.5452, -1.2268, -0.2558, +0.1095, +0.1778, -0.0051, +0.3514,
+0.5244, +0.0645, +0.4827, -1.0049, -0.2551, -0.3863, +0.4874,
+0.5816, +0.1889
],
[
-1.1268, -0.0477, +0.6551, -0.2983, -0.7391, +0.0787, -0.0184,
-0.7897, -0.0595, +0.5084, +0.2580, +0.0430, -0.7676, -0.5418,
+0.3700, -0.8769, -0.2995, +0.1440, -0.0261, -0.4732, +0.0802,
-0.6458, -0.3006, +0.0732, +0.5992, +0.3819, -0.5000, -0.1366,
-1.0918, +0.0226, +0.1488, -0.0729, +0.1031, +0.1314, +0.2606,
-0.3808, -0.6380, -0.5858, -0.4060, +0.2601, +0.0668, -0.1249,
+0.1666, -0.5117, -0.4081, +0.2744, +0.2560, -0.8061, -0.9125,
-0.1472, +0.2233, +0.0335, -0.3542, -1.0575, -0.2591, -0.0555,
-0.3672, -0.0313, -0.1326, -0.4782, +0.0546, -0.0755, +0.1963,
-0.2694, +0.0272, -0.2004, -0.7292, -0.2204, +0.4913, -0.0399,
-0.6559, -0.0752, -0.2607, -0.0702, +0.2666, +0.2539, -1.0182,
+0.1792, +0.1817, -1.1582, -1.8026, +0.2342, -0.7006, -0.4952,
+0.1345, +0.2998, -0.2620, +0.1336, -0.0174, -0.4578, -0.1101,
-1.5142, -0.4791, -0.2391, -0.9607, +0.3614, +0.2139, +0.3459,
+0.7567, -0.1753, +0.1528, -1.0601, +0.2125, +0.2843, -0.0785,
+0.5537, -0.2362, -0.2993, +0.1568, -0.7571, +0.2312, -0.8912,
+0.2062, -0.3023, -0.2358, +0.5970, -0.5798, -0.5901, -0.1895,
-1.0248, -0.0076, +0.0373, -0.4598, -0.3706, -1.0179, -0.1707,
+0.4706, -0.4109
],
[
+0.5548, +0.0762, -0.9732, +0.4974, +0.0721, +0.0603, +0.0403,
-0.4016, -0.1278, -0.4030, +0.0751, -0.4352, +0.2306, -0.3200,
+0.2516, -0.4887, +0.0665, -0.2368, -0.2360, +0.4221, +0.1742,
-0.2811, -0.2135, -0.4115, -0.2981, +0.3318, +0.0242, +0.2272,
-0.3173, -0.1427, +0.2518, -0.3534, -0.4959, -0.2551, -0.0797,
-0.1724, -0.4159, -0.2069, -0.2414, -0.8762, -0.3802, -0.3595,
-0.1394, -0.0372, +0.0818, -0.1363, -0.8352, -0.4604, -0.1355,
+0.0200, -0.2153, +0.1949, +0.0538, -0.4498, -0.2186, -0.2281,
+0.0772, +0.0655, -0.5071, +0.1983, -0.8508, +0.0344, -0.0185,
+0.2695, +0.5237, +0.3735, -0.0424, +0.0236, +0.1312, +0.5737,
-0.2471, -0.2865, -0.0292, -0.5521, -0.0439, +0.0889, -0.5819,
+0.4158, +0.2471, -0.4975, -0.3082, +0.1817, -0.2181, +0.1859,
-0.3464, +0.1266, -0.1987, +0.0891, -0.8471, +0.2534, +0.0612,
+0.0998, +0.5045, +0.2724, +0.2775, +0.0461, -0.1714, +0.3437,
-0.0115, -0.6069, +0.3813, -0.3904, +0.2130, +0.5443, +0.5034,
+0.2294, -0.2291, +0.2235, -0.1319, -0.0598, -0.1721, +0.2250,
+0.1713, -0.0901, +0.7149, +0.0539, +0.0448, -0.5711, +0.1461,
+0.4580, -0.1875, +0.0457, -0.1837, -0.4180, +0.0479, -0.1208,
+0.1840, +0.4426
],
[
-0.2842, +0.2116, +0.0117, -0.6003, +0.0043, -0.2358, +0.4554,
+0.7943, +0.2894, -0.1744, -0.7563, +0.4605, -0.2773, -0.3848,
-0.5594, -0.3504, -0.2448, -0.0372, -0.3179, +0.1210, +0.1392,
-0.5565, +0.2361, -0.5027, +0.7456, -0.1703, -0.0240, +0.1292,
+0.2535, -0.3699, -0.3059, -0.5017, -0.2633, +0.1405, +0.0046,
-0.3354, -0.2708, -0.1274, +0.0338, +0.1981, -0.0836, +0.6965,
+0.0527, +0.0872, -0.1278, -0.4493, +0.4030, -1.5423, +0.5086,
+0.1008, -0.3396, +0.0430, -0.4026, -0.1746, -1.0235, +0.2083,
-0.2529, +0.4123, +0.2179, +0.1214, +0.6086, -0.0055, -0.5584,
-0.6812, -0.1916, -0.3864, +0.0741, +0.1077, +0.4114, +0.0345,
-0.1636, -2.1677, +0.6062, -0.1717, +0.0664, +0.1639, +0.4483,
-0.6986, -0.0163, -0.2069, +0.2554, -0.3096, -0.0139, +0.3088,
+0.0131, -0.3570, -0.1024, -0.1159, +0.2009, +0.2716, -0.1614,
+0.3146, +0.0674, +0.2829, -0.0565, +0.2379, +0.0764, +0.0750,
+0.0191, +0.0233, -0.1372, +0.2373, +0.2386, +0.0077, -0.2598,
+0.2932, -0.2208, -0.1869, +0.4116, +0.2577, -0.3386, +0.3720,
-0.0159, +0.3788, +0.3233, +0.2683, -0.4094, -0.2736, -0.2217,
-0.1135, +0.1307, +0.2497, -0.5941, +0.0363, -0.0517, +0.1772,
-0.1428, -0.1171
],
[
-0.0162, -0.0627, +0.1236, +0.4031, -0.2465, +0.1629, +0.0719,
+0.4211, +0.1920, -0.1312, +0.0193, +0.2175, -0.1819, -0.6242,
-1.0284, +0.2454, +0.0434, -0.1952, +0.1975, -0.1499, +0.0737,
-0.3134, -0.4515, +0.1174, -0.1765, -0.1006, -0.0569, -0.9097,
-0.7455, -0.4741, +0.2333, -0.3261, +0.3061, +0.1993, -0.2329,
+0.2430, +0.1176, +0.2400, +0.2129, +0.4918, -0.2546, -1.1784,
-0.6182, -0.1537, +0.1395, +0.5393, -0.7461, -0.2201, +0.1386,
-0.2714, -0.2729, -0.1344, -0.2892, -0.7805, +0.1870, -0.4685,
-0.1468, +0.3957, +0.0906, -0.0169, -0.1142, -1.1525, +0.4167,
-0.5574, +0.0122, -0.1523, +0.2502, -0.0223, -0.0227, +0.2885,
+0.0504, -1.7032, +0.0428, +0.3443, -0.4039, +0.2699, -0.0908,
-0.3298, -0.2390, -1.0043, -1.0324, +0.0237, +0.1503, +0.5633,
-0.4685, -0.3616, -0.8335, -1.0535, -0.6771, -0.0776, -0.3784,
+0.4398, -0.6072, -0.5504, -0.5404, -0.3614, -0.3855, +0.0132,
+0.1216, -0.3302, +0.3921, +0.1087, -0.2751, +0.3683, -0.8535,
-0.0739, -0.0324, -0.8358, +0.2640, +0.0494, +0.3263, -0.3085,
-0.0733, -1.1149, +0.2494, -0.1388, -0.3078, +0.1479, +0.6488,
+0.5278, -0.6395, -0.0259, -1.1560, -0.0320, +0.0748, -0.3467,
+0.5657, +0.5099
],
[
-0.1044, -0.6576, -0.0207, -0.2102, -0.2144, +0.0663, +0.3874,
+0.2177, -0.1430, -0.3068, -1.4322, +0.4873, +0.1080, -0.1987,
-0.1861, -0.0218, -0.0747, -0.0191, +0.6192, -0.2742, -0.1812,
+0.2117, +0.6730, +0.3359, +0.5871, +0.1320, +0.0645, +0.3662,
-0.9857, +0.0502, -0.0964, -0.0680, -0.2814, -0.4225, -0.6175,
+0.3815, +0.1500, +0.2513, +0.1783, -0.0825, +0.0526, +0.2218,
+0.2001, +0.1161, -0.1507, +0.2265, +0.1415, -0.6507, +0.2255,
-0.0120, +0.3568, +0.0921, +0.3392, +0.0370, -0.1990, -0.2535,
+0.1636, +0.3225, -0.4935, +0.2109, -0.0501, +0.0897, -1.0855,
+0.1237, +0.0788, +0.5188, -0.4567, +0.0116, -0.5196, -0.1994,
-0.2475, -0.6040, +0.0321, -0.1177, +0.0018, -0.4671, -0.5711,
+0.0513, -0.5653, +0.2688, -0.2311, +0.2756, -0.0531, +0.7042,
-0.3073, +0.3589, +0.3590, -0.1418, +0.5496, -0.4660, +0.5389,
-0.1360, +0.2775, +0.2597, -0.1089, -0.1900, +0.4253, +0.3885,
+0.4209, -0.3493, +0.0138, +0.5685, +0.1056, -0.1521, -0.5811,
-0.1916, -0.0096, +0.0435, +0.1461, +0.0400, +0.2125, -0.3966,
+0.4368, +0.0176, +0.2729, +0.0513, -0.4901, +0.0455, -0.0224,
-0.3494, +0.0116, +0.3173, -0.9230, +0.2972, +0.1613, +0.4970,
-0.2340, +0.0639
],
[
-0.1437, -0.7074, -0.0227, -0.1429, +0.5628, -0.1444, -0.4686,
-0.4664, +0.2558, +0.1615, +0.1201, -0.0562, -1.6423, +0.4704,
-0.6496, -0.7396, +0.1954, -0.1594, -0.5794, -2.0340, -0.0471,
+0.1209, +0.0230, -0.1971, -0.3795, +0.3395, -0.2557, -1.0066,
+0.1323, +0.3637, +0.1311, -0.2849, +0.0459, -0.0762, -0.0104,
+0.6315, -0.3362, -0.1927, +0.4068, +0.0825, +0.4061, +0.0420,
-0.1391, +0.0454, +0.0701, +0.2792, +0.2601, +0.1755, +0.4302,
+0.0462, -1.3625, +0.1624, +0.0497, -0.0207, +0.2457, +0.2754,
-0.4668, +0.1269, -0.0177, -0.0970, +0.0154, -0.5404, -0.0584,
+0.4238, +0.1016, +0.1083, -0.4236, +0.3165, +0.0311, -0.1969,
-0.1508, +0.1011, -0.4269, -0.0473, -1.4787, -0.0548, -2.0101,
+0.3298, -0.4169, -0.1667, -0.2075, -0.5669, +0.2357, -0.1768,
+0.2307, -0.0191, +0.6791, -0.0978, -0.4987, -0.2691, +0.0821,
-0.5149, +0.1673, -0.1167, -0.0717, +0.5303, +0.6750, +0.1378,
-0.1502, -0.4372, -0.0052, -0.0900, +0.0762, -0.4988, -0.2373,
-0.4635, -0.6370, +0.5504, +0.1706, -1.2182, -0.3613, -0.4645,
-0.1783, -0.8491, -0.1739, -0.3553, +0.2115, -0.1156, -1.3149,
-0.2067, +0.3576, -0.2588, -0.5707, -1.1997, +0.3510, -0.5910,
-0.0631, -0.0799
],
[
-0.7265, +0.8917, -0.0751, -1.3913, +0.3771, -0.1059, -0.0873,
+0.3571, -0.8270, -0.1692, +0.1868, -0.3708, +0.0788, +0.4810,
+0.7026, +0.3153, -0.2533, +0.0807, -0.4520, -0.6554, +0.2868,
-0.4496, +0.0535, -0.1686, -0.2701, -0.2813, -0.7436, +0.3309,
-0.2931, -0.4216, +0.3644, -0.1687, +0.5863, +0.8018, +0.6494,
+0.4601, -0.3677, -1.0434, +0.2167, +0.5227, +0.1440, -0.4096,
+0.1883, +0.5111, +0.5387, -0.1108, +0.3406, -0.1422, +0.1151,
-0.1370, -0.7487, -0.0985, -0.4278, -0.2114, +0.2810, -0.0622,
+0.2291, -0.3401, -0.4664, -0.1857, +0.4196, +0.0946, +0.4732,
-0.0479, -0.1954, -0.6938, +0.1261, +0.0103, -0.3667, -0.0102,
+0.1229, -0.2663, +0.2842, +0.2170, +0.0748, +0.3038, +0.6471,
+0.1502, +0.4311, +0.3652, -0.3753, -0.1012, +0.4648, -0.2373,
-0.5606, +0.1091, +0.3855, -0.1598, -0.4532, +0.0894, +0.0529,
+0.2738, +0.1630, +0.6182, -0.2206, -0.2195, -0.7492, -0.2194,
-0.3159, +0.7241, +0.0582, -0.5792, -0.6760, +0.0261, +0.3155,
+0.1989, +0.3230, +0.2225, -0.2388, -0.1559, -0.1574, +0.1876,
-0.2981, -0.0996, +0.0628, +0.1548, -0.1244, -0.9343, +0.0628,
+0.2718, +0.1845, -0.0582, -0.4112, +0.3589, -0.1315, -0.2060,
+0.1827, -0.1181
],
[
+0.3130, +0.1275, -0.4132, -0.4863, +0.2244, +0.2695, +0.4271,
+0.2023, -0.0472, +0.4431, -0.1568, +0.6265, +0.2728, +0.2451,
-0.2830, -0.0059, +0.2365, +0.3480, +0.2522, -0.7088, +0.0403,
-0.0386, +0.1891, -0.3917, +0.0114, +0.4899, +0.0924, +0.3148,
-0.5466, +0.1659, -0.1183, -0.1935, -0.1756, -0.1477, +0.1472,
-0.1724, +0.2143, +0.7332, +0.0079, +0.2761, -0.3160, +0.5058,
-0.0931, -0.1448, -0.0298, +0.7194, +0.1387, -0.6623, +0.0052,
+0.2097, -0.5607, +0.5945, -0.2575, +0.2782, -0.5959, +0.1211,
-0.0791, -0.2460, -0.7347, -0.2956, +0.1763, +0.6106, -0.4464,
+0.2316, -0.2995, +0.4528, -0.8791, -0.7911, -0.1451, -0.2947,
+0.2296, -0.1733, +0.2242, -0.0035, -0.4821, -0.6993, +0.3675,
+0.2964, +0.3193, +0.2363, +0.0551, -0.0624, +0.3850, +0.1241,
-0.1481, +0.2376, +0.0553, -0.3350, -0.1162, -0.2798, -0.1833,
+0.0958, -0.1838, -0.0980, -0.1692, -0.3936, +0.3211, -0.0617,
-0.0247, +0.0837, +0.7681, +0.3259, +0.0790, -0.0632, -0.4515,
-0.5924, +0.3167, -0.4388, +0.5271, +0.0197, -0.0969, -0.2817,
-0.0572, -0.0888, +0.2329, +0.2268, +0.2331, +0.1691, -0.4536,
-0.2887, -0.2485, +0.2667, +0.0362, +0.1168, +0.2150, -0.2858,
-0.0200, +0.4715
],
[
+0.1219, -0.0217, -0.5837, -0.3836, +0.4210, -0.5505, +0.1642,
-0.1091, -0.8230, -0.6983, +0.3176, -0.1022, +0.0676, +0.2756,
+0.4560, +0.2047, +0.1622, +0.1974, +0.3630, +0.2332, -0.2842,
+0.0591, -0.1309, +0.2992, +0.5811, +0.4116, -0.6022, -0.4205,
+0.2960, +0.0698, -0.4935, -0.8271, +0.3858, -0.0605, -0.1359,
-0.2071, +0.5370, -0.6949, +0.4987, -0.2968, -0.0380, -0.4235,
+0.1417, -0.5961, +0.3926, -0.1673, -0.4168, +0.7737, +0.0137,
-1.2432, +0.2691, +0.1803, +0.7565, +0.2014, +0.0604, -0.3547,
+0.0504, -0.1369, -0.7881, -0.2128, +0.0610, -0.0196, -0.1566,
+0.0030, -0.8292, -0.0454, +0.0377, -0.0885, +0.0579, -0.2501,
+0.0370, -0.9455, -0.0527, -0.2292, -0.5440, -0.0517, -0.0122,
-0.2071, -0.0758, +0.0947, +0.0308, +0.1070, +0.1017, -0.3191,
+0.1329, -0.3563, -0.1016, -0.8970, +0.4906, -0.2456, -0.3833,
-0.3038, -0.0509, -0.1365, +0.2201, -0.5577, +0.1567, -0.1963,
-0.4634, -0.0231, +0.1323, +0.4220, -0.4158, -0.6117, -0.1371,
-1.3515, -0.4502, -0.7084, +0.4100, +0.0979, -0.1893, -0.6622,
-0.2095, +0.1093, +0.3789, -0.3222, -0.4348, +0.3956, -0.1112,
-0.1735, +0.2460, +0.0870, -0.4405, +0.1579, -0.0126, +0.3663,
-0.1206, -0.4883
],
[
+0.4090, +0.3428, +0.2512, -1.5906, +0.6582, -0.8021, +0.1520,
-0.0569, -0.4027, +0.2248, +0.2477, -0.4910, -0.9947, -0.4317,
+0.2517, -0.3626, -1.3942, +0.2331, +0.5284, -0.4955, -0.1959,
+0.0347, -0.2575, -0.1463, -0.1766, +0.1963, -0.8659, +0.0555,
+0.4978, -0.9231, -0.4148, +0.5886, -0.3435, +0.3906, -0.2323,
-0.3572, -0.0199, -0.4587, -0.4683, -0.4979, -0.2390, +0.0564,
-0.6407, +0.2675, -0.7272, -0.2480, +0.4758, -0.7479, +0.1223,
+0.0275, +0.1572, +0.0790, +0.4834, -0.3260, +0.0827, -0.3243,
-0.4053, -0.2065, +0.8476, -0.4793, +0.7518, +0.0727, -0.2254,
-0.8287, -0.7180, -0.1953, +0.2458, -0.5847, -0.3380, -0.1605,
+0.0760, +0.8777, -0.0750, -0.2355, -0.3456, -0.5562, +0.0233,
+0.0654, -0.2250, -0.1257, +0.0755, +0.1061, +0.0183, -0.6314,
-0.0276, -0.5709, +0.1427, -0.9943, -0.2089, -0.1618, +0.1928,
+0.0366, -0.4056, +0.1872, +0.3670, +0.5268, -0.2949, +0.4297,
-0.6547, +0.0743, -0.7467, +0.4479, -0.7236, +0.4257, +0.0888,
-1.5198, +0.0594, -0.8880, -0.6984, -0.3094, -0.5483, -0.9416,
-0.1274, -0.4495, +0.5366, -0.4214, +0.2500, +0.3083, -0.6264,
+0.4781, -0.3504, -0.2692, -0.4077, +0.0213, -0.9375, +0.1823,
-0.7678, -0.6795
],
[
+0.4683, +0.4362, +0.3526, -0.0894, -0.2294, -1.1023, -0.3028,
+0.1758, -0.1920, +0.3107, -0.0442, +0.1234, -0.4052, -0.7556,
-1.4301, -0.0456, +0.2569, -1.0682, -0.1381, -0.1230, +0.1955,
-0.3330, +0.1838, -0.3458, -0.5642, +0.1428, +0.2405, +0.6010,
-0.1470, -0.3523, -0.0542, +0.1510, -0.3189, -0.5421, +0.3879,
-0.2164, -0.2662, +0.1630, -0.0339, -0.3344, -1.4772, +0.1192,
-0.6321, -0.0340, -0.0587, +0.3702, -0.8599, -2.0794, +0.4569,
-0.1700, +0.1769, +0.3511, -0.0855, +0.2822, +0.2336, +0.1616,
+0.5258, -0.1024, +0.4351, +0.1208, -0.2025, -0.6205, -0.4838,
-0.1901, -0.7711, -1.0001, +0.3786, +0.3819, -0.2563, +0.1972,
+0.4146, +0.0413, +0.6957, -0.4456, +0.3663, +0.7249, -0.5925,
-1.0268, +0.4177, -0.0087, +0.4056, -0.0701, +0.0594, +0.1083,
+0.2143, -0.4115, -0.0350, -0.0193, +0.3083, -0.7987, +0.1743,
-0.3390, +0.9210, -0.1339, -0.5012, -0.5543, +0.4410, +0.0627,
+0.3689, +0.2465, +0.1989, -0.0166, +0.3254, +0.3029, -0.3015,
+0.0947, -0.2871, +0.0143, -0.0860, +0.5374, -0.5375, -0.0836,
+0.5282, +0.0927, +0.3490, -0.4070, -0.4771, -0.1295, -0.4146,
+0.2507, -0.7222, -0.3260, +0.1335, +0.0136, -0.1393, +0.1429,
-0.2269, -0.2930
],
[
-0.0437, -0.2157, +0.4923, -0.6321, +0.0022, -0.0296, -0.9576,
-0.5569, +0.3181, +0.2091, +0.3334, +0.1503, +0.1709, -1.6208,
-0.4494, -0.8162, -0.7325, -0.9672, -0.3088, +0.3997, -0.1793,
-0.5785, -0.1168, -0.4739, -0.0540, +0.4712, -0.0863, +0.1757,
+0.0740, -0.1534, +0.0955, +0.0096, -0.1520, -0.3343, +0.3403,
-0.2325, -0.3957, -0.2039, -0.1898, +0.4041, +0.0474, +0.5879,
-0.1018, -0.3031, -0.1018, -0.1546, +0.3044, +0.0565, -0.3444,
-0.0236, +0.1412, +0.2696, -0.1557, -0.8822, -0.1266, +0.2104,
-1.0103, +0.1355, +0.6538, +0.3990, -0.8994, -0.2313, +0.4144,
-0.2434, -0.2434, -0.5452, -0.3693, -1.0065, -0.2526, +0.1606,
+0.1587, -0.0940, -0.7770, +0.2070, +0.5299, +0.1534, +0.0182,
-0.5653, -0.2528, -0.2816, +0.3887, +0.3633, -0.0236, +0.0537,
-0.3035, +0.0553, -0.3131, +0.2767, -0.6836, +0.2380, -0.3762,
+0.1924, +0.2998, -0.1018, -0.5629, +0.1175, -0.0172, -0.2261,
-0.1764, +0.2219, +0.0248, +0.0758, +0.4091, +0.5297, -0.1721,
+0.2416, -0.0586, -0.0440, +0.1363, -0.9104, -0.1781, +0.1013,
-0.6489, -0.0025, +0.4615, -0.0297, +0.0765, +0.2838, -0.4600,
-0.1713, -1.6520, -0.4385, +0.0017, -0.9159, +0.2006, -0.2593,
-0.2150, +0.1949
],
[
+0.0339, -0.0568, -0.5621, -0.0932, +0.0583, -0.9944, +0.0075,
+0.3368, -0.6028, +0.2755, +0.1404, +0.3316, +0.3873, +0.0802,
-1.0236, -0.0446, +0.4943, -0.2195, +0.2907, +0.2503, -0.7739,
-0.3247, +0.2905, +0.3955, +0.1023, +0.5193, +0.0899, -0.2487,
-0.4851, +0.0584, +0.0920, -0.6081, +0.1348, -0.1767, +0.0742,
-0.5411, -0.3979, -0.1991, +0.5524, +0.0053, +0.4063, +0.1606,
+0.2620, -0.1135, -0.5295, +0.2818, -0.3286, -0.4359, -0.3249,
-0.2776, +0.5612, +0.0728, -0.5318, +0.2386, -0.8625, +0.0792,
+0.3288, +0.3633, +0.3718, -0.2269, +0.7100, -0.3257, +0.3669,
-0.3163, +0.0914, +0.1124, -0.7857, +0.0114, +0.0858, +0.0580,
+0.0056, -0.7910, -0.3239, -0.0388, +0.2442, -0.1195, +0.3433,
-0.1431, -0.1119, +0.0972, +0.1949, -0.3508, +0.0935, -0.5958,
+0.7940, -0.7228, +0.6034, -0.4416, -0.0069, -0.0365, -0.0326,
-0.0376, +0.3765, +0.4202, -0.1681, -0.6389, +0.0301, +0.3833,
-0.0720, -0.6577, +0.3238, +0.5271, +0.6202, -0.1260, -0.6644,
-0.1715, +0.4823, +0.1443, -0.0962, +0.1568, -0.3111, +0.2790,
-0.7418, +0.2376, +0.2314, +0.8497, +0.0939, -0.1574, -0.3043,
-0.7649, +0.0571, +0.3874, -0.3846, -0.0944, +0.5540, -0.4897,
+0.0387, +0.5230
],
[
+0.4302, -0.2172, -0.6644, +0.5350, -0.6361, -0.1944, -0.8780,
-0.2447, +0.2381, -0.5389, -0.2404, +0.2404, -0.4202, -0.1248,
-0.3898, +0.3124, +0.1769, +0.3764, +0.3986, -0.5957, +0.3145,
+0.2409, -0.3809, +0.1314, -0.0018, -0.6858, +0.3517, +0.0978,
-1.3349, +0.0992, +0.1196, -0.2485, +0.2190, -0.5817, +0.1557,
-0.1075, -0.2156, +0.3588, -0.8234, +0.5031, +0.0244, +0.0619,
-0.3212, -1.0465, -0.0554, -0.0448, -0.6694, +0.1508, -0.9232,
-0.1322, -0.1564, -0.3022, -0.1517, -0.1223, -0.2044, +0.2124,
-1.0978, -0.1057, +0.0144, +0.1217, -0.3823, -0.0009, +0.1590,
+0.0353, -0.1922, +0.2753, -0.3672, -0.3458, -0.0228, -0.4366,
-0.5541, +0.1015, -0.4054, -0.0574, -0.7021, -0.3660, +0.4662,
+0.1551, +0.0294, -0.2665, -0.1404, -0.5847, -0.6307, -0.3790,
-0.2180, +0.1872, -0.6189, +0.1271, +0.0037, -0.3821, -0.5943,
-0.0343, -0.2771, +0.0432, -0.7665, +0.3099, -0.2629, -0.0641,
+0.0618, +0.6157, +0.2150, -0.1869, -0.8548, +0.0362, -0.3927,
+0.4267, +0.0765, -0.3128, +0.1172, -0.0424, +0.3030, -0.4808,
-0.0301, +0.5784, +0.2766, +0.1312, +0.1552, -0.5810, +0.1366,
+0.3775, -1.1347, -1.1573, +0.1438, +0.0798, +0.0821, +0.0965,
+0.2632, -0.1688
],
[
+0.0486, +0.4445, +0.8387, +0.2568, -1.1572, +0.4689, +0.1263,
+0.2519, -0.6219, -0.0639, -0.0465, -0.3888, +0.1773, -0.0798,
-0.3921, +0.1864, -0.2427, -0.6167, -0.2716, +0.3918, +0.1758,
-0.1026, -0.3277, -0.0350, -0.4220, +0.2290, -0.3666, +0.0481,
+0.1537, -0.1635, +0.2875, -0.1457, +0.3795, -0.1145, -0.3384,
+0.2957, -0.1083, +0.3636, +0.4398, -0.4989, +0.3521, +0.6306,
+0.3257, +0.8925, +0.0012, -0.3577, -0.4091, -0.2389, +0.3009,
+0.2426, -0.1863, -0.4598, -0.5801, +0.4239, +0.7071, -0.6306,
-0.0318, +0.7025, -0.6715, +0.1279, -0.1172, -0.4279, -0.3312,
+0.3969, +0.3163, +0.0021, +0.2250, -1.1076, +0.3326, -0.0329,
-0.7956, -0.2683, +0.3947, +0.1141, +0.3432, -0.3208, +0.4752,
-0.3115, -0.2513, +0.4078, +0.3549, -0.3452, +0.4530, +0.5450,
+0.3026, -0.2971, -0.3374, -0.7781, +0.1321, -0.4501, -1.0434,
-0.2007, +0.1588, +0.3823, -0.2005, -1.5922, -0.4523, -0.5334,
-0.4728, -0.7466, -0.1689, +0.3333, +0.9141, -0.2566, +0.1104,
-0.1154, +0.0838, -0.1758, +0.1811, +0.6251, +0.3063, +0.1442,
-0.3461, -0.0870, -0.3754, -0.2758, -0.5202, +0.3635, -0.8359,
-0.1352, -0.4679, +0.5761, -0.9381, +0.1186, -0.6600, -0.4856,
-0.1229, -1.3795
],
[
-0.4596, -0.4665, -0.7232, +0.4427, -0.5278, +0.2214, +0.1276,
-0.7940, -0.8310, +0.2893, +0.1474, -0.0802, +0.4358, -0.3326,
+0.4273, -0.2247, -0.0812, +0.1327, -0.4622, +0.5568, -0.1326,
-0.2577, -0.6547, -0.0465, +0.3134, +0.1999, -0.1509, -0.0916,
-0.7934, -0.5564, +0.2215, +0.1797, +0.4205, +0.0757, +0.0866,
+0.0747, -0.6790, +0.1504, -0.0331, -0.6397, +0.4036, -0.1117,
+0.7006, -0.8588, -0.1314, -0.2884, -0.6283, -1.0481, -0.9171,
-0.2741, +0.7294, +0.1341, -0.5690, -0.2267, -0.1509, -0.1553,
+0.4570, -0.0115, +0.1082, -0.0045, -0.0435, -0.0790, +0.1057,
+0.1385, +0.5353, -0.1467, +0.3976, -0.1944, +0.1313, +0.0635,
-0.0690, -0.0125, +0.3091, +0.1871, +0.2987, -0.1033, +0.0188,
+0.2802, +0.0149, -0.5585, +0.3224, +0.2919, +0.3109, -0.0414,
+0.1248, -0.4365, +0.3245, -0.0306, -0.0583, -0.2472, +0.0395,
-0.6055, +0.0278, +0.1616, +0.5013, -0.2901, -0.0443, -0.0004,
-0.4235, -0.6453, -0.2101, -0.2178, -0.1750, -0.2190, +0.0142,
+0.1256, -0.3123, +0.4291, -0.3367, +0.1954, +0.3119, +0.1210,
-0.5771, +0.0519, +0.1849, -0.1615, -0.0733, +0.3759, -0.1807,
-0.3813, -0.5297, -0.1774, -0.0199, +0.2418, +0.3511, -0.0563,
-0.1167, -0.4282
],
[
+0.0952, +0.2561, -0.2844, -0.2665, -0.0839, +0.7566, -0.1682,
+0.2183, -0.2247, -0.4824, -0.0646, -0.3158, -1.2641, -0.3514,
-1.0803, +0.5559, +0.4353, -0.4389, +0.2619, -0.3615, +0.2897,
-0.7813, -0.3570, -0.0004, +0.3873, +0.3192, +0.0727, -1.0953,
+0.0653, +0.1268, -0.0092, -0.1782, +0.0052, +0.2457, -0.8414,
+0.1563, +0.0886, +0.0362, +0.3103, -0.1490, -0.2650, -0.5943,
-0.2877, -0.0656, -0.5241, +0.2399, -0.7845, -0.2950, -0.3548,
-0.5025, +0.5442, -0.1611, -0.1232, -0.0572, +0.2483, -0.0855,
-0.3825, +0.0387, -0.2524, +0.2587, +0.6535, -0.0393, +0.0975,
-0.3105, +0.2879, +0.3140, -0.1188, -0.0446, +0.2561, -0.1164,
+0.3638, +0.0178, -0.3148, +0.3328, +0.5837, +0.0712, +0.1361,
+0.2261, -0.4964, +1.0193, -1.0342, +0.3773, +0.1262, -0.1145,
-0.4670, -1.0224, +0.0325, -0.4796, -0.1071, -0.0128, +0.2066,
+0.2780, -0.5035, -0.3934, -0.0626, +0.2125, +0.5664, +0.3072,
+0.4962, -0.7859, +0.4584, -0.1682, -0.4819, +0.0270, -0.0845,
-1.3404, +0.7211, +0.0027, +0.2755, -0.1331, -0.2127, -0.3678,
+0.0117, +0.2418, -0.2516, -0.2810, -0.4912, +0.0286, +0.1692,
+0.0112, -0.5188, -0.2282, -0.8814, -0.5661, +0.2812, +0.5454,
-0.4854, +0.3009
],
[
+0.1870, +0.3014, -0.5271, -0.1075, +0.0156, -0.3080, +0.1362,
-0.9902, -0.3459, -0.6937, -0.4316, +0.0986, +0.2262, +0.0075,
+0.6713, +0.5430, +0.0630, -0.3519, -0.1376, +0.0561, -0.0776,
+0.2371, +0.1225, -0.0702, -0.4031, -0.0734, +0.0244, -0.5890,
-0.6585, +0.0734, -0.1912, -0.2719, +0.4767, +0.2162, +0.0277,
+0.0434, +0.4320, -0.1501, +0.0798, +0.0340, +0.1550, +0.3855,
+0.0754, -0.0348, -0.1188, +0.1943, -0.0932, +0.4244, +0.2955,
+0.4376, -0.0588, -0.3101, +0.6338, -0.8185, -1.4382, +0.0284,
+0.1375, -0.2207, +0.3143, -0.3735, +0.4699, -0.2343, +0.3439,
+0.2575, +0.0560, -0.5537, +0.1837, -0.3537, +0.4204, -0.2469,
+0.4694, -0.4818, -0.0800, +0.1547, +0.1375, -0.3788, +0.1500,
+0.7913, -0.1740, +0.5279, +0.4443, -0.3273, +0.1345, -0.1566,
+0.1651, -1.1800, +0.4721, -1.2701, +0.1045, +0.0280, -0.9942,
-0.4377, -0.8253, +0.4465, -1.0180, -0.8197, -0.1882, -0.5531,
+0.1068, +0.5739, -1.6526, -0.4122, -0.4316, -0.6513, -0.7149,
+0.2063, -0.8572, -1.2181, -0.0730, +0.1810, +0.0309, +0.1499,
-0.7045, -0.2607, -0.7460, -0.3004, -0.4975, +0.1208, -0.0947,
-0.0603, +0.2271, -1.2883, -0.1305, +0.1314, +0.0737, +0.0324,
-0.1463, -0.3041
],
[
+0.0458, -0.0894, -0.2151, -0.6742, -0.2983, -0.6282, +0.1355,
-0.1223, +0.6234, +0.5262, -0.6218, +0.3284, +0.0273, +0.0397,
-0.7536, -0.2314, -0.1161, +0.4722, -0.5954, +0.0380, -0.0210,
+0.0786, -0.3814, -0.7529, +0.2928, -0.0075, -0.6387, -0.7448,
-0.8791, +0.2035, +0.2726, -0.3057, -0.3138, -1.4312, +0.3530,
-0.1899, -0.9923, -0.5255, +0.4026, -0.6432, -0.0311, +1.1196,
+0.5319, +0.0608, -0.4065, -0.3264, -0.0691, -0.4776, -0.0131,
-0.1747, -0.0769, -0.3585, -0.1498, +0.0016, -0.5073, +0.7675,
-0.6077, -0.5249, -0.3288, -0.7887, +0.0849, +0.2405, -0.2989,
-0.0863, +0.4390, +0.0930, -0.2713, +0.0888, -0.3606, -0.7837,
-0.7676, -0.2362, -0.4155, -0.2206, +0.7188, -0.5565, -0.2318,
-0.9128, -0.5840, -0.2630, -0.9346, +0.1868, -0.2590, -0.9223,
+0.2309, -0.0921, -0.0357, -0.3798, -0.0737, -0.7020, +0.0108,
+0.4331, -0.5297, -0.1514, +0.1932, +0.1644, -0.3387, +0.1624,
-0.0290, +0.3106, -0.1923, -0.5176, -0.1456, -0.9456, -0.0735,
-0.1717, -0.5372, -0.3042, -0.5604, -0.0275, -0.1044, -0.2694,
+0.1343, +0.2035, +0.0755, +0.2408, -0.3585, +0.3206, +0.0377,
-0.6399, -0.6233, -0.7337, -0.8767, +0.1049, -0.8633, -0.2215,
+0.1198, +0.0252
],
[
-0.3981, -0.9485, +0.1615, -0.1278, +0.3925, -0.1676, +0.1565,
-0.2447, -0.4455, +0.0189, -0.2803, +0.0478, -0.0118, -0.0795,
-1.2917, -1.3892, -0.7834, -0.2114, -0.5573, +0.2180, -0.0635,
+0.4844, +0.1714, -0.1521, +0.1799, +0.1737, +0.0494, +0.6905,
+0.0204, -0.1172, +0.0560, -0.1334, -0.5119, +0.0524, -0.3917,
-0.1034, +0.3487, -0.1477, +0.3384, +0.3475, -0.1820, -0.3406,
+0.1093, -0.1681, +0.6878, -0.2988, +0.6641, -1.0641, -0.1253,
-0.3916, -0.2225, +0.1199, -0.0494, -0.0510, -0.7834, +0.1818,
-0.3580, +0.1913, +0.2561, +0.1417, +0.1223, -0.8103, -1.1695,
+0.2879, +0.2757, +0.2811, +0.3511, -0.1775, -0.7569, +0.2796,
+0.0768, +0.1223, -0.1397, -0.1127, +0.2790, +0.2812, +0.0628,
-0.6584, +0.2361, -0.0339, +0.2359, +0.0154, +0.3348, +0.2586,
+0.3898, -0.1157, +0.1350, -1.0472, -0.3809, +0.0340, +0.1931,
-0.3670, +0.0556, -0.0287, +0.0381, -0.2873, +0.1255, +0.2827,
-0.4592, -0.1651, +0.0170, +0.0656, +0.4979, +0.0304, -0.5095,
-0.3564, -0.1532, -0.2341, +0.1253, +0.0679, +0.3635, -0.5372,
+0.5080, -1.3852, +0.2097, +0.1693, -0.4685, +0.0981, -0.3357,
-0.8061, +0.3398, +0.0947, -0.5268, -0.1303, +0.2309, +0.0951,
+0.3498, +0.2407
],
[
+0.2665, -0.0532, -0.0694, +0.0253, -0.3748, +0.0328, -0.4113,
+0.0673, -0.2931, -0.0414, +0.1795, -0.0323, -0.3450, -0.1150,
+0.2287, -0.2842, -0.7729, -0.3042, -0.2463, -0.0702, -0.2516,
-0.0768, -0.1726, +0.5449, +0.0152, -0.0060, -0.2021, +0.0711,
-0.2867, +0.0975, -0.3351, -0.0997, +0.0162, +0.1531, +0.3364,
-0.0450, -0.0111, +0.1189, -0.1334, -0.6545, -0.4830, +0.0474,
+0.3154, +0.4580, -0.1064, +0.2929, +0.4214, -0.2082, -0.6345,
+0.0035, +0.6610, -0.1338, +0.1163, +0.4461, -0.2158, -0.8791,
-0.3630, +0.0719, +0.1888, +0.5635, +0.0478, -0.1271, -0.0503,
-0.5142, -0.3326, -0.3739, -0.4792, -2.6695, +0.0054, -0.5449,
+0.4130, +0.1073, +0.1240, +0.6417, +0.0663, +0.3216, -0.7660,
-0.0552, -0.3331, +0.2927, +0.3266, +0.0314, -0.1650, +0.4794,
-0.0994, +0.6366, -0.1988, -0.2426, -0.4117, +0.0769, +0.0283,
-0.6750, -0.8874, -0.3598, -0.7254, +0.0542, -0.0244, -0.2915,
+0.2490, -0.5648, -0.1576, -0.8881, +0.1953, +0.4120, -0.7454,
+0.0790, +0.7552, -0.2105, +0.2571, -0.0879, +0.0528, -0.7195,
-0.2049, +0.2198, -0.8199, -0.2095, -0.1536, -0.2254, -0.3881,
-0.0710, -0.0862, -0.0728, -0.4765, +0.0377, -0.2601, +0.5323,
-0.4757, +0.0056
],
[
-0.5613, -0.0316, +0.5095, -0.3853, -0.1999, -0.2166, +0.2410,
-0.1814, -1.0522, +0.0969, +0.9236, +0.2533, +0.0451, -0.0508,
+0.0811, +0.3947, -0.3305, -0.2005, +0.0055, +0.4741, -0.6295,
-1.6430, +0.3491, +0.0489, +0.2295, +0.0272, -1.0309, +0.3897,
+0.0007, +0.2152, -0.6022, -0.1800, -0.9179, +0.0791, +0.0380,
-0.1480, +0.1407, -0.1113, +0.1436, -0.0275, -0.0495, -0.1882,
-0.0968, -0.5008, -0.5220, -0.0886, +0.3981, -0.1246, +0.0909,
+0.1886, +0.3237, -0.5167, +0.2581, +0.3119, -0.0412, +0.0391,
-0.2006, +0.1589, -0.0481, +0.0569, -0.3648, +0.6140, +0.0046,
+0.4178, -0.4773, +0.1720, +0.1110, -0.4165, -0.3226, -0.2994,
-0.1289, +0.0845, +0.3111, +0.2951, +0.4875, -0.6199, +0.1223,
+0.4922, -0.3103, -0.1180, +0.1047, +0.6244, -0.4275, -0.2973,
+0.1429, -0.6453, +0.1289, -0.7252, +0.1454, +0.6175, +0.3022,
+0.2301, -0.5072, -0.1339, +0.2775, -0.1535, -0.2081, +0.2216,
+0.1102, -0.1702, -0.1977, +0.2827, -0.0540, -0.0717, -1.2428,
-0.4476, -0.7154, -0.5406, -0.3560, -0.2525, -0.3185, -0.1646,
+0.0909, +0.6647, -0.1795, -0.1505, -0.1118, -0.3078, -0.0913,
-1.1243, +0.2496, +0.1140, -2.2075, -0.4422, +0.6752, +0.3675,
-0.5279, -0.3644
],
[
+0.4551, -0.0319, -0.1932, +0.2266, -0.1468, -0.3072, -0.1329,
+0.1246, -0.2067, -0.2614, +0.3834, -0.4731, -0.0135, +0.0497,
-0.6278, -0.1505, +0.3569, +0.1982, +0.5036, -0.6404, +0.4569,
-0.3846, -0.0121, +0.3326, -0.2668, +0.2187, +0.0431, +0.0004,
-0.1539, +0.2131, +0.1308, -0.0228, -0.7287, -0.5572, +0.6008,
+0.1236, +0.2499, +0.1608, +0.4676, -0.5957, -0.1967, -0.4873,
+0.2819, -0.0164, -0.1783, -0.0527, -0.1868, -0.1273, -0.2112,
+0.0099, -0.0311, +0.4071, +0.4450, -0.5066, +0.1030, +0.2545,
+0.6692, +0.3454, -0.5475, -0.7075, +0.2926, -0.0085, -0.2729,
-1.2123, -0.4895, +0.7207, -0.4900, -0.1653, +0.2227, +0.2817,
+0.0906, -0.3284, -0.5089, +0.2956, +0.5533, -0.1981, +0.0924,
-0.2792, +0.3144, +0.2213, -0.6193, +0.2680, +0.1391, +0.3618,
+0.0205, -0.1751, +0.5281, -0.4151, +0.0489, -0.0676, -0.1298,
-0.1577, +0.2879, -0.0986, -0.5174, -0.1008, +0.2711, +0.1748,
+0.2306, -0.9342, +0.1570, +0.3747, +0.2685, +0.0691, +0.1287,
-0.0570, +0.2586, +0.4351, -0.1724, +0.0868, +0.2339, +0.0590,
+0.3207, -1.0457, +0.1448, -0.4016, -0.3144, -0.3675, -0.1135,
+0.5790, -0.9001, +0.0850, -0.6078, -0.1776, -0.4906, +0.5657,
+0.3524, +0.4872
],
[
-0.7267, -0.3766, -0.7996, -0.8690, -0.1338, -0.5856, -0.9790,
-0.6182, -0.4302, -0.5680, +0.2210, +0.6662, -0.0394, +0.0189,
+0.0192, -0.0539, +0.5934, +0.7768, -0.1917, +0.5445, -0.4624,
+0.0003, -0.5339, -0.3818, -0.7532, +0.6063, -0.0766, -0.1849,
+0.1387, -0.1513, -0.2904, -0.4841, -0.8696, -0.6494, +0.3400,
-0.4493, +0.7742, -0.5473, +0.3086, -0.7270, -0.3335, +0.0908,
+0.0757, -0.4248, -1.3225, -1.2498, -0.5104, -0.1792, -0.0900,
-0.1979, -1.1543, -0.6224, +0.3853, -0.4153, -0.1675, +0.3672,
+0.0385, -0.2739, +0.3483, +0.1304, -0.2027, -1.0781, +0.0487,
+0.9165, +0.3448, +0.1050, +0.6307, -0.0179, -0.2644, -0.6719,
+0.8630, +0.2447, -0.7928, -0.7747, -0.4640, +0.3419, -0.3761,
+0.4682, -0.3551, -0.7790, -0.1836, -1.2261, +0.0014, -0.0157,
+0.7793, +0.0543, -0.0427, -1.2934, +0.4384, +0.6282, -0.4136,
-0.7120, -0.0312, -0.1617, +0.3792, +0.3923, +0.4131, -0.0939,
+0.0765, -0.0514, -0.6875, -0.6300, +0.0322, -0.1902, +0.0416,
-0.0501, -0.3586, -0.0728, +0.1699, +0.6857, -0.2486, +0.4669,
-0.8803, -0.7892, -0.0981, +0.1722, -0.9295, +0.4021, -0.8335,
-0.7050, +0.0081, -0.6816, +0.3777, +0.2073, -0.2389, -0.5322,
+0.1118, -0.0305
],
[
+0.1518, +0.5124, +0.2856, +0.3206, -1.0614, -0.7988, +0.6489,
-0.3330, +0.0626, -0.7497, -0.3054, -0.3453, +0.3817, -0.3338,
-0.5490, -0.6551, -0.2633, +0.8902, +1.0987, -0.1853, -0.5848,
+0.1062, +0.4732, -0.8558, +0.2518, -0.6830, +0.0990, +0.0484,
+0.6778, -0.6857, -0.2717, -0.0487, +0.2458, +0.0854, -0.5820,
-0.2739, +0.2974, -0.0806, +0.0331, -0.1789, +0.3143, -0.2483,
+0.3278, -0.8205, +0.2240, +0.2381, +0.4927, +0.5869, -0.0209,
+0.2602, +0.0947, -0.5313, +0.1366, +0.7339, +0.1222, -0.1071,
+0.3209, -0.6647, +0.3956, -1.1462, +0.1540, -0.6027, -0.6462,
-0.0196, -0.0257, +0.2174, +0.0618, -0.4277, -0.2348, -0.1575,
+0.0113, -0.1159, -0.2370, -0.7871, +0.6167, -0.5572, +0.3090,
-0.8006, -0.1515, -0.3669, +0.7213, -0.8533, -0.3268, +0.3503,
-0.3551, +0.2349, +0.5376, +0.2798, -0.2976, +0.0167, -1.5816,
+0.0884, -0.3257, -0.0122, +0.0005, +0.5181, +0.4005, +0.2125,
+0.0817, -0.1632, -1.3130, -1.2145, +0.1358, +0.5636, -0.3694,
-0.3336, -0.6762, +0.3142, +0.6745, -1.0468, +0.3761, -0.0945,
-0.0999, +0.1924, -0.0761, +0.4415, -1.3590, +0.4247, -0.8823,
-1.2536, -0.6982, -0.6742, -1.6875, +0.2924, -0.2236, -0.2251,
-0.0849, +0.1060
],
[
-0.2809, -0.5262, -0.2225, -0.2377, +0.4993, -0.6156, -0.4075,
-0.0431, -0.0557, +0.1105, -0.6226, +0.2642, +0.0313, -0.1356,
+0.2995, -0.1605, -0.2844, +0.0588, -0.7481, +0.3667, +0.3974,
-0.5936, -0.8852, +0.3301, +0.0846, -0.2080, +0.6778, -0.4275,
+0.5747, +0.1188, -0.1728, -0.6314, +0.1443, -0.5844, +0.4162,
-0.5322, -0.4064, -0.8819, +0.0508, +0.2179, +0.1001, +0.7090,
+0.4653, -0.9416, +0.0668, -0.2436, -0.2129, +0.3427, -0.5489,
-0.9096, -0.0917, +0.0727, -0.3989, +0.5349, +0.1648, -1.0449,
-0.4291, -0.1728, -0.2757, -0.1645, -0.0187, -0.4139, +0.4843,
-0.2568, -0.0983, -0.2277, +0.0900, -1.8861, -0.5909, +0.2268,
+0.0799, -0.2358, -0.3471, -0.0441, -0.3774, +0.0699, -1.4087,
-0.0041, -0.0716, -0.4921, -0.4239, +0.3420, -0.0662, +0.9081,
+0.1914, -0.1240, -0.3794, +0.5857, +0.0536, -0.0472, -0.3365,
-0.2709, -0.0922, +0.1280, +0.1297, +0.2072, -0.3271, -0.0874,
-1.2091, +0.2147, -0.1462, +0.0871, -0.8567, -0.2144, -0.6922,
-0.5972, -0.2472, -0.0717, -0.0147, +0.2899, +0.1847, -0.1391,
-0.5344, -0.6827, +0.4351, +0.0889, -0.9915, +0.4124, +0.0220,
-0.2009, -0.9442, +0.0113, -0.9631, -0.0650, -0.4303, +0.3435,
-1.2977, -0.2697
],
[
-0.9292, +0.2608, +0.1856, -0.2181, +0.2864, +0.4665, +0.1183,
-0.8772, +0.0637, -0.6156, +0.2125, +0.0861, +0.3298, +0.0282,
+0.1922, +0.1052, +0.2205, +0.0923, +0.0565, -0.5392, -0.2462,
+0.4160, +0.3582, -0.2283, -0.7549, +0.2029, +0.4387, +0.1588,
-0.3804, +0.3135, +0.0810, -0.5952, -0.7394, +0.1698, +0.4097,
-0.7306, -0.0494, +0.0300, +0.0150, -0.1442, +0.1094, -0.3293,
+0.3067, +0.3319, -0.0988, +0.3934, +0.1500, -0.5475, -0.1318,
-0.2197, -0.5067, -0.1792, +0.0558, +0.2852, -0.3734, +0.8150,
-0.1424, +0.0324, -0.6778, -0.8345, -1.0479, -0.0120, +0.1208,
-0.2252, -0.1481, -0.3718, +0.1413, -0.3404, -0.9226, +0.4955,
+0.3686, -0.8111, -0.1841, +0.0163, -1.0335, +0.6153, -0.2061,
+0.0014, -0.1898, +0.2517, -0.7933, -0.5624, +0.0223, -0.1111,
-0.7173, -0.5490, -0.0761, -0.2222, -0.0284, +0.1807, -0.4117,
-0.0473, -0.2069, -0.7928, -1.1569, -1.3606, +0.0030, +0.0516,
+0.0198, +0.6680, +0.2385, -0.2643, +0.5895, -0.7420, +0.4909,
+0.2861, -0.0732, +0.6386, -0.2367, -0.1159, +0.0837, -0.7266,
-0.4135, +0.0526, -0.3135, +0.1495, -0.0127, -2.0576, -0.0804,
+0.0785, -0.4744, +0.2081, -1.2179, +0.7396, -0.3955, +0.2333,
-0.0346, +0.2791
],
[
-0.2268, +0.2013, -0.5402, +0.2770, -0.0082, +0.4595, -0.1749,
-0.2543, -0.1718, +0.0961, +0.0349, +0.3766, +0.0737, +0.1959,
-0.3471, -0.1843, -0.0979, +0.1185, +0.3418, +0.1544, -0.1183,
-1.2213, -0.6225, +0.0391, -0.5351, +0.3344, -0.5902, -0.1954,
-0.0066, -0.4488, -0.0837, -0.2661, -0.6075, -0.4927, +0.0208,
+0.1308, +0.2957, -0.1448, -0.3114, -0.6437, -0.1392, +0.2135,
+0.5756, -0.4650, -0.0871, -0.0378, +0.0438, -0.7491, +0.1258,
+0.2093, +0.3463, +0.3314, -0.1335, -0.2222, -0.1071, +0.4345,
-0.2314, -0.4647, -0.6163, -0.7831, -0.2002, +0.3730, -0.2890,
+0.2425, -0.6488, +0.1770, -0.1440, -0.2539, -0.2734, +0.4355,
+0.6381, -0.3941, +0.1792, -0.2053, -0.5537, -0.1537, -0.5183,
+0.2566, -0.1217, +0.2459, +0.6899, -0.0281, +0.2930, -0.5520,
+0.2116, -0.1514, -0.2412, +0.2833, -0.1344, +0.1574, +0.0028,
-0.5203, -0.2195, -0.0272, +0.5438, -0.0241, -0.5652, +0.0420,
+0.2021, -0.0554, +0.1088, -0.4577, -0.4282, +0.0337, +0.3764,
-0.4121, -1.2664, +0.3727, -0.0548, -0.2052, +0.4138, -1.0687,
+0.1104, +0.3876, -0.0396, -0.2886, +0.2004, -0.1510, +0.0247,
-0.3260, -0.6714, +0.0787, -1.5699, +0.3883, -0.1314, +0.5150,
-0.3001, -0.2822
],
[
+0.1684, -0.1801, -0.3803, +0.3242, -1.3898, -0.0164, +0.3353,
-0.0684, -0.1274, -0.5869, -0.4406, -0.0327, -0.0911, -0.1410,
-0.0482, -0.4738, -1.2314, +0.2128, -0.0070, +0.1838, +0.0874,
+0.0349, +0.2578, +0.2648, +0.5699, -0.4537, -0.2206, +0.3551,
+0.3482, -0.3540, -0.1552, +0.1978, -0.9056, -0.3204, -0.2323,
-0.6219, -0.4323, -0.1937, -0.0836, +0.1794, -0.0247, +0.1785,
+0.1420, +0.0799, +0.2649, +0.3340, -0.0749, -0.3701, +0.3743,
+0.3156, -0.1766, -0.1078, +0.5056, +0.4657, +0.4107, +0.2601,
+0.6687, -0.0331, -0.2322, +0.2948, -0.1161, -0.0473, -0.7238,
-0.1265, -0.3066, -0.3860, +0.2964, -0.2473, -0.0809, -0.3711,
-0.7312, +0.0084, +0.3872, -0.0947, -0.6261, -0.0878, +0.1778,
-0.0130, +0.1784, -0.9601, +0.2736, +0.0141, -0.1347, -0.7251,
-0.2413, +0.3626, -0.1804, -1.2693, +0.1459, -0.6756, -0.2493,
+0.0932, +0.3681, -0.5659, -0.0836, +0.0400, +0.2949, -0.3150,
+0.2663, +0.1656, -0.6306, -0.0984, -1.2519, +0.6206, +0.0805,
+0.1215, +0.3839, -0.1390, +0.7582, +0.2061, -0.1455, -1.3268,
+0.3533, -1.6798, -0.5648, -0.4298, -0.8972, +0.3650, -0.6773,
-0.0183, +0.0872, +0.4950, -0.9771, +0.2133, +0.1443, -0.0497,
+0.4870, -0.2217
],
[
+0.6267, +0.1301, -0.3306, -1.0258, +0.2057, +0.4931, -0.2351,
-0.2018, -0.2368, -0.5251, +0.3094, -0.2823, +0.4636, -0.1351,
-0.1550, +0.1763, -0.8259, -0.6003, -0.3396, +0.1493, +0.4048,
+0.1490, +0.2579, -0.1343, -0.0206, -0.6311, +0.3482, -0.0913,
-0.8055, +0.0169, +0.7410, -0.1831, +0.2173, -0.1678, -0.0208,
+0.5627, -1.4239, -0.0164, -0.0073, -0.2144, -0.4044, +0.1796,
+0.2598, +0.2679, +0.4010, -0.2579, -0.3336, -0.0400, -1.3157,
-0.0357, -0.4101, -0.0237, -0.1238, +0.5636, -0.3515, +0.4706,
-0.2472, -0.7447, +0.1059, -0.2435, +0.4814, +0.5299, +0.4779,
-0.7503, -0.1554, -0.6424, -1.2135, -0.6341, +0.2854, +0.5496,
+0.8835, -0.1009, -0.4304, +0.2574, +0.2398, -0.1378, +0.1702,
-0.8172, +0.4276, +0.2123, -0.0892, -0.2800, +0.2954, -0.8317,
+0.2422, -0.0725, -0.0830, +0.3638, +0.0008, +0.1038, -0.3386,
-0.2826, +0.6363, +0.1712, -0.3421, -0.0179, +0.3188, -0.2417,
+0.0737, +0.4939, +0.1590, -0.4242, -0.2644, -0.2834, +0.1430,
-0.0259, -1.2540, +0.1671, +0.1528, +0.4099, +0.1172, -0.2910,
+0.2072, -1.3059, -0.9609, -0.5290, +0.0630, -0.0445, -0.9980,
-0.1157, -1.6957, +0.2090, +0.3895, -0.3709, +0.1529, +0.0459,
-0.2081, +0.5258
],
[
+0.4619, -0.5045, +0.0551, -0.9881, -0.8347, +0.2548, +0.1978,
-0.7792, -0.4077, +0.4406, +0.6625, -0.3996, +0.2176, -0.4900,
+0.2775, -0.5605, +0.0858, +0.0350, +0.0021, -1.0448, -0.4682,
+0.0814, +0.0182, -0.1981, +0.3167, -0.0874, -0.4012, -0.4910,
-0.9434, -0.7696, -0.1997, -0.2376, +0.1798, -0.6100, -0.9803,
-0.5586, -0.2755, +0.2490, -0.0105, -0.0703, -0.1876, -0.5880,
-0.6362, -0.0205, -0.3295, +0.1129, -0.5366, -0.1106, +0.4670,
+0.5399, -0.7514, +0.2281, +0.5209, -0.9634, -0.2351, +0.3960,
+0.0656, -0.2068, +0.0328, -0.3803, +0.4883, +0.7068, -0.0755,
+0.3051, -0.1799, +0.3548, +0.0245, -0.8406, +0.5388, -0.3920,
-0.0589, +0.2305, -0.0688, +0.3521, -0.1127, -0.5709, +0.7565,
+0.2302, +0.1326, +0.0851, +0.2450, -0.0791, -0.4171, +0.0232,
-0.2033, +0.0597, -0.1151, -0.0953, -1.1862, +0.1883, -1.5003,
-0.7315, -0.4973, +0.0377, -0.3782, -0.9304, -0.5009, -0.1716,
-0.3631, +1.0673, -0.1637, +0.0956, +0.1959, -0.1385, -0.3929,
+0.6728, -0.8928, -0.6624, +0.2334, -0.1005, -0.6072, +0.5098,
+0.2577, -0.8387, -0.0504, +0.0006, -0.4881, -0.2785, -0.6791,
+0.2444, -1.6745, +0.7535, +0.1534, -0.6352, -0.1115, -0.8862,
+0.4983, -0.3777
],
[
+0.0039, -0.2421, -0.6910, -0.3335, -0.0837, -0.2838, +0.2400,
-1.2017, -0.1120, -0.0139, -0.5140, -0.1658, +0.5439, +0.0746,
+0.0851, -0.1371, -0.7936, +0.1147, +0.5279, -0.0356, -0.2768,
-0.0167, +0.1016, -0.2588, -0.3150, +0.2725, -0.0054, +0.2662,
+0.0579, -0.1121, -0.0991, +0.2667, +0.7342, -0.7297, +0.4873,
-0.1084, -1.1444, +0.0317, -0.1765, -0.0591, -0.0372, +0.3193,
+0.0158, +0.3735, -0.1446, +0.2017, -0.1544, +0.4685, +0.0265,
-0.6809, -0.4011, -0.0152, +0.4425, -0.2115, -0.2186, -0.1628,
+0.1875, -0.0227, -0.3668, +0.1109, +0.2440, +0.0213, -0.2574,
-0.0887, -0.1618, +0.0969, -0.0538, -0.1107, +0.5406, +0.0602,
-0.2506, -0.5262, -0.4628, +0.0089, +0.2437, +0.1938, +0.2170,
-0.3531, -0.7044, +0.1436, +0.2583, +0.3916, -0.2783, -0.2629,
+0.0761, +0.0875, -0.9802, +0.0204, +0.1054, +0.2970, +0.7322,
-0.5680, -0.0099, -0.3184, -0.1267, -0.3843, -0.0616, -0.8026,
-0.2121, +0.2040, -0.3985, -0.0946, +0.1960, +0.1519, +0.4106,
-0.0654, +0.5344, -0.3274, -0.3358, -0.2711, +0.6550, +0.4539,
-0.7734, +0.1422, -0.2948, +0.1008, -0.0853, -0.2603, +0.7007,
-0.6726, +0.0125, +0.1267, -0.6258, +0.1602, -0.2965, -0.1978,
-0.6674, -0.4518
],
[
-0.9500, +0.5158, -1.1744, +0.0740, +0.0436, +0.0438, -0.4280,
-0.2762, +0.2074, -0.1028, +0.0739, -0.0115, -0.1277, -0.2505,
-0.4827, -0.2839, +0.1152, +0.1610, +0.3091, +0.6768, -0.2460,
-0.0686, -0.4819, -0.3605, +0.1447, +0.4809, -0.4239, -0.4925,
+0.5307, +0.0843, +0.1355, -0.0367, +0.2052, -0.3004, +0.1302,
+0.4693, +0.3006, -0.4246, +0.4669, +0.1178, +0.1144, -1.1948,
-0.1282, +0.4038, -0.4283, +0.1599, -0.4491, +0.2429, -1.0034,
+0.0174, +0.5517, +0.4962, -0.2714, -0.3836, -0.2347, -0.4319,
-0.1098, -0.6308, +0.9688, -0.2591, -0.8816, -0.7226, +0.4885,
-0.1887, +0.5752, +0.2882, +0.1037, -0.0317, +0.3730, -0.1366,
-0.4621, -0.2760, +0.0279, +0.5956, -0.9843, -0.4147, +0.0957,
+0.5332, -0.2361, -0.2058, -0.0305, -0.2593, -0.1523, -0.5416,
+0.2070, -0.0619, -0.0003, +0.1300, -0.4867, +0.6031, -0.6785,
+0.1582, -0.1877, +0.0956, +0.2111, -0.6048, +0.4825, -0.1882,
+0.0376, +0.2428, -0.1485, +0.1739, -0.3589, -0.4452, -0.3057,
-0.4095, -0.2261, -0.8174, -0.6000, +0.4134, -0.3563, +0.3176,
-0.0916, -0.0814, +0.5660, +0.0409, -0.7521, +0.6180, +0.2267,
-0.5896, -0.1423, +0.0642, -0.2697, -0.2845, +0.0757, +0.2297,
-0.2817, +0.6343
],
[
+0.3890, +0.0535, +0.3087, +0.2975, -0.7018, -0.0931, +0.4065,
+0.4104, -0.1657, +0.2489, +0.0326, +0.3264, +0.2226, -0.2367,
+0.5259, -0.3036, -0.9529, +0.4010, +0.1735, -0.0055, +0.1524,
-0.6000, +0.5753, +0.1844, -0.1587, -0.3337, -1.9309, -0.1105,
-0.4555, +0.4847, +0.0744, +0.0622, +0.2596, +0.5020, +0.2214,
+0.1037, +0.5817, +0.1068, -0.1771, +0.0581, +0.3138, -0.0515,
+0.3535, -0.8904, -0.5829, +0.2976, +0.2191, -0.0195, -0.8137,
-0.6905, +0.2786, -0.1527, -0.4237, +0.1000, +0.2552, +0.0808,
+1.0988, -0.1274, +0.2045, +0.4628, +0.1749, -0.3866, +0.6104,
-0.3980, -0.0178, -0.3929, -0.1803, -0.8545, -0.0878, -0.4427,
+0.3839, -0.5144, -0.3475, -0.0654, -0.1235, +0.2331, -1.4457,
-0.4737, -0.6052, +0.5628, -0.2034, +0.3515, -0.2301, -0.0919,
+0.3140, -0.3579, -0.6903, +0.3909, -0.9177, +0.2810, +0.0279,
-0.3726, +0.4125, +0.2434, +0.0410, +0.0660, -1.0987, -0.0353,
+0.2336, +0.1051, -0.4656, -0.4937, +0.0350, -0.2236, -0.0849,
+0.5482, -0.6291, -0.2668, -0.7553, +0.4446, -0.1521, +0.2322,
-0.0618, -0.7684, -0.0211, -0.7521, -0.4413, -0.2209, +0.1516,
-0.3465, -0.2536, -0.3436, -1.3926, -0.0055, -0.0946, -0.2139,
+0.4978, +0.1550
],
[
+0.1824, +0.0143, -0.1206, +0.3375, -0.3361, +0.3083, -0.0700,
+0.8453, +0.2253, +0.2099, -0.1996, -0.3395, +0.3576, -1.3679,
-0.0428, -0.7877, +0.3538, +0.4466, -0.0145, +0.5967, +0.1201,
+0.2372, -0.3292, -0.3154, -0.1127, +0.1202, -0.1974, +0.6890,
+0.3200, -0.0740, +0.1855, +0.0077, -0.0945, +0.2474, -0.1007,
+0.0700, -0.4022, +0.1296, +0.4136, +0.5564, -0.1594, -0.2018,
-1.2523, -1.2306, -0.1650, -0.0376, +0.0669, +0.0331, -0.0342,
-0.2498, -0.3276, +0.4391, +1.0456, +0.0483, -0.2005, +0.2550,
-0.6899, -0.0965, +0.4833, +0.3001, -0.6011, -0.0260, -0.2868,
-0.7893, -1.0207, -0.0175, -0.0952, -0.2876, -0.7890, +0.2103,
+0.4201, -0.4367, +0.4146, -0.3874, -0.2699, -0.2160, +0.1711,
-0.8880, -0.1532, -0.2323, -0.4310, -0.2285, -0.1300, -0.0841,
-0.6891, +0.2807, -0.0937, -0.3057, +0.6814, +0.2866, +0.5278,
+0.8035, -0.5203, -0.1778, -1.0515, +0.6584, +0.1471, -1.0273,
-0.2658, -0.5072, +0.0127, -0.0615, -0.2851, +0.0409, -0.3019,
+0.2049, -0.0702, -0.3491, +0.0534, +0.7843, +0.3878, -0.5921,
+0.5680, +0.2656, +0.5469, -0.1330, -0.0072, +0.1807, -0.4021,
+0.0038, +0.2884, -0.4720, +0.1204, +0.4983, -0.1370, -0.0990,
+0.7926, -0.3488
],
[
+0.2300, +0.6087, -0.0082, -1.2226, +0.1576, -0.4915, -0.3726,
-1.0621, -0.6511, +0.2293, -1.1797, +0.5666, -0.7113, +0.0993,
-0.2472, -0.4969, -0.9459, -0.0998, +0.1155, -0.3605, -0.3714,
-0.0703, -0.9070, -0.8149, +0.4075, +0.3847, +0.3870, -0.6099,
+0.3129, -0.5348, +0.2247, -1.0771, +0.5544, +0.3654, -0.5315,
-1.0543, -0.4763, -0.4465, -0.6050, -0.2096, -0.1478, +0.3596,
-0.3324, +0.3449, +0.2356, +0.0944, -0.2123, -0.2224, +0.2490,
-1.4370, -0.0835, -0.2723, +0.1200, -0.3455, +0.1009, -0.3616,
-0.3534, -1.7871, -0.3896, -0.2862, -0.2670, -0.3593, -0.5910,
+0.5745, -0.2602, +0.0340, +0.1708, -0.4089, +0.3851, -1.6957,
-0.2035, -0.6983, +0.0559, -0.5295, +0.4757, +0.2245, -1.7969,
-0.2094, +0.1555, +0.4289, -1.2022, +0.0317, -0.1430, -0.2117,
+0.4984, -0.4803, +0.7204, -0.3257, -0.6799, +0.0338, +0.3276,
+0.3263, -0.3260, -0.9453, -0.0989, +0.3106, -0.5899, +0.0706,
-0.0357, -0.1701, -0.4282, -0.4814, -0.8425, -1.0043, -0.2107,
+0.0957, -0.8044, -1.7443, +0.2266, +0.0735, +0.1612, -0.1467,
+0.0684, -0.8959, -0.7182, -1.7431, +0.0837, +0.2855, +0.2861,
-0.3948, -1.2593, -0.1133, +0.1008, +0.3739, -0.0448, -0.2614,
-1.0801, -0.0383
],
[
-0.0219, +0.4673, +0.0720, +0.1025, -0.1718, +0.4312, +0.3162,
+0.1224, -0.0772, -0.2335, +0.3989, +0.5195, +0.0886, +0.0311,
-0.9016, +0.4079, +0.3557, +0.0124, +0.2610, -0.0354, +1.1624,
+0.1489, +0.2329, +0.0525, -0.7647, +0.1217, -0.2150, -0.1314,
+0.1197, +0.2997, +0.0565, +0.6098, -0.0841, -0.0375, +0.6587,
-0.0039, -0.2714, +0.7896, -0.3866, -0.7326, +0.3856, +0.7797,
-0.0919, -0.1280, -0.2440, +0.5504, +0.2908, -0.2026, +0.1612,
+0.1853, -0.2838, +0.5732, -0.0413, -1.3385, -0.7872, -0.4109,
+0.6555, +1.0085, -0.6796, +0.1276, -0.0543, -0.6212, -0.4915,
-0.5643, +0.1743, +0.0960, +0.1293, +0.0850, +0.5357, +0.5093,
+0.4603, +0.2881, -0.1999, -0.4185, +0.1500, +0.4510, -0.4934,
-0.4103, -0.1194, -0.0130, +0.2831, -0.0871, +0.5230, +0.1946,
-0.4303, +0.7498, +0.4965, +0.0346, -0.3671, -0.7613, -0.2100,
-0.3338, -0.1799, -0.1297, -0.2704, +0.5702, -0.3001, +0.2092,
-0.4677, +0.0012, +0.4288, -0.2035, +0.1332, +1.0783, +0.0607,
+0.4426, +0.5940, +0.6015, -0.8767, +0.3162, +0.3726, -0.2623,
+0.1870, -0.0285, +0.1607, -0.0021, +0.0700, +0.2217, +0.3027,
+0.2830, -1.4722, -0.0339, +0.3761, -0.3429, -0.2930, -0.0237,
+0.1299, -0.4305
],
[
+0.1671, -0.2823, +0.1159, -0.7041, -0.5511, -0.4782, -0.4659,
-1.1058, +0.1460, -1.3718, +0.2498, +0.1930, -0.2512, -0.1289,
-0.2683, -0.5802, -0.0859, +0.0886, -0.0273, -0.0261, -0.4940,
+0.0258, -0.0169, +0.1049, +0.2472, +0.1351, +0.7329, -0.1584,
+0.1520, -0.3350, -0.6529, +0.1952, -0.9995, +0.0622, -0.4440,
+0.2660, -0.8487, +0.1427, +0.7882, +0.2495, +0.2777, +0.1706,
+0.0289, +0.6014, -0.7555, -0.3365, +0.3145, -0.5503, -1.6347,
-0.6634, +0.4050, -0.5163, -0.6031, +0.4128, -0.9969, -0.3725,
-0.1041, -0.7641, -0.1003, -0.3806, +0.2152, +0.0487, -0.0878,
-0.0076, +0.1628, -0.6543, +0.0234, -0.2593, -0.2289, +0.1173,
+0.1792, -1.0842, -0.9257, -0.4630, -0.3395, -0.6874, -0.0962,
-1.0616, -0.5033, +0.5055, -0.5397, -0.8901, +0.0775, -0.3459,
-0.3129, -0.0488, +0.3182, -0.2662, -0.7964, -0.1391, -0.2649,
+0.2875, +0.4105, -0.7456, -0.3695, +0.2845, -0.0605, -0.0508,
-0.2737, +0.1793, -0.5491, +0.4235, -0.0912, +0.3686, -0.7618,
-0.1382, -0.5352, -0.2718, +0.4649, +0.5763, +0.4316, -0.3403,
-0.0679, -0.4345, +0.2904, -0.3905, -0.9981, -0.2377, +0.2218,
-0.3052, -1.3612, -0.4851, +0.5302, +0.1600, -0.2865, -0.2246,
-0.4073, +0.3381
],
[
+0.0889, +0.0775, -0.6289, -0.1875, +0.4177, -0.2482, -1.6165,
-0.7609, -0.1220, +0.1504, +0.4350, +0.4812, +0.0481, -1.1510,
+0.2691, +0.3924, -0.5124, +0.2843, +0.0126, -0.1909, +0.2830,
+0.3845, -0.5436, +0.1834, -0.3164, +0.1342, +0.6956, +0.5842,
-0.2587, -0.6035, +0.4699, -0.4336, +0.0916, -0.7320, +0.1821,
-0.2561, +0.4985, +0.1053, -0.6610, +0.7626, -0.6151, -0.0714,
+0.4482, -1.2292, -0.1627, -0.2995, -0.7809, +0.2100, -0.5498,
-0.0004, -0.4434, -0.0608, +0.2343, -0.0094, +0.5446, +0.2986,
-1.0180, -0.0752, -0.3189, +0.1246, -0.8013, -0.5674, -0.1408,
+0.4468, +0.3148, -0.2493, +0.3091, -1.9250, -0.7569, +0.3410,
-1.9147, -0.2626, +0.1818, -0.2512, -0.2214, -0.3516, -0.0064,
-0.2877, +0.0865, +0.5975, -0.2466, +0.1317, +0.0568, -0.6254,
-0.0446, +0.3834, -0.4067, +0.3229, -0.5899, +0.4960, -0.4747,
+0.5292, -0.7871, +0.3493, -0.6473, +0.5501, -0.0895, -0.3299,
+0.4775, -0.1642, +0.0202, +0.1572, -0.3672, -0.3397, -0.8822,
+0.0939, -0.2822, +0.1689, -0.1651, +0.0127, +0.4674, +0.3544,
-0.5526, -0.0664, +0.1906, +0.7608, -0.4556, -0.4964, -0.3022,
+0.2400, -0.6743, +0.1732, +0.3559, -0.0506, +0.1518, -0.1480,
+0.2606, +0.0297
],
[
+0.1881, -0.1553, -1.5724, -0.3950, -0.0466, -0.2909, -0.0651,
-0.1796, +0.0579, +0.6609, -0.0485, +0.0586, -0.3038, -0.0271,
-0.4263, +0.4364, +0.0581, -0.1549, +0.0078, +0.0661, -0.7348,
-0.2544, +0.1467, +0.3350, -0.5166, -0.0372, -0.2058, +0.5173,
-0.1624, +0.5320, +0.3230, -0.6647, -0.3546, -0.3545, -0.3915,
-0.0860, -0.5308, +0.1087, +0.1293, -0.4919, -0.0958, -0.8816,
+0.5965, +0.3580, -1.1357, -0.1426, -0.0060, -0.3153, +0.3681,
-0.0367, +0.6846, +0.2793, +0.6284, -0.2864, -0.5706, +0.3115,
-0.0177, +0.2115, +0.3319, +0.0076, -0.0807, +0.1127, +0.0774,
+0.3027, +0.3027, +0.9219, -0.3770, +0.0644, -0.3406, +0.0139,
+0.0360, -0.1875, +0.1214, -0.0015, -0.2289, -0.1416, -0.3921,
-0.1148, -0.3307, -1.1176, -0.4131, -0.0516, +0.3109, -0.6787,
+0.3690, +0.0135, -0.1024, -0.0886, +0.0624, +0.1677, +0.8001,
-0.0035, -0.1321, -0.0525, +0.1772, +0.0420, -0.6397, -0.4285,
+0.4524, -0.5925, -0.1888, +0.3598, -0.0754, -0.3101, -0.3709,
+0.1504, -0.2078, -0.8417, -0.5245, -0.1972, -0.5917, -0.5529,
-0.2298, -0.3431, +0.3858, +0.3420, -0.3509, +0.7314, +0.6297,
-0.0504, +0.0196, +0.1993, +0.3695, -0.6020, +0.1692, -0.3026,
-0.0274, +0.0532
],
[
-0.0535, -0.0434, +0.3107, -0.0591, -0.4013, +0.0417, +0.5899,
+0.0130, +0.5355, -0.5781, +0.3742, -0.1657, -0.0823, +0.1356,
+0.2386, -0.1355, -0.4844, -0.4785, -0.1089, +0.2514, +0.1284,
+0.6192, -0.5746, -0.0572, -0.3031, -0.0263, +0.3194, -1.4349,
-0.7038, -0.0708, +0.0176, +0.1220, +0.1893, +0.3585, -0.0543,
-0.0984, +0.0147, +0.5180, -0.9553, -0.3607, -0.6300, -0.2242,
-0.3692, -0.3634, -1.2479, +0.6050, +0.4687, +0.1761, -0.1653,
+0.4686, +0.5265, -0.1602, -0.0254, +0.2749, -0.0829, -0.6103,
-0.2437, -0.3340, +1.1699, +0.2183, +0.0314, -0.3264, -0.6732,
-0.1501, -0.3775, +0.1754, -0.6461, +0.0506, +0.4287, -0.1066,
+0.8648, -0.3817, -0.1776, +0.2415, +0.4775, +0.2610, -0.3349,
-0.1050, -0.4431, +0.2219, +0.7191, -0.2294, +0.3001, +0.8845,
+0.1041, -0.2900, -0.2438, -0.3065, +0.3172, +0.4716, +0.6355,
+0.3926, +0.0178, -0.9722, +0.7055, -0.1003, -0.0791, +0.0472,
-1.5021, -0.7299, +0.0327, +0.3117, -0.7694, -0.2055, -0.3508,
-0.4653, -0.1949, -0.9672, -0.1199, -0.0421, +0.0697, -0.5390,
+0.1305, -0.8102, -0.1820, +0.3357, -0.3483, -0.7162, +0.5876,
-0.3791, -0.3721, -0.9082, -0.7910, +0.5692, +0.4901, -0.6508,
-0.1578, +0.2336
],
[
-0.0991, -1.9675, -1.1434, -0.0578, -0.4255, -0.0507, -0.3146,
-0.7487, -0.4001, +0.1678, +0.7082, -0.1235, -0.0231, -0.3190,
-0.0053, -1.8510, -0.7238, -0.5839, -0.2875, +0.2034, -0.0091,
+0.0977, -1.2770, -0.2555, -0.3162, -0.1805, +0.1029, +0.0722,
-0.1223, +0.2691, -0.4410, +0.3330, -0.1966, +0.3145, +0.2016,
-0.1409, -0.4752, -0.0351, -1.0175, -0.0973, +0.2214, +0.0240,
+0.5960, -0.3388, +0.1563, +0.3184, -0.6502, -0.9855, -0.9964,
-0.1694, +0.4374, -0.0568, +0.2252, +0.0092, -0.4467, +0.0417,
-0.5258, -0.0546, -0.6569, -0.3540, -1.2910, -0.7955, +0.1682,
-0.1409, +0.1721, -0.4994, -0.0251, -1.7718, -0.6462, -0.4405,
-0.7201, +0.3206, -0.0858, -0.0252, +0.1274, +0.2316, -0.5193,
+0.1882, +0.2965, +0.4450, -0.3046, -0.5045, +0.5530, -0.2995,
+0.4155, -0.2074, -1.0422, -0.9297, -0.6145, -0.2277, +0.4566,
+0.1720, +0.0193, -0.3610, -0.0938, -0.6841, -0.3188, -1.2707,
-0.0435, -1.0730, -0.3487, -0.0927, -0.0735, +0.0962, -0.2547,
-0.0440, -0.6346, +0.1233, +0.0456, +0.5358, +0.0747, -0.1466,
-0.4682, -0.2771, +0.1966, -0.2784, -0.4773, -0.4910, -0.2745,
+0.4193, -0.2568, -1.9416, -0.5763, -0.3306, -0.1720, -0.0927,
-0.8908, +0.1176
],
[
+0.2536, -0.5662, -1.4522, -0.5358, -0.2006, +0.0220, -0.0610,
+0.0789, -1.8121, +0.1627, -0.3495, -0.6294, +0.4751, +0.1971,
-0.4895, +0.0809, -0.6595, -0.5438, +0.7782, +0.6407, -0.0433,
+0.1764, -0.2216, -0.0686, +0.0906, -0.6181, -0.3142, -0.4193,
-0.5353, +0.2141, -0.2462, -0.4703, -0.5101, -0.5080, -0.3058,
-0.0421, -0.3943, -0.1619, +0.3189, +0.7229, -0.5070, +0.4745,
-0.5701, -0.2622, -0.0397, -0.5832, +0.8179, +0.3112, +0.4857,
+0.5036, +0.5898, -1.3488, -0.5989, +0.0316, -0.0931, -1.2044,
-1.3549, -0.4709, +0.2934, -0.5466, -0.1476, +0.3979, -0.4231,
+0.1120, -0.3600, -0.7282, +0.3255, +0.1035, -0.6146, +0.1888,
-0.0499, +0.1143, +0.3409, -0.0101, +0.1368, -0.0465, -0.5839,
-0.3748, +0.2394, +0.9458, -0.0254, -0.4362, +0.1776, -0.0580,
+0.3576, -1.2487, -0.5310, -0.1680, +0.5085, -0.0916, +0.0656,
+0.4350, -0.1020, +0.8172, +0.1495, -1.1081, -0.7834, -0.3577,
-1.9330, -0.3652, -1.1755, +0.0698, -0.3965, -0.2653, +0.1445,
-0.0767, -0.5884, -0.2787, -0.0916, -0.0178, -0.9965, +0.0826,
+0.0155, +0.2869, +0.2088, +0.5580, -1.0579, -0.1831, -0.5635,
-0.1048, -1.1000, +0.3504, +0.0230, -0.4332, -0.3865, +0.0846,
-0.3690, -0.4813
],
[
-0.4787, +0.2002, +0.2028, -0.0158, -0.2770, +0.1566, +0.1161,
-0.0419, +0.2129, -0.2984, -0.1568, -0.0816, -0.0431, -0.2907,
+0.0723, -0.1756, -0.0761, +0.0398, -0.3367, +0.2629, +0.3230,
+0.0630, +0.1386, +0.0527, +0.2323, +0.0386, +0.1846, -0.0896,
-0.1129, -0.1477, +0.0972, +0.1004, -0.1855, -0.3664, +0.3041,
+0.2854, +0.2017, +0.1173, +0.4246, +0.2498, +0.3192, -0.3189,
+0.4909, +0.2758, -0.5229, -0.1006, +0.2822, +0.1657, -0.6560,
-0.3957, +0.4602, +0.1682, -0.5488, +0.1815, +0.1368, +0.0183,
+0.2881, -0.1455, +0.2657, -0.8587, -0.1013, +0.4186, +0.3135,
-0.8613, +0.2746, +0.5370, -0.2433, -0.2217, +0.3542, +0.2261,
+0.1301, -1.1449, -0.9689, -0.7366, +0.1102, -0.0582, -0.2559,
-0.1644, +0.0449, +0.1363, -0.1750, +0.2502, -0.1844, -0.1620,
-0.2977, +0.5287, -0.4214, +0.6418, -0.2855, -0.0037, -0.4145,
-0.3059, +0.0475, -0.0043, -0.8526, -0.5528, +0.3328, +0.2346,
+0.1871, -0.5865, +0.6419, -0.1642, +0.3508, -0.0835, +0.2359,
+0.0595, -0.1623, +0.1377, -0.1352, -0.3811, +0.4506, +0.2045,
+0.0203, -0.0315, -0.0122, -0.3571, -0.1826, +0.5790, -0.3454,
-0.4992, -0.9696, -0.5495, -0.3217, +0.1935, -0.6748, +0.0319,
+0.1601, +0.0082
],
[
+0.1543, +0.2776, +0.2125, -0.1371, +0.4160, +0.3294, -1.0957,
+0.2656, +0.2960, -0.4023, +0.2340, -0.1207, -0.3459, +0.1713,
+0.6184, +0.5171, -0.2404, +0.1536, -0.2846, -0.5125, +0.3653,
-0.5599, +0.0234, -0.3938, +0.0080, +0.4929, -0.9391, -0.0329,
+0.1044, +0.0408, +0.2737, +0.9848, -0.6877, -0.5425, +0.0472,
+0.2561, +0.2861, +0.2821, -0.2904, -0.5215, -0.3803, -0.6176,
-0.9423, +0.1615, -0.1899, +0.3273, +0.0132, +0.4926, -0.3626,
-0.4278, -0.1285, +0.0150, +0.1360, -0.3941, -0.0030, -0.1985,
-1.1165, +0.1333, -0.2363, +0.3486, +0.1008, +0.2353, -0.2938,
-0.4431, -0.2364, -0.2566, +0.1370, -0.9465, +0.1358, +0.3459,
+0.2047, +0.2755, -0.3602, -0.1038, -0.4590, -0.3507, -0.9952,
+0.0772, -0.0768, -0.2582, -0.5881, -0.2763, +0.4777, +0.0767,
-1.0746, +0.4439, -0.8703, -0.5357, -0.0913, +0.2446, -0.5459,
+0.2101, +0.0278, -0.1329, +0.0798, +0.3622, +0.0351, -0.4444,
-0.2385, +0.1734, -0.1854, +0.1911, +0.0680, -0.1799, +0.0571,
-0.4096, +0.3666, +0.0189, -0.8021, +0.0096, +0.2552, -0.1276,
-0.0449, -0.0363, +0.4031, -0.5259, +0.2744, +0.1172, +0.2298,
+0.5545, +0.4626, +0.5044, -0.0241, -0.4306, -0.1280, +0.3301,
+0.1978, -0.3536
],
[
+0.1044, -0.7254, +0.2392, -0.2555, +0.5187, -0.1259, -0.0267,
-0.0483, -1.1547, -0.4744, +0.4596, -0.2068, +0.2914, -0.1488,
-0.5269, +0.5604, -0.9596, -0.6791, -0.1505, +0.4476, -0.6482,
-0.4336, +0.2112, +0.1545, -1.2010, -0.2492, -0.1578, +0.0995,
-1.7680, -0.0496, -0.2035, +0.7748, +0.3368, -0.1282, +0.8091,
+0.1360, -0.2153, +0.5392, -0.3572, -1.0836, -0.4843, -0.0050,
-0.1601, +0.2288, +0.0453, -0.3114, -0.1580, -0.3662, +0.3852,
-0.3149, +0.3257, +0.0765, +0.3093, +0.1396, +0.1867, +0.5917,
+0.3227, +0.0719, +0.0757, +0.3152, +0.0477, +0.8263, +0.4331,
-0.3088, -0.3377, -0.1028, +0.0539, +0.0915, -0.6793, -0.4235,
+0.3025, +0.3221, -0.3192, -0.0481, -0.0763, +0.2885, -1.1171,
+0.6361, +0.7532, -0.7269, +0.0376, -0.2484, -0.1291, +0.3408,
+0.3876, -0.3870, +0.7317, +0.3827, -0.2842, -0.5059, +0.4893,
+0.0877, +0.1553, +0.9659, -0.0103, -0.3438, +0.9302, +0.1913,
+0.3300, +0.1906, -0.5161, -0.5922, -0.2896, +0.2806, -0.1469,
-0.7416, -0.3286, -0.1041, +0.3214, -0.8853, -0.3089, +0.3274,
+0.2492, -0.8162, -0.3591, -0.2656, -0.4484, +0.0786, -0.7396,
-0.2242, +0.4194, -0.0522, +0.2031, +0.5960, +0.5511, -0.2235,
-0.3685, +0.1638
],
[
-0.3699, -0.3807, +0.0028, +0.3853, -0.3473, +0.3783, -0.3137,
+0.4999, -0.4700, +0.1855, +0.0250, +0.1907, +0.0717, -0.0765,
+0.2081, -1.0673, -0.0229, +0.8743, +0.1154, -0.2901, -0.0200,
+0.5757, -1.0128, -0.6901, -0.3488, -0.5791, -0.6534, -1.1944,
+0.6867, -0.5736, -0.1345, +0.4021, +0.5175, +0.4459, +0.1793,
-0.1494, +0.4040, +0.4946, +0.0618, +0.1245, +0.1907, -0.9389,
-0.0765, -0.6841, +0.0778, -0.3652, -0.0633, +0.1939, -0.1546,
+0.1572, -0.3063, -0.1174, +0.2252, -0.0803, -0.1216, -0.9586,
-0.0063, +0.5818, -0.3919, +0.4517, -0.3592, +0.0098, +0.0147,
-0.1723, +0.2093, +0.4135, +0.8197, +0.2438, +0.0600, +0.4231,
-0.1608, +0.1411, +0.3974, -0.4794, -0.2719, -0.2291, -0.1298,
-0.1203, -0.3707, +0.1493, +0.1233, -0.0575, +0.2616, +0.0770,
-0.3670, -1.1465, -0.4462, +0.1386, +0.1485, -0.2544, -0.5235,
+0.0563, +0.0058, +0.1420, -0.3137, -0.2150, -0.8871, +0.0340,
-0.1614, +0.6602, -0.2263, -0.4392, +0.0526, +0.2306, +0.5134,
-0.3948, -0.1330, -0.0663, -0.7350, +0.0353, +0.0651, -0.1853,
+0.4299, -1.3736, +0.2536, +0.1521, -0.0121, +0.6278, +0.3257,
-0.1518, -0.2645, +0.4809, +0.5553, -0.0840, -0.1191, +0.1542,
+0.1102, -0.2089
],
[
-0.0419, +0.2234, -1.4385, -0.1830, +1.3311, +0.6173, -0.2102,
+0.0031, -0.4902, +0.0924, +0.1009, +0.0748, -1.5489, -0.1538,
-0.2732, -0.0818, -0.6964, +0.2878, +0.2451, -0.4405, +0.1007,
-0.0500, +0.3956, -0.2394, -0.4406, +0.1492, +0.1290, -0.3666,
-0.3905, +0.1184, +0.4340, -0.5971, +0.0667, -0.1793, -0.5301,
+0.5881, -0.7302, -0.5281, -0.2048, +0.0339, -0.6472, -0.9960,
-0.6116, +1.3143, +0.5008, -0.5765, -0.1432, -0.1568, -0.2412,
-1.2500, -0.4832, +0.0049, +0.1551, +0.6098, -0.0762, +0.0962,
+0.1702, -0.4485, -0.9531, -0.2700, +0.2681, +0.1645, -0.9228,
-0.2238, +0.0731, +0.1705, +0.3593, +0.4110, +0.3440, -0.4387,
-1.1249, +0.5916, +0.0850, +0.1453, +0.4015, +0.2324, -0.2840,
+0.0777, -0.1384, -0.0731, +0.0235, +0.0055, +0.3287, -0.1665,
-0.3519, -0.2227, -0.4549, +0.0024, +0.1338, +0.0871, +0.3680,
+0.5274, -1.4743, -1.1676, -1.4731, -0.2902, -0.5654, -0.3894,
+0.6527, -0.3981, +0.2669, +0.1013, -0.8804, -0.9715, -1.0932,
-0.7290, +0.0872, +0.0093, +0.4981, -0.8423, -0.2592, -1.1377,
-0.1514, -0.1664, -0.6785, -1.5269, -0.0527, +0.9430, -0.0178,
-0.6142, -0.6908, -0.5368, -1.6182, +0.4248, -0.5426, -0.3651,
-0.6827, -0.7300
],
[
-0.5645, -0.0951, -0.8245, -0.2915, +0.0918, +0.1296, -0.8500,
+0.8293, +0.0947, -0.0885, -0.1730, -0.3815, -0.0692, -0.1989,
-0.0563, +0.4089, +0.4482, -0.0204, -0.0750, -0.0014, +0.2101,
+0.3434, -0.0062, -0.1062, +0.2121, -0.3191, +0.1604, -0.5890,
-0.8282, -0.0523, +0.2027, +0.1242, +0.3165, +0.3932, +0.3421,
+0.1213, -0.3542, -0.2865, -0.6644, +0.2467, -0.1323, +0.2825,
-0.2337, -0.5893, -0.0853, -0.8551, -0.4441, -0.5042, -1.1045,
+0.0843, +0.0584, -0.7077, -1.2498, -0.6926, -0.3114, -0.0837,
-0.6698, -0.2653, +0.0489, -0.3547, -0.3114, +0.2408, -0.0194,
-0.4708, +0.1986, +0.0656, +0.3541, +0.1654, +0.0352, -0.2591,
-0.8707, +0.2080, -0.2313, +0.0107, -0.1276, +0.2932, -0.0761,
+0.5935, +0.4470, -0.2565, -0.1063, -0.5294, +0.3806, +0.0910,
-0.4328, -0.2868, +0.4114, -0.8960, +0.3032, +0.5253, -1.0797,
-0.1307, -0.6211, +0.0088, +0.0559, +0.1488, +0.2288, -0.2893,
-0.4812, -0.6787, -0.8455, -0.6098, +0.0031, -0.2632, -0.0195,
-0.1413, -0.7222, -0.4115, -0.5803, +0.4494, -0.2986, -0.4152,
-0.3315, -0.2730, -0.7341, -0.7955, +0.3027, +0.2047, -0.6978,
+0.1749, -0.0703, -0.7464, -0.1227, -0.6139, +0.1868, +0.1686,
-0.4433, -0.5345
],
[
+0.5373, -0.5023, +0.3304, +0.1281, +0.3533, -0.6281, +0.2297,
-0.4778, -0.1587, -0.6127, -1.2827, -0.3540, +0.2693, +0.0097,
+0.1842, -0.2062, +0.3589, +0.0386, +0.0940, -0.6110, +0.5074,
-0.4471, +0.0641, -0.4592, -0.1442, +0.3023, -0.3187, -0.5908,
-1.2507, -0.6518, -0.1777, +0.8415, +0.0770, +0.0154, -0.0216,
+0.1021, +0.1544, -0.5672, -0.5015, -0.6091, -0.8604, -0.0805,
+0.2870, -0.4945, -0.0334, +0.1428, -1.0402, -0.4387, +0.4980,
+0.1699, -1.3964, -0.6875, -0.4314, +0.2615, -1.1531, +0.7509,
-0.3166, +0.5292, +0.2292, -0.3682, -0.2563, -0.0884, -0.0064,
+0.3462, -0.0136, -0.4905, -0.5082, -0.7308, -0.0257, +0.2141,
+0.4705, +0.0467, -0.0859, -0.5980, +0.4867, +0.1216, -1.3656,
-0.5548, -0.6892, +0.1645, -0.1880, -0.3532, +0.5091, -0.0183,
+0.4596, -0.2019, -0.1287, +0.6507, -0.1666, +0.7605, -0.1819,
-0.3911, +0.7056, +0.0560, -1.0917, +0.1926, +0.1152, -0.5854,
-1.2015, -0.2047, +0.2450, +0.4330, -0.1793, +0.0967, +0.3471,
-0.8313, -0.5207, +0.4464, +0.3575, +0.0050, +0.0109, +0.3466,
-0.6398, +0.0550, -0.3470, -0.1641, -0.5482, -1.6662, -0.3640,
+0.4271, -2.0190, -0.2807, -0.4156, +0.5930, -0.2044, +0.2586,
+0.3294, -0.0410
],
[
-0.3317, -0.0433, +0.7553, +0.0780, -0.1600, +0.3487, -0.0899,
+0.4752, -0.5306, -0.5418, -0.3998, -0.2042, +0.0573, -0.2629,
-0.3835, -0.2808, -0.7365, +0.0468, +0.1259, +0.0197, +0.1999,
-0.0067, -0.0952, -0.5982, +0.6603, -0.3708, -0.4107, -0.2107,
+0.1856, +0.0801, -0.0317, -0.1030, +0.6148, +0.0689, +0.0360,
+0.5873, +0.6373, -0.4274, +0.3931, +0.6688, -0.0991, -0.8677,
-0.1208, -0.6740, -0.0005, -0.3582, -0.0956, -0.5954, -0.9473,
+0.2657, +0.4797, -0.1288, -0.5544, +0.2299, -0.3652, -0.2259,
+0.0398, -0.3877, +0.1457, +0.1774, -0.3009, +0.2741, -0.2197,
+0.1699, +0.0348, +0.2522, +0.5760, +0.3269, -0.2433, +0.3860,
+0.5211, -0.6615, +0.2792, -0.5678, +0.5221, +0.0590, +0.1282,
+0.5853, -0.1564, +0.1237, -0.0384, +0.3060, -0.3530, +0.0635,
-0.1059, -0.8397, +0.0054, -0.5784, -0.1677, +0.6336, +0.4885,
+0.0895, +0.0792, +0.7527, +0.0445, -0.4034, +0.1270, -0.0362,
-0.2035, +0.0901, +0.2301, -0.6577, +0.1569, -0.0478, -0.7499,
+0.0353, -0.5966, -0.0205, -0.5135, +0.4103, +0.1819, +0.1055,
+0.0828, -1.3878, -0.6138, +0.0191, -0.2161, +0.2299, +0.1145,
-0.0208, +0.5890, -0.4278, -0.2275, -0.6212, +0.2328, +0.4505,
+0.1880, -0.2761
],
[
-0.2579, -0.1070, +0.5948, -0.4495, -0.1056, -0.5783, +0.7626,
-0.0990, -0.4974, -0.3786, +0.0336, +0.1340, +0.0176, +0.3846,
-0.1908, -0.0323, -0.1243, +0.3143, -0.1815, -1.2845, -0.0969,
+0.4785, -0.1254, +0.0742, -0.5627, +0.3179, -1.1277, +0.6710,
-0.3180, -0.4783, -0.7258, +0.0998, +0.0103, -0.3908, -0.6606,
+0.0864, -0.1134, -0.7322, -0.0021, -0.3648, -0.1118, -0.0069,
-0.0316, -0.5033, +0.0592, -0.4308, -0.1743, -0.3476, +0.5224,
-0.2702, -0.6365, -0.4019, +0.1359, +0.2903, +0.6723, -0.2187,
-0.6024, +0.2384, +0.0491, -0.0900, +0.1348, -0.2781, +0.0112,
+0.2478, -0.1627, -0.1756, +0.5521, -1.3692, -0.1336, +0.4796,
-0.1884, +0.1825, -0.1318, +0.6813, +0.3728, -0.4068, -0.2569,
+0.2171, +0.5401, +0.0103, -0.2397, +0.2136, +0.6071, -1.1484,
+0.1637, +0.5346, -0.3671, -0.9121, -0.2676, -1.7229, -0.4905,
-0.1932, -0.1517, -0.4315, +0.0992, -0.0417, +0.0596, +0.1999,
+0.0300, -0.1353, +0.3451, +0.3250, -0.6266, +0.6477, +0.4953,
+0.8932, +0.1584, +0.3444, -0.2406, +0.0892, -0.3448, +0.0994,
-0.2997, +0.3734, -0.0937, +0.0631, -0.3965, +0.1908, +0.3220,
+0.3505, +0.0664, -0.0990, +0.5812, +0.3055, -0.6768, +0.0508,
-1.1169, -0.2971
],
[
-0.2826, +0.3017, -1.0716, +0.5454, -0.2297, +0.2570, -0.2675,
-0.3280, +0.5000, -0.6632, -0.1680, +0.0455, -0.5645, -0.2240,
-0.0925, +0.0285, +0.5507, +0.3751, +0.4763, +0.3696, +0.0971,
+0.0564, +0.0225, +0.1475, +0.4914, -0.2191, -0.6406, -0.1431,
-0.6880, +0.2840, +0.5321, -0.1326, +0.1830, -0.2063, -0.6520,
+0.2547, -0.0021, -0.0909, +0.2077, +0.0425, +0.3344, -0.5121,
+0.5924, -0.6228, -0.2005, -0.7171, -0.1157, +0.0358, -0.4479,
+0.0911, +0.2311, -0.2368, -0.3824, -0.4127, +0.0186, +0.3056,
-0.2722, -0.4932, -0.6576, +0.6565, -0.2643, -0.2520, +0.0746,
+0.6838, +0.3458, +0.3152, -0.5829, +0.6142, +0.2486, -0.6898,
+0.0375, -0.0663, -0.0938, +0.0495, +0.5894, +0.5695, -0.0793,
+0.0908, +0.3887, -0.1746, -0.7917, +0.0934, -0.3565, -0.7500,
+0.2916, -0.2257, +0.4372, -0.0252, -0.2825, -0.5158, +0.6199,
-0.2699, -0.0864, +0.1938, -0.7209, -0.2632, -0.6214, -0.0095,
-0.7482, -0.1812, -0.0885, -0.3705, -0.4529, -0.7320, +0.1304,
-0.2019, -0.3944, -0.1590, -0.2112, -0.0378, +0.0883, -0.1832,
+0.4093, +0.0177, -0.9067, -0.0438, +0.3012, +0.1318, -0.1884,
-0.1744, +0.6493, +0.2527, +0.3349, -0.0854, +0.1585, +0.5028,
-0.0638, +0.4182
],
[
+0.2906, -0.2576, -0.2176, -0.4541, +0.1092, -0.0287, -0.3137,
-0.2885, +0.1561, +0.2431, -0.1863, +0.1705, -0.0177, +0.0854,
+0.0712, -0.4275, -0.3382, -0.6201, -0.7132, +0.3172, -0.4919,
+0.4363, +0.2332, +0.3105, +0.3940, -0.4142, +0.4294, +0.0273,
+0.7051, +0.1778, -0.3064, +0.0910, +0.1420, +0.6417, -0.0247,
-1.1611, +0.1286, +0.2238, -0.0254, +0.0795, -0.9549, +0.9174,
-0.2747, -0.2260, +0.1629, +0.3881, +0.6297, +0.3515, -1.0133,
+0.5295, +0.2742, +0.3474, +0.6074, -1.1186, +0.0476, +0.2279,
-0.3011, -0.1067, -0.1332, -0.2300, -0.2225, -0.3024, -0.2980,
-0.1358, -0.1022, -0.6499, -0.3039, +0.1199, -0.3481, -0.7577,
+0.4489, -0.4166, +0.5674, -0.9603, -0.0928, -0.1741, +0.2443,
-1.2442, -0.2971, +0.5504, -0.1294, -0.6932, +0.5146, +0.3808,
-0.0010, -0.2611, -0.2290, -0.3077, -0.3866, +0.0346, +0.0652,
-0.0467, -0.2865, -0.1689, +0.5712, -0.0300, -0.1392, +0.5242,
-0.4779, -0.0639, -0.2994, +0.1722, -0.5602, +0.5796, -0.5325,
+0.4226, +0.0793, +0.6263, +0.1931, +0.3964, -0.0242, -1.0088,
-0.1576, -0.2754, +0.3829, -0.0360, -0.1474, +0.5173, -0.3908,
+0.5383, +0.3459, -0.3353, +0.2198, -0.1983, -0.2535, -0.8102,
-0.1733, -0.2498
],
[
-0.2845, +0.2532, -0.3930, +0.0176, -0.0401, +0.3929, -0.2219,
-0.0176, -0.6583, -0.8225, -0.3858, -0.6111, -0.3160, +0.1729,
-0.5304, -0.4961, +0.4348, +0.1502, -0.0024, +0.5605, -0.5965,
-0.3403, -0.0525, +0.4160, -0.1761, -0.5333, +0.6485, -0.1001,
-0.3320, -0.2852, +0.0461, +0.3127, +0.2732, +0.4850, -0.7572,
+0.3340, +0.1216, -0.2146, +0.5141, -0.0446, +0.3435, -0.1889,
-0.0703, +0.1833, +0.6237, -0.2869, -0.1359, +0.3557, -0.6367,
-0.2098, -0.3921, +0.2821, -0.6608, -0.7367, +0.1877, -0.3778,
-0.2733, +0.2592, +0.3895, +0.5889, +0.1726, -0.8440, -0.1420,
+0.2875, -0.5746, +0.0062, +0.1275, +0.3359, +0.3913, +0.3068,
-0.4358, -0.1234, -0.0252, +0.3891, +0.2927, +0.5481, -0.1240,
-0.7086, +0.2341, +0.0585, -0.0918, +0.4686, +0.2239, +0.2763,
-0.4061, +0.5489, -0.3236, +0.2883, -0.0754, -0.4910, -0.4830,
+0.0780, +0.4842, -0.1382, +0.4660, -0.5561, +0.1641, -0.1805,
-0.2352, -0.4031, +0.3937, +0.0209, +0.1165, -0.4197, -0.1378,
-0.0776, -0.1210, -0.0451, -0.4686, -0.4069, +0.3611, -0.2608,
-0.3837, -0.6298, -0.1267, -0.3378, -0.0777, +0.1820, -0.0371,
+0.4034, +0.0692, +0.0375, -0.2704, -0.4865, -0.4545, +0.3542,
-0.1009, -0.8298
],
[
+0.0277, +0.4274, +0.0410, -0.3348, +0.2236, -0.2796, +0.3243,
-0.3701, +0.3331, +0.0335, -0.2298, -0.0108, -0.7326, +0.2532,
-0.0554, +0.1458, -1.0869, +0.5397, -0.3616, -0.4258, +0.0051,
+0.5826, +0.0720, +0.7102, -0.4815, -0.5512, -1.3826, -0.0495,
-0.0009, -0.2800, -0.2969, +0.1124, -1.1290, -0.6278, +0.2533,
-0.6351, -0.8564, +0.2894, -0.4166, +0.0631, -0.3520, -0.1019,
+0.0933, -0.5000, -0.5518, +0.0643, +0.0005, -0.3435, +0.3769,
-0.1839, +0.3340, -0.5945, +0.0447, +1.3741, -0.3691, -1.1582,
-0.8776, -0.0717, -0.0328, +0.0922, -0.5648, +0.1482, +0.5882,
-0.3070, +0.0753, -0.3949, +0.5848, -0.0525, -0.1069, -0.1223,
+0.2160, -0.6329, +0.2816, -0.0776, +0.3009, +0.8861, +0.4515,
-0.3619, +0.1494, +0.4558, -1.4952, -0.0373, +0.4441, -0.4643,
+0.3820, +0.0463, -0.4576, -1.4621, -0.7752, -0.3164, -0.2166,
+0.3246, +0.0032, -0.3442, -0.9252, +0.2383, -0.5751, -0.2526,
-0.3966, -0.0106, +0.0551, -0.2484, -0.5117, +0.0920, +0.3756,
-0.3718, -0.3283, +0.6047, +0.3816, +0.1391, +0.2320, +0.0394,
+0.0631, -0.0844, +0.1382, -0.0184, -1.0713, -1.0758, -0.9147,
-0.1541, -0.0780, -0.0554, +0.3733, +0.1043, +0.0783, +0.0085,
+0.0763, +0.0499
],
[
+0.2028, -0.0972, -0.0420, +0.3582, -0.2659, +0.0275, -0.3126,
+0.0726, -0.3374, -0.1147, -0.4659, -0.3320, +0.2382, -0.1582,
+0.2975, -0.5718, +0.1347, +0.3142, -0.2734, -0.0589, -0.4717,
-0.2501, -0.1617, +0.1413, +0.3922, -0.4995, -0.1628, +0.2263,
-0.2123, +0.2214, +0.2072, -0.3154, -0.1453, -0.1673, +0.1661,
-0.2971, -0.0208, +0.2328, +0.1944, -0.6479, +0.0608, -0.3874,
-0.1620, +0.0104, +0.2743, +0.1931, -0.6089, +0.1236, -0.2400,
+0.0071, -0.4273, -0.5542, +0.4053, -0.3175, +0.2030, +0.5557,
+0.3351, +0.4363, -0.7274, +0.3224, -0.0487, -1.1022, -0.3274,
-0.0646, +0.3601, +0.2220, -0.1501, +0.4314, -0.2996, +0.4053,
-0.4276, +0.5074, -0.1857, +0.1020, +0.1378, +0.1655, -0.8446,
-0.3429, -0.1343, -0.3011, -0.0274, +0.4764, -0.7874, -0.6551,
+0.2766, -0.2518, +0.1279, +0.2594, -0.2784, -0.3953, +0.1520,
-0.0223, +0.2754, +0.3359, +0.2621, +0.1815, -1.5922, +0.2841,
-0.0714, -0.6989, -0.1654, -0.5011, +0.1637, -0.1394, -0.2840,
-0.3650, +0.2890, +0.0167, -0.1014, -0.1577, +0.2075, -0.6913,
+0.3595, -0.9863, -0.1042, +0.0475, -0.3596, -0.4279, -0.0539,
+0.0853, +0.5694, -0.0971, +0.1162, +0.1094, +0.3452, +0.1604,
+0.1195, -0.2351
],
[
+0.1900, +0.7089, +0.3496, -0.2038, -0.3084, +0.2608, -0.6953,
+0.5173, -0.1191, -0.2883, +0.0648, +0.2797, +0.0287, -0.7875,
+0.2421, +0.3568, +0.1613, +0.0608, -1.1179, +0.3233, +0.2435,
+0.2525, +0.6172, -0.7511, +0.5085, +0.2840, +0.5009, -0.1700,
+0.2576, -0.0248, +0.2106, +0.2717, +0.0067, +0.6714, +0.4278,
-0.6350, +0.2729, -0.4513, +0.0890, -0.2815, -0.2417, +0.4750,
-0.2657, -0.2394, +0.0306, +0.6279, -0.5832, +0.2397, -0.0580,
+0.6375, +0.5022, +0.2646, +0.2561, +0.2462, -0.2213, +0.2257,
-0.2983, -0.6287, +0.3809, -0.0921, -0.2330, -0.3067, +0.0942,
-0.0820, +0.2682, -0.6325, +0.5378, +0.1533, -0.0488, -0.2197,
-0.7697, +0.1914, -0.0968, -1.1765, +0.1117, +0.3153, +0.1784,
+0.3065, +0.5665, -0.2415, -0.3054, -0.1691, +0.0961, +0.0841,
-0.2884, -0.0720, -0.1946, -0.4453, -0.4094, +0.7505, +0.9860,
+0.4382, +0.0251, +0.6391, -0.9557, +0.4968, +0.3713, -0.6909,
+0.3377, +0.5932, -0.0497, -0.7133, -0.0375, +0.6459, +0.1891,
+0.2072, -0.3249, -0.9538, +0.0223, +0.1423, -0.4737, +0.4759,
+0.3123, +0.2271, +0.3975, +0.1807, -0.0245, -0.4997, +0.0934,
+0.3119, -0.6308, -0.6775, +0.1343, -0.9224, +0.3735, +0.3916,
+0.3533, -0.2392
],
[
+0.2814, -0.0062, +0.0933, -0.7245, -0.0320, -0.1942, +0.2133,
+0.4642, +0.3284, -0.3294, +0.3022, +0.1587, -0.2439, -0.5907,
+0.2743, -0.4863, +0.0067, -0.5499, -0.3214, +0.2841, +0.0214,
-0.0561, -0.0420, +0.4335, -0.4909, +0.1226, +0.6087, +0.2644,
+0.0271, +0.3237, -0.2749, -0.2716, -0.1286, +0.7110, -0.5399,
-0.8197, -0.2695, -0.5925, -0.9547, +0.2210, -0.0892, +0.3999,
+0.4132, +0.5972, -1.0397, -0.2844, +0.3267, -0.3480, +0.1662,
+0.9957, -0.1922, -0.0408, -0.7742, -0.1346, -0.2137, +0.1514,
+0.5590, +0.1207, -0.4661, -0.4641, +0.1634, +0.2661, -0.1209,
-0.1571, -0.1591, -0.9180, -0.5031, +0.6926, +0.4839, -0.1161,
+0.2329, +0.1166, -0.0009, -0.3337, +0.2277, +0.8259, -0.9418,
+0.4854, +0.1511, +0.0460, -0.6269, +0.1148, +0.2744, -0.4243,
+0.4802, +0.0008, +0.3037, -0.4941, +0.5982, -0.0887, -0.0670,
+0.2764, +0.9553, -0.2374, +0.3233, +0.2260, -0.2309, -0.0966,
+0.0436, +0.3731, +0.1013, +0.0365, -0.1977, -0.1226, -0.3751,
-1.1944, -0.7115, +0.5875, -0.4934, +0.7552, +0.6172, +0.9160,
+0.0696, -0.1216, +0.5968, +0.5816, -0.6307, +0.2768, -0.2808,
+0.8865, -0.3658, +0.2391, +0.8127, +1.0817, +0.2553, +0.0153,
+0.5750, -0.8275
],
[
+0.0082, +0.0867, -0.7852, +0.0827, +0.1291, +0.0101, -0.4648,
+0.2129, +0.1481, +0.2185, +0.0041, +0.1023, -0.0985, -0.4019,
-0.1962, -0.4703, +0.2551, +0.0781, -0.2133, -0.2784, +0.4115,
+0.1406, -0.6719, +0.0922, +0.0814, -0.2462, +0.1721, -0.0595,
-1.2963, -0.1522, -0.2524, -0.7243, +0.0807, -0.4981, +0.2275,
+0.2468, -0.4768, -0.1847, +0.4707, +0.8096, -0.2368, -0.0147,
+0.0535, -0.6021, -0.0384, +0.2849, +0.3360, -0.1571, -0.5368,
+0.0510, +0.2195, -0.0610, +0.3591, -0.6108, +0.2859, +0.4386,
-0.3010, -0.1109, +0.4162, -0.1174, +0.4276, -0.2966, +0.1508,
+0.3311, -0.2374, -0.1032, +0.3684, +0.2232, -0.2740, +0.1488,
+0.4066, -0.3463, -1.3788, +0.0910, -0.6708, -0.5257, +0.2128,
-0.7257, -0.5533, +0.3904, -0.8414, -0.3692, -0.4447, +0.2932,
+0.3695, +0.1974, -0.0307, -0.0207, +0.2861, +0.0093, -0.3723,
+0.0885, -1.1285, +0.5602, -0.4984, +0.0470, -0.7330, -0.3404,
-0.0676, +0.0126, +0.2968, -0.6792, -0.1664, -0.0904, -0.0668,
+0.0039, +0.3229, -0.3829, -0.0114, +0.0498, +0.2746, +0.3235,
-0.7055, -0.9976, -0.1135, +0.7543, +0.0806, +0.3088, -0.2352,
+0.0764, -0.7089, -0.9394, +0.2522, -0.3699, -0.1798, -0.2856,
-0.1290, +0.1445
],
[
+0.1170, +0.1270, -0.3989, -0.3419, +0.0840, -0.9038, +0.0866,
+0.2107, -0.4077, +0.2281, +0.0722, +0.3743, -0.0479, +0.1286,
-0.7060, -0.0320, -0.5403, -0.0098, -0.4120, -0.4272, +0.2438,
-0.4637, -0.7268, -0.0662, -0.3122, -0.3862, +0.2022, -0.0472,
-0.4934, -0.0315, -0.1665, -1.2208, -2.0757, +0.0802, +0.3474,
-0.0081, -0.2626, -0.3028, -0.2964, -0.0162, +0.1108, -0.6319,
+0.3311, -0.1075, -0.5383, +0.1789, +0.0341, +0.0732, +0.0174,
-0.6334, +0.1319, +0.2015, +0.0771, -0.5511, +0.0671, -0.2133,
-0.0329, -0.1802, -0.4966, -0.0583, -0.4500, +0.3364, +0.3630,
-0.2253, -0.4866, +0.1868, -0.0692, -0.2527, -0.0107, -0.1891,
+0.2936, -0.4164, -0.5263, +0.1735, -0.1163, -0.5953, +0.1242,
+0.4013, -0.1936, -0.9705, -1.2014, +0.2352, -0.2794, -0.5115,
-0.0017, -1.5073, +0.1604, -0.3522, -0.0468, -0.6165, +0.1721,
+0.2418, -0.4455, -0.0092, -0.1756, +0.2693, -0.6080, +0.4912,
-0.1329, +0.4136, -0.2932, +0.0115, -0.0402, +0.2502, -0.0197,
-0.4421, +0.0759, -0.6872, -0.7786, -0.0250, +0.3514, +0.2312,
+0.1051, -0.9007, +0.3253, +0.1189, +0.1139, -0.2096, -0.2937,
+0.0679, -0.8154, -0.4950, +0.6838, -0.0920, +0.0793, -0.2171,
-0.0949, +0.1958
],
[
-0.2691, -0.6323, -0.1146, -0.7969, +0.3603, +0.0558, -0.3731,
-0.8005, -0.0474, +0.0781, -0.1341, -0.2605, -0.1887, +0.4685,
+0.8360, +0.4681, +0.0750, +0.0393, +0.1146, +0.0619, -0.6425,
+0.2530, +0.1581, +0.1261, +0.0122, +0.0781, +0.1272, -0.3750,
-0.6580, -0.1638, +0.1062, +0.3031, -0.3352, +0.2163, -0.1018,
-0.8000, -0.6169, -0.2181, +0.3027, -0.9585, -0.2230, -0.1886,
-0.4316, -0.4782, -0.5062, -0.9208, -0.7954, +0.1160, -0.5291,
-0.8184, -0.0061, -0.1857, -0.9126, +0.5665, -0.2163, +0.0021,
-1.0223, +0.0705, +0.2187, -0.2989, +0.6618, +0.0370, +0.2902,
-0.1797, +0.0894, +0.1833, -0.1715, -0.0093, +0.3925, -0.3233,
-1.0342, -0.4808, +0.1210, +0.3849, +0.5812, +0.2901, +0.0822,
-0.8464, +0.3258, +0.2498, -0.0856, +0.1311, -0.0029, +0.4667,
-0.0699, -0.2817, -0.7217, +0.2866, +0.0190, +0.1614, -0.5219,
-0.0784, +0.0649, +0.0512, +0.4715, -0.5501, +0.1089, -0.7258,
+0.2023, +0.2316, -0.2090, -0.0795, +0.4351, -0.3446, -0.4372,
-0.1601, -0.3571, +0.0786, +0.3685, -0.4748, +0.3770, +0.2971,
-0.0189, -0.6198, +0.0504, -0.2866, -0.2560, +0.1544, -0.6670,
-0.9603, -1.4674, -0.1516, +0.2214, -0.4532, -0.1952, +0.6401,
-0.2176, -0.8166
],
[
-0.0285, -0.3986, +0.1484, -0.2599, -0.1303, +0.0441, +0.4181,
-0.4833, -1.0472, -0.4699, -0.5243, +0.4656, -0.6172, -0.8212,
+0.1187, -0.0418, +0.0160, -0.1062, -0.0762, +0.3476, +0.3903,
+0.0679, -0.1581, -1.0278, +0.0026, -0.4655, -0.2266, +0.1243,
-1.2957, -0.9490, +0.3861, +0.3092, -0.4318, +0.8314, -0.7161,
-0.4065, +0.3536, -0.1895, +0.0566, -0.2526, -0.4563, +0.3996,
+0.2692, -0.9285, -0.0938, +0.0857, -1.2679, -1.3199, +0.1553,
-0.5119, -0.5065, +0.4989, -0.7397, +0.0820, -0.4976, +0.3495,
+0.1541, +0.3716, -0.3373, +0.4194, -0.4855, +0.3548, -0.6374,
-0.2918, -0.4379, -0.2620, +0.6569, +0.3145, +0.4310, +0.0951,
-0.1676, +0.1585, +0.7460, +0.0699, +0.0143, -0.0661, -1.1089,
+0.3126, +0.2163, -0.4875, +0.1745, +0.2242, +0.2786, +0.3927,
-0.5813, +0.3231, -0.1843, -0.0234, -0.2786, +0.0067, +0.1955,
-0.0388, +0.1289, +0.6098, +0.2552, +0.7925, -0.2620, +0.1436,
-1.0747, -0.2431, +0.3825, -0.1392, -0.5851, +0.4949, +0.0018,
+0.0258, -0.1703, -0.1578, +0.4324, +0.0654, -0.0453, +0.0184,
+0.5218, +0.4344, +0.1994, -0.2861, -0.7278, -0.1987, -0.4662,
-0.1155, -0.0638, -0.1569, -0.9375, +0.8239, -0.0273, +0.1714,
+0.6239, -0.2999
],
[
+0.3140, -1.1413, -0.1982, -0.6762, +0.6062, +0.3005, -0.4418,
-1.0130, -0.2415, +0.1512, -0.0788, -0.2140, +0.2363, -0.0995,
-0.1775, -0.2692, -0.2319, -0.1664, -0.0480, -0.2605, -0.4175,
+0.5565, +0.1244, +0.0817, -0.2587, -0.1275, +0.1042, -0.2228,
+0.0417, +0.1563, -0.0341, +0.1533, -0.2492, -0.1530, +0.0599,
+0.4991, -0.9855, -0.2737, +0.2038, -0.3164, +0.0745, -0.1019,
+0.0571, +0.5334, +0.0129, +0.0066, +0.0281, +0.4225, -0.3663,
-0.1974, -0.4947, +0.2419, -0.0759, -1.3532, +0.1309, +0.0166,
-0.2925, -0.0691, -0.2376, +0.3762, -0.8569, -1.2457, -0.0893,
+0.3526, +0.0858, +0.2560, -0.9409, -0.2089, +0.1291, -0.1224,
+0.1125, +0.3010, -0.6485, -0.1327, +0.2401, +0.0642, -0.7335,
-0.0607, -0.0949, +0.1965, -0.0402, +0.7876, -0.2581, -0.0049,
-0.5583, +0.3317, -0.5640, +0.7555, -0.4633, +0.3739, +0.1979,
+0.7038, -0.1302, -0.4236, -0.7923, -0.0621, +0.3316, +0.4299,
-0.3998, -0.4017, +0.3687, +0.2797, -0.0302, -0.0357, +0.1949,
-0.0202, +0.3365, -0.0781, -0.2587, -0.4195, +0.1678, -0.4627,
-0.1905, -0.3720, -0.2279, -0.4517, +0.1817, -0.2728, -0.4155,
+0.4474, +0.2705, +0.5126, -0.1381, -0.7768, +0.0570, +0.3170,
-0.1461, +0.1520
],
[
+0.2711, -0.7083, +0.0868, +0.2666, -0.8103, -0.0690, -0.0300,
-0.7091, +0.1559, -0.3225, -0.3553, -0.1263, +0.2223, -0.2423,
+0.3708, -0.0383, -0.3726, +0.1577, +0.0520, +0.1042, -0.5502,
-0.1205, -0.0600, +0.1708, +0.3769, +0.6033, -0.0370, -0.2149,
+0.0203, +0.2205, +0.1817, +0.0686, +0.2958, -1.5378, -0.3753,
-0.5048, -0.8244, +0.3607, +0.0226, -0.3186, -0.6852, +0.1654,
-0.2479, +0.1836, -0.5161, -0.1248, -0.7217, -0.4131, -0.2711,
-1.2179, +0.5068, -0.7894, -0.2289, +0.1742, -0.4522, -0.2113,
-0.1300, +0.1045, +0.1983, -0.0241, +0.0476, +0.2304, +0.1034,
-0.7503, +0.0238, -0.4383, +0.0790, -0.8974, -0.1478, +0.3492,
-0.7141, +0.2589, +0.0790, -0.2512, -0.2460, -0.0059, -0.3912,
-0.5338, -0.3274, +0.0376, +0.1176, -0.2580, -0.3363, +0.0977,
+0.4249, +0.1062, -0.3632, -0.1051, -0.1989, +0.1453, -0.3825,
+0.4364, +0.3469, -0.0374, -0.2044, -0.1771, -0.5730, -0.1720,
+0.6108, -1.2931, +0.0127, +0.0761, -0.1373, +0.0288, -1.2347,
+0.5185, -0.1560, +0.3861, +0.1622, -0.5163, -0.1247, -0.1058,
-0.2902, -0.2007, +0.6575, -0.1101, -0.6524, +0.0326, -0.4036,
+0.0191, +0.1843, +0.1588, +0.0258, -0.0628, +0.1276, +0.1841,
+0.5514, +0.0580
],
[
+0.3696, -0.2226, -0.5965, +0.0039, +0.0826, -0.0328, +0.2075,
+0.3482, +0.1155, -0.6658, -0.6598, +0.0239, -0.2714, -0.2931,
-0.0308, -0.1994, +0.3646, -0.0805, +0.1486, +0.2427, -0.0741,
+0.3452, +0.0704, -0.0799, +0.6901, -0.2128, -0.2928, +0.0834,
-1.0479, -0.1028, +0.0560, -0.4974, -0.1569, +0.0858, +0.2110,
+0.0329, +0.0708, -0.2342, -0.2855, +0.5861, -0.1896, +0.1390,
-0.3496, +0.3056, -0.8322, +0.1572, -0.4228, +0.3259, -0.6415,
+0.4092, +0.4396, -0.1126, +0.2508, -0.8185, -0.4606, -0.0904,
-0.5178, -0.6965, -0.3374, -0.1446, -0.5244, -0.1085, -0.7043,
+0.0682, +0.2373, -0.0717, -0.8114, +0.3448, +0.0167, +0.2812,
-0.7497, -0.4294, -0.3268, -0.1051, -0.1815, -0.3281, -0.0849,
-0.4348, +0.2231, -0.9791, -0.2975, +0.5974, -0.3474, +0.1663,
+0.0978, -0.3039, +0.1740, +0.2455, -0.1384, +0.1306, -0.0091,
+0.3224, -0.4614, +0.2875, -0.7874, +0.5825, -0.0400, -0.3206,
+0.4512, -0.3062, +0.1509, -0.2496, +0.0852, -0.2199, +0.1985,
+0.1227, -0.2818, -0.1475, +0.3922, +0.1838, +0.0896, +0.4384,
+0.1683, -0.4647, +0.0348, -0.1814, +0.1457, -0.8301, -0.5137,
+0.1269, -0.3037, +0.0274, -0.2094, +0.0461, -0.2276, +0.2731,
+0.2495, +0.1495
],
[
-0.0085, +0.2052, +0.1864, -1.2076, -1.0113, +0.0311, +0.0778,
-0.5844, +0.4804, +0.3460, -0.4087, +0.3961, +0.5099, -0.4606,
-1.3054, -0.7246, +0.4725, +0.4758, -0.1597, -0.6904, -0.1142,
-0.0945, -0.1305, -0.2708, +0.3056, -0.1290, -0.4496, +0.1660,
+0.1616, -0.5353, +0.2048, -0.0921, +0.4782, -0.4734, -0.5955,
-1.1665, -0.6502, -1.0449, +0.0884, -0.6652, +0.6585, -0.1359,
+0.2576, -0.6263, -0.0742, -0.0931, -0.3415, -0.8582, -0.0877,
-0.5015, -1.0665, +0.3704, -0.1931, -0.4724, -1.0108, +0.0551,
-0.5867, +0.1302, -0.0530, -1.2517, +0.2389, -0.6020, +0.2280,
+0.2903, -0.4259, -0.0438, -0.0464, -0.0403, -0.5162, -0.7994,
-0.3000, +0.0190, -0.1081, -0.1555, -0.0449, +0.1102, -0.8093,
+0.1541, +0.1101, +0.1885, +0.0181, +0.1531, -0.5090, -0.3342,
+0.0650, +0.2316, -0.5225, -0.4101, -0.0930, -0.2833, -0.0896,
-0.8806, +0.6488, -0.1290, -0.4551, +0.3343, -0.0093, -0.3612,
+0.2181, -0.1496, +0.1006, -0.7886, -0.3799, -0.4418, +0.0317,
-0.3366, +0.0126, +0.5321, -0.1736, +0.2711, -0.1181, -0.1883,
+0.6228, -0.4832, +0.0469, +0.2955, -0.1980, -0.7133, +0.5580,
+0.1149, -0.5710, -0.1318, +0.4555, -0.9994, +0.1088, -0.1644,
+0.0360, +0.4702
],
[
-0.2252, -0.4254, +0.6137, +0.4718, -0.5521, -0.2921, -0.5468,
+0.7702, -0.5979, +0.2184, -0.2062, +0.0745, -0.4151, +0.1969,
+0.7839, +0.0988, +0.1779, -0.2031, +0.1115, -0.2029, -0.2319,
-1.0614, -0.1879, -1.2515, -0.0887, -0.2592, +0.2736, +0.0759,
-0.4067, -0.2098, +0.2684, -0.9703, -0.2628, -1.4381, -2.2795,
-2.2924, -0.2015, -1.1202, -0.0949, -1.5898, +0.1963, -0.2292,
-0.1158, +0.5034, +0.0557, -0.4832, -0.1155, -0.5655, +0.0922,
-0.3294, -1.1680, +0.2987, -1.2463, -0.3976, -0.5490, +0.0465,
-0.0955, -0.1976, -0.0012, -0.5143, -0.1448, -0.0414, +0.0126,
-0.4112, -0.0247, -0.1230, +0.1532, -0.2767, +0.3638, +0.1027,
-0.0585, +0.3002, +0.3676, -0.3005, +0.2677, +0.3266, -1.5747,
+0.3166, +0.1465, -0.1974, -0.1422, +0.0436, -0.0890, +0.1626,
-0.2840, -0.1864, -1.2519, -0.1626, +0.2748, -0.5678, -1.4952,
-0.0378, +0.4490, +0.3409, +0.1429, -1.2740, +0.1465, -0.3097,
-0.4857, -0.0346, +0.0447, -0.0255, -0.1301, -0.0094, -0.5234,
-0.6989, +0.2882, +0.1444, -0.6770, +0.4679, +0.1677, +0.7201,
-0.2564, +0.1432, +0.0667, -0.0215, -0.1456, -0.4922, -0.4803,
-0.3481, -1.7805, +0.2801, -0.2172, +0.3164, +0.0410, +0.2213,
+0.3071, -0.1733
],
[
-0.1136, -0.0677, -0.4356, +0.8331, -0.8691, -0.5827, -0.0215,
-0.5241, -0.5871, -1.4385, -0.8891, -0.1442, +0.0689, -0.4092,
+0.3884, -0.7465, -1.0275, +0.2555, +0.3577, +0.2575, +0.0209,
-0.1352, +0.0379, -0.0821, +0.0465, -0.0177, +0.0115, -0.3766,
-0.1401, -0.0996, +0.1216, -0.8895, +0.0781, -0.0920, -0.1415,
-0.2272, +0.4005, +0.3169, +0.1274, +0.0498, -0.5372, +0.1034,
-0.1719, -0.0153, +0.6849, +0.0268, +0.0420, +1.1373, +0.2045,
+0.0175, +0.2241, +0.3592, +0.3729, -0.0936, -0.3730, -0.6442,
+0.3203, -0.0835, -0.0376, -0.1133, +0.1059, -0.2747, -0.6443,
+0.1147, -0.0212, +0.1011, +0.0599, +0.0543, +0.1753, +0.1356,
-0.2370, -0.8859, +0.3428, -0.5916, -0.5082, -0.6873, +0.1431,
-0.3359, -0.3301, +0.3276, -0.2136, -0.6678, +0.1320, +0.1852,
-0.0982, -0.0495, -0.2122, +0.2887, +0.0018, -0.1580, -1.1534,
+0.6158, +0.5112, +0.1241, -0.3766, +0.3000, -0.1223, -0.3958,
+0.2922, -0.0240, -0.5557, -0.0616, +0.2368, -0.2329, +0.1579,
+0.0014, -0.3179, -0.2191, -0.1892, +0.2321, +0.0039, +0.7091,
-0.3429, -0.4592, +0.1506, +0.3075, -0.1510, +0.1857, +0.1459,
+0.1891, -0.4055, +0.0411, -0.6508, +0.0743, -0.4859, +0.0052,
+0.4068, +0.1873
],
[
+0.4762, -0.3284, -0.1555, +0.2002, -0.1366, +0.1701, +0.1370,
-0.3074, -0.2124, -0.5547, +0.2435, -0.1142, -0.0982, -0.1635,
-0.1310, -0.2785, +0.0024, +0.0102, -0.2526, -0.4488, -0.1647,
-0.0045, -0.3931, -0.0204, +0.2716, -0.0475, +0.1861, +0.6183,
-0.4493, -0.0702, +0.2247, -0.1569, +0.1372, -0.3291, -0.8000,
-0.6748, +0.4074, +0.5101, -0.7657, -0.6029, -1.0873, -0.1586,
+0.4112, -0.5648, -0.0312, +0.1353, -0.9385, +0.0067, -0.2617,
-0.3750, -0.1196, +0.5406, -0.7990, +0.4246, -0.4306, -0.2984,
+0.3182, +0.0648, +0.4050, +0.1368, +0.2012, -0.2350, -0.2371,
+0.3948, -0.5506, +0.0713, -0.4612, +0.2157, +0.3576, -0.9170,
-0.8457, +0.2411, +0.4271, -0.1765, -0.1865, -0.2401, -0.4079,
-0.1546, -0.0300, +0.3523, +0.1223, -0.4455, +0.1529, -0.2628,
-0.4528, -0.9364, -0.8459, -0.1533, -1.0921, -0.3128, -0.5306,
-0.2182, +0.5052, -0.8434, +0.5184, +0.1367, +0.0152, -0.1304,
+0.7222, -0.2846, +0.0592, -0.1285, +0.1846, +0.2904, +0.4518,
+0.1123, -0.3481, -0.3457, +0.1827, -0.0044, -0.0458, +0.0374,
-0.1125, +0.0941, -0.3499, -0.0423, -0.4055, -0.2829, +0.7004,
+0.1410, -0.3933, -0.3708, -0.4537, -0.5014, +0.1018, +0.5535,
+0.3223, +0.1110
],
[
+0.6492, +0.1605, +0.1305, -0.4848, -0.6308, +0.1230, -0.6904,
+0.4446, -1.0387, -0.4102, -0.1002, +0.0278, -0.0709, +0.0744,
-0.0401, -1.0359, -0.0711, +0.7240, -0.1909, -0.2549, -1.1637,
+0.0829, +0.1114, -0.2705, -0.0319, -0.4715, -0.7474, -0.5330,
-0.0540, -0.5394, +0.5775, -0.1039, -0.4891, -0.8315, -0.5596,
-0.8209, -0.9092, -0.7611, -0.3003, +0.1847, -0.1835, -0.5320,
+0.3177, -0.0634, +0.0724, -0.3234, -0.0891, -0.0884, -0.0462,
-0.1958, -0.5008, -0.3015, +0.1186, -0.8094, +0.4712, +0.0014,
+0.2931, +0.0220, -0.4707, +0.1514, +0.1064, -0.4192, -0.1069,
+0.5645, -0.1316, +0.0881, +0.1463, +0.1614, +0.1803, +0.2927,
-0.6776, +0.0035, -0.1937, -0.1933, -0.6986, +0.0889, -0.6174,
+0.2617, +0.1724, +0.2762, +0.0236, +0.4052, -0.3615, -1.8092,
+0.0611, +0.1489, -0.6541, +0.3291, -0.6879, -0.1768, -0.2767,
-0.1210, -0.3152, -1.0731, +0.4075, -0.0101, -0.9484, -0.1233,
-0.3091, -0.3765, +0.3507, -0.5516, +0.1996, -0.3513, +0.4850,
+0.6935, -2.8095, +0.3908, +0.1824, -0.5622, -0.6693, -0.1793,
-0.1975, -0.5803, +0.0783, -1.4139, -0.1627, -0.5159, -0.0761,
+0.1372, -0.0913, +0.1193, -0.6293, +0.6834, +0.1290, +0.6432,
-0.0958, +0.2212
],
[
+0.2521, +0.2630, -0.4733, +0.3795, +0.0419, -0.4095, +0.2999,
+0.3761, +0.3422, -0.2029, -0.3041, +0.2830, +0.3336, -0.1059,
-1.3482, -0.0119, -0.5787, -0.6648, -0.3205, -0.6331, -0.1909,
-0.0394, -0.7875, +0.0651, -0.0273, +0.1049, +0.5533, -1.2675,
+0.1238, -0.3109, -0.6440, +0.1942, -0.3646, -0.3191, -0.0766,
+0.2061, -0.1044, +0.0561, +0.3081, -0.2792, -0.3319, -0.1471,
+0.3511, +0.0339, -0.1977, -0.6400, +0.0097, -0.6432, +0.1321,
+0.1810, +0.1756, -0.1767, +0.1977, +0.3617, -0.3304, -0.5127,
-0.8530, +0.0533, +0.1787, -0.1992, +0.2893, -0.2216, -0.6566,
+0.0704, -0.1389, -0.4372, +0.1779, -0.3765, -0.1337, -0.0503,
+0.3742, -0.1786, -0.3624, -1.1077, -0.1552, +0.4102, -0.5214,
-0.5095, +0.0131, +0.1564, +0.6065, +0.1712, +0.1516, -0.3438,
+0.3225, -0.0621, +0.3922, -0.4830, +0.1927, -1.4449, -0.9013,
-0.0744, +0.2114, +0.0668, -0.1440, +0.2017, +0.4194, +0.2080,
+0.1666, -0.0075, -0.1641, -0.9365, -0.1810, +0.0262, -0.2943,
+0.2071, +0.1249, -1.5124, -0.0661, -0.3135, -0.1278, -0.3549,
-0.0708, -0.2791, -0.0023, -0.1236, +0.1872, -0.0021, +0.4197,
-0.5973, -1.3331, -0.7616, -1.0685, -0.4284, -0.0643, -0.0932,
-0.7615, -0.4550
],
[
-0.6894, +0.2387, -0.8501, -0.2360, +0.1588, -0.3975, -0.2485,
+0.0576, +0.1002, -0.2252, -0.5013, -0.3919, +0.3921, +0.0695,
-0.9917, -0.8031, -0.2710, +0.3396, +0.5580, -0.0996, -0.0319,
-0.9168, -0.0259, -0.3321, +0.3293, -0.0079, -0.4774, +0.5074,
+0.2986, -0.5248, -0.2066, -0.5032, -0.7702, -0.4826, +0.0393,
-0.3987, -0.7677, +0.2482, -0.7121, -0.1591, +0.3542, +0.2598,
-0.1925, -0.1286, -0.1442, -0.2242, +0.2685, -0.5320, -0.1839,
+0.3071, -0.0286, -0.2077, -0.1724, +0.3268, -0.1637, +0.1221,
-0.3711, +0.3796, +0.1677, +0.0798, +0.0081, -0.2726, +0.3648,
+0.2464, +0.2526, +0.5343, -0.3083, +0.2492, -0.3297, +0.4204,
-0.2402, -0.2899, +0.3629, -0.4292, +0.4970, +0.2512, -0.0435,
-0.3246, -0.5294, +0.6179, -0.2060, +0.4789, -0.0905, +0.1975,
+0.6006, -0.1819, +0.1146, -0.6750, +0.4257, -0.0368, +0.3578,
-0.0778, +0.1832, +0.3916, -0.0713, -0.0925, -0.0996, +0.1648,
+0.1729, -0.6876, +0.1275, -0.4039, +0.8193, +0.5192, +0.2734,
+0.3087, +0.1873, -0.1174, +0.0053, -0.5225, +0.2610, -0.1341,
+0.5482, -1.1334, -0.1744, +0.0211, +0.2616, -0.6873, -0.1929,
-0.5472, -0.1666, +0.3171, -0.0373, +0.1694, -0.1680, -0.0884,
+0.2463, -0.2079
],
[
-0.3374, -0.3851, -0.1854, -0.1772, +0.1176, +0.0174, -0.0545,
-0.2007, -0.2658, -0.1292, -0.2258, -0.1332, -0.3853, -0.6779,
+0.4847, -0.0840, +0.0082, -0.4100, +0.1835, -0.3973, -0.2225,
-0.3307, -1.2065, -0.5345, +0.2386, +0.1299, -0.6485, -0.0198,
+0.3878, +0.4176, +0.1944, +0.4104, -0.3611, +0.1750, -0.2422,
+0.3888, -0.3439, -0.3678, -0.5949, +0.3910, -0.0784, -0.2476,
-0.6963, -0.5632, +0.2662, +0.3285, +0.2715, -0.3387, +0.0520,
-0.3993, -0.1589, +0.4355, -0.1047, +0.4287, -0.2753, +0.1810,
-0.2075, +0.2192, -0.3173, +0.0667, -0.2311, -0.0826, +0.3369,
-1.4102, +0.1262, -0.0202, +0.2389, -0.2517, +0.4403, +0.1408,
+0.0129, +0.1655, -0.2732, +0.1485, -0.0319, +0.2674, -1.2042,
-0.5246, +0.1175, -0.9084, -0.1473, +0.7580, -0.0608, -0.1657,
-0.3441, -0.2765, -0.2576, +0.1945, -0.4545, +0.0471, +0.2464,
-0.1764, -0.3006, -0.4309, +0.5991, +0.7491, +0.3486, +0.2068,
-0.4579, -0.0765, +0.1341, -0.0713, -0.3125, -0.3926, -0.5280,
-0.4428, -0.8202, -0.2347, +0.1489, -0.1848, +0.0109, +0.2817,
+0.3943, +0.5811, -0.0619, -0.0500, -0.3740, -0.0044, -0.3349,
+0.2791, -0.1661, -0.3395, +0.2254, -0.8203, +0.5381, -0.0089,
-0.0524, -0.0049
],
[
-0.3129, +0.3310, -0.0912, -0.2157, +0.1215, -0.3659, -0.0721,
+0.2889, +0.3868, +0.0040, -0.4878, +0.2934, -0.0622, +0.1938,
+0.2447, -0.4872, +0.5489, -0.5351, -0.8669, -0.5864, +0.1900,
+0.3462, +0.3815, -0.1919, +0.1619, -0.3233, +0.1040, +0.3066,
-1.1438, -0.1468, -0.2173, +0.3603, -0.0183, -0.5742, -0.2445,
-0.1754, -1.7395, -0.2586, -0.0417, +0.2469, +0.2116, +0.0549,
+0.2056, -0.0176, +0.1254, +0.3214, +0.3466, -0.2643, -1.6379,
+0.1342, -0.3079, +0.0917, +0.2076, -1.6489, -0.8988, +0.1965,
-0.2954, -0.2331, +0.2279, -1.2496, -0.0440, -0.2847, +0.0058,
+0.6779, +0.3672, -0.2293, -0.5255, -0.2264, +0.5899, -0.4577,
-0.8445, -0.3349, +0.4782, -0.0213, +0.5035, -0.1462, -0.0816,
-0.0924, -0.3762, +0.0949, -0.5648, +0.4525, +0.5809, -0.0342,
+0.1381, -0.3033, -0.4382, -0.1888, -0.5410, +0.1169, +0.7043,
-0.4616, +0.0419, -0.3368, -0.0198, +0.0373, +0.2670, -0.9157,
+0.5766, -0.8848, +0.2547, -2.4214, +0.0289, +0.0294, +0.3200,
+0.2833, +0.7030, +0.1917, +0.0806, +0.0319, +0.1276, +0.3893,
+0.1187, +0.0187, -0.4040, +0.9615, -0.2946, -0.0678, -0.3491,
+0.3768, -0.6789, -0.1654, +0.3386, -0.2474, -0.0224, -0.5732,
+0.2590, +0.2987
],
[
+0.0499, -0.0441, -0.3651, +0.2152, -0.3977, -0.5853, -0.1028,
-0.4648, -0.0573, -0.0586, +0.0939, +0.7476, -0.0733, -0.1556,
-1.2404, +0.2962, -0.3082, +0.2758, +0.4992, -0.5321, +0.2672,
-0.3016, -0.1122, -0.9559, +0.0373, +0.1253, +0.0423, -0.3638,
-0.6725, -0.8164, +0.4092, -0.6592, +0.2284, +0.0237, +0.1574,
-0.4085, -0.0249, -1.7635, +0.3639, +0.1628, -0.1960, +0.1105,
+0.2272, +0.1142, -0.2709, +0.3293, -0.2763, -0.7405, +0.0259,
-0.4482, -0.3133, +0.4974, -0.1307, +0.2308, -0.3896, +0.0042,
+0.0054, -0.2991, -0.0221, -0.4111, -0.0786, -0.0812, +0.3746,
+0.0347, -1.2763, +0.1178, -0.5588, -1.6551, -0.1173, -0.6023,
-0.1478, -0.0474, +0.0818, -0.0719, -0.4102, -0.4633, +0.4273,
-0.2414, +0.5594, -0.0444, -0.5016, -0.4598, +0.2786, -0.1450,
+0.1030, +0.3086, +0.1437, -0.2915, -0.4826, -0.4433, +0.2210,
-0.5075, -0.1776, -0.0854, +0.2941, +0.1911, -0.4371, -0.2531,
-0.1352, -0.2630, +0.6148, -0.2859, -0.0440, -0.3877, -0.4516,
+0.0055, -0.4590, -0.3747, -0.0737, +0.0759, +0.0977, -0.5848,
-0.6917, +0.2776, -0.1383, -0.1147, -0.0361, -0.0117, +0.2566,
-0.4038, -0.3312, +0.4280, -0.0726, -0.9503, -0.3591, -0.0391,
+0.1074, +0.2458
],
[
+0.5959, -0.3278, +0.0867, +0.2935, +0.1105, +0.2164, +0.2708,
-0.0261, -1.1600, -0.2612, -0.2102, -0.2892, -0.1252, -0.1953,
+0.3011, -0.2992, +0.2181, -0.0139, +0.1810, +0.2124, -0.2466,
-0.0319, -0.4725, -0.1248, +0.7223, -0.1485, +0.0357, -0.1122,
-0.6130, -0.1449, +0.3717, -0.5425, +0.2772, +0.3600, +0.0771,
+0.2939, -0.2337, +0.6262, -0.1961, -0.6447, -0.3119, +0.1573,
-0.3272, -0.6251, +0.0969, -0.9605, -0.0935, -0.1721, +0.3689,
-0.1753, -0.7054, -0.1648, -0.7860, +0.3147, -0.3318, -0.4041,
+0.0940, +0.7184, -0.3698, +0.5603, +0.2647, +0.2764, -0.5868,
-0.0931, -0.1904, -0.1462, +0.3757, -0.1287, -0.2810, +0.0976,
-0.2559, +0.2289, +0.1456, +0.0947, +0.1649, -0.0449, +0.0254,
-0.0624, +0.2917, -0.1126, +0.1546, -0.4106, +0.3425, -0.2223,
-0.1813, +0.3036, +0.2795, +0.1488, +0.1489, -0.0121, -0.6496,
+0.1909, +0.0567, -0.1244, -0.1460, -0.1951, +0.1426, +0.0090,
-0.8283, +0.1649, -0.3020, +0.2870, -0.5023, +0.1188, -0.5140,
-0.1743, -0.0307, +0.0629, +0.0458, -0.1941, -0.1876, +0.4947,
+0.4413, -0.7051, +0.0857, +0.2037, -0.1575, -0.0895, -0.4297,
+0.3757, +0.0474, +0.0595, +0.1458, +0.3288, +0.2035, -0.1370,
+0.1637, +0.2244
],
[
-0.0494, -0.5338, +0.3075, +0.3866, -0.0238, +0.0546, +0.3774,
+0.1667, -1.4458, -0.3139, -0.0358, +0.2484, +0.0907, -0.1575,
-0.6370, -0.0251, -0.1844, -0.0867, +0.0449, +0.2515, -0.5639,
-0.2377, +0.0194, +0.2508, +0.3766, -0.0566, -0.5726, +0.2667,
+0.3919, -0.1322, -0.1609, -0.4608, +0.1251, +0.5491, -0.0081,
-0.1707, -0.5703, -0.1431, +0.3946, -0.1315, -0.1477, +0.3808,
-0.4407, +0.0204, -0.0408, -0.0296, +0.2634, +0.3544, -0.1645,
+0.3577, -0.1202, +0.0353, +0.6943, -0.2540, +0.4272, +0.4394,
-0.0890, +0.0457, +0.2672, -0.0779, +0.2240, -0.3899, +0.2615,
-0.3007, -1.1412, -0.2154, -0.0115, +0.3824, -0.2625, +0.2427,
+0.5087, +0.1939, +0.0251, +0.1826, +0.2723, +0.3710, +0.0138,
-0.3563, -0.4064, -0.2353, +0.0655, -0.4434, -0.0499, +0.1209,
+0.1625, +0.0255, +0.1468, +0.3519, +0.0685, -0.2707, -0.3953,
+0.2012, -0.3277, +0.1075, -0.9070, +0.0664, -0.0505, +0.9489,
-0.2441, +0.2456, +0.1766, -0.0657, +0.4403, +0.0069, -0.5148,
-0.0763, +0.5377, +0.1470, -0.2591, +0.2392, -0.5094, -0.6703,
-0.0883, -0.1473, +0.4173, -0.5030, +0.5998, -0.1939, +0.2321,
-0.0096, +0.0813, +0.0249, -0.0330, +0.4237, +0.3936, +0.7550,
-0.1033, -0.4626
],
[
-0.1749, +0.4291, +0.1856, +0.8326, -0.2389, -0.1074, +0.0163,
-0.4133, -0.1674, -0.4193, -0.2264, +0.0122, +0.1945, +0.3477,
+1.1767, -0.2818, +0.2058, +0.1774, -1.2170, -0.0517, -0.1186,
+0.1058, +0.5971, -0.1671, -0.5438, +0.2372, +0.1234, -0.8853,
+0.0058, +0.1098, -0.2242, -0.1922, -0.5617, -0.1774, -0.2270,
-0.2765, -0.0398, -0.2875, -0.2003, +0.4318, -0.0017, +0.6389,
-0.3364, -0.1669, +0.1289, +0.0043, +0.1394, +0.0054, -0.0405,
-0.1790, -0.3323, -0.0987, -0.6836, -1.5795, -0.7282, +0.8996,
-0.1916, +0.4577, -0.3145, +0.3770, -1.1245, -0.1127, +0.3954,
+0.3256, +0.6970, -0.0398, -0.4260, -0.1392, +0.2852, -0.3624,
+0.5735, -0.3427, -0.5304, -0.1201, +0.0896, +0.0079, -0.4158,
+0.7538, -0.2106, +0.0878, +1.1722, +0.0430, -0.7040, -0.3037,
+0.2125, +0.2748, +0.0432, -0.9073, -0.5361, -0.0635, +0.1367,
-0.2737, +0.1896, -0.0686, -0.8063, +0.1628, -0.2493, -0.0644,
-0.4000, -0.0832, -0.8262, -0.2087, +0.0469, -0.2701, -0.0342,
+0.3451, +0.3271, +0.1488, -0.2298, +0.0165, +0.3822, +0.3408,
-0.5740, +0.4275, -0.4073, +0.1550, -0.0615, -0.3236, +0.3417,
-0.4795, -1.4346, -1.1582, -0.0334, +0.1012, +0.0681, -0.3589,
+0.0009, +0.2403
],
[
+0.4514, +0.2783, +0.6260, +0.3761, -0.2668, +0.2137, -0.2847,
+0.1101, +0.1570, -0.8077, -0.3457, -0.1276, -0.3249, -1.1498,
+0.3708, +0.0481, +0.3022, -0.5984, +0.5839, +0.0303, +0.0994,
-0.2405, +0.0918, +0.5515, -0.1574, +0.0112, +0.1610, +0.0654,
-1.0527, +0.3909, -0.1364, -0.5042, -0.0112, -0.1151, -0.3180,
+0.0759, +0.4440, +0.0077, +0.4591, +0.2136, +0.0356, -0.3316,
-0.3721, -0.4922, -0.1280, -0.4384, -1.0652, -1.0171, +0.3237,
-0.1799, +0.4100, +0.1289, -0.5390, -0.4013, +0.1032, +0.2068,
+0.2910, +0.0418, -0.6356, +0.1113, +0.0839, -0.5103, -0.3927,
-0.4109, +0.1319, -0.5748, +0.2908, +0.2892, -0.5980, +0.1307,
+0.6128, -0.5456, -0.2789, +0.3952, +0.2433, -0.2612, +0.1269,
+0.3073, +0.0327, -0.7277, -0.0694, -0.1416, +0.1028, +0.2833,
-0.1480, +0.1768, -0.8571, -0.0552, +0.2363, -0.2078, +0.2579,
-0.0756, -0.6716, -0.7545, +0.0556, -0.2509, +0.2202, -0.4954,
+0.5409, +0.2372, -0.5721, +0.2712, -0.8320, +0.0032, -1.1220,
-0.3420, -0.4108, +0.2167, -0.0481, +0.1335, +0.5197, -0.0579,
+0.2260, -1.0780, +0.0694, +0.0960, +0.0198, -0.1209, -0.0714,
-0.3428, -1.4402, -0.0295, -0.8579, -0.0698, +0.0709, -0.6385,
+0.1580, +0.3372
],
[
-0.1761, -0.1190, -0.1444, -0.1236, +0.0190, -0.6771, +0.1407,
-1.1494, -0.5002, +0.1270, +0.1412, -0.1491, -0.4276, +0.1093,
-0.2619, -0.5989, -0.0392, -0.3743, +0.1734, -0.6701, +0.3487,
+0.1645, +0.0872, -0.0868, -0.3575, -0.4350, +0.5061, -0.0672,
-0.7309, -0.3852, -0.5321, +0.0891, +0.1581, -0.9461, +0.3101,
+0.2622, +0.1075, -1.1945, +0.0055, +0.1117, -0.2656, +0.1402,
-0.2464, +0.1366, +0.0586, -0.0188, +0.0702, +0.1080, +0.5085,
-0.8725, -0.6361, +0.2417, +0.2799, -0.2453, +0.0209, +0.1083,
-0.6030, +0.0556, +0.4698, -0.5757, +0.3942, -0.7870, -0.5153,
-0.3523, -0.7754, +0.1655, +0.1634, +0.1448, +0.0469, -0.5121,
-0.3382, -0.1957, +0.4266, +0.0971, +0.3234, -0.0826, -0.8881,
+0.4790, +0.0021, -0.0421, -0.6887, +0.0712, +0.0470, -0.6644,
-0.1432, -0.4532, -0.6663, +0.1885, -0.2055, -0.4784, +0.0304,
-0.0816, -0.3079, -0.3411, -0.2617, -0.4918, -0.2279, +0.2341,
+0.3344, +0.3590, -0.2672, -0.3317, -1.4133, +0.2980, +0.0894,
-0.0787, -0.6533, +0.4272, -0.1723, -1.3314, -0.2505, -0.6322,
-0.0732, -0.3339, -0.2277, -0.5841, -0.0023, +0.6160, -0.8841,
-0.3858, +0.1262, -0.1833, -0.3150, +0.2126, -0.0341, +0.1543,
-0.0226, -0.0872
],
[
+0.1093, -0.3953, -0.2439, +0.2264, +0.0552, -0.2641, +0.1326,
-0.3655, -0.0776, +0.1456, -0.3135, -0.2428, +0.1776, +0.0427,
-0.1049, +0.3194, -0.6210, -0.1818, +0.0587, -0.3108, -0.0102,
+0.3117, +0.2641, +0.0718, -0.2656, -0.0378, +0.0577, +0.3685,
-0.2128, +0.5100, -0.3235, +0.4025, -0.4131, -0.2795, +0.4549,
-0.5022, -0.7002, +0.1465, +0.1464, +0.4145, +0.2155, +0.2542,
-0.4837, +0.4830, +0.2456, +0.2177, +0.3704, +0.4179, -0.2369,
-1.2701, -0.1190, -0.0053, +0.0357, -0.3522, +0.6011, -0.4625,
-0.0481, +0.3027, -0.3711, -0.1843, -0.1280, -0.1988, +0.0890,
+0.2666, +0.3521, -0.0716, -0.5562, -0.2326, -0.5869, -0.0804,
+0.2775, +0.3285, -0.2048, -0.0762, -0.2221, -0.1345, -0.5772,
+0.4186, -0.3327, -0.3538, +0.2481, +0.2320, -0.2328, +0.0304,
-0.0009, +0.3131, +0.3529, +0.0744, +0.1237, -0.9127, +0.0470,
-0.7908, +0.7607, +0.2485, -0.1654, -0.1840, +0.2082, +0.1369,
+0.1086, -0.3694, -0.0488, -0.1335, +0.2039, +0.4223, -0.3518,
-0.1256, +0.1546, -0.5031, +0.0287, +0.0298, +0.1991, +0.0729,
-0.0224, -0.2507, -0.8534, +0.1450, -0.0068, -0.3593, -0.1262,
-0.4906, +0.2334, +0.0321, -0.7071, +0.2864, -0.0723, -0.0620,
-0.3231, +0.5353
],
[
-0.1872, -0.6072, -0.1623, -0.2428, +0.5207, -0.4531, +0.3649,
-0.0208, -0.0905, +0.1987, -0.1625, +0.0392, -0.7373, -0.2516,
-1.1968, -0.2712, -0.3207, -0.2279, -0.1482, +0.3457, -0.6574,
+0.2216, -0.7708, +0.0120, -0.2948, -0.2319, +0.0410, -0.1395,
+0.1286, -0.8962, -0.6844, -0.2874, -0.2098, -0.8660, -0.3060,
+0.1075, -0.4889, -0.4876, +0.1585, +0.0271, -0.7649, -0.3120,
-0.5355, -0.3379, +0.0560, -0.1290, -0.5892, -0.0722, +0.0397,
-1.7302, -0.1772, +0.0709, +0.4661, +0.5430, +0.0243, +0.1174,
-0.7254, +0.3965, +0.0488, +0.5466, +0.0975, -0.5617, +0.5235,
-0.1244, -0.5608, +0.1739, +0.5572, +0.4367, -0.4987, +0.2556,
-0.6455, +0.2922, -0.4531, +0.1180, -0.0714, -0.3898, -0.7729,
-0.2274, +0.0612, -0.0508, +0.1203, +0.1132, -0.2378, -0.4293,
+0.9299, +0.1615, +0.0224, +0.1529, -0.6744, -0.6975, -0.7713,
-0.1987, -1.2150, -0.0764, -0.4671, +0.1748, -0.0191, -0.1449,
+0.3394, +0.2276, +0.4175, -0.4635, -0.0192, -0.0181, -0.2375,
+0.0315, -0.9739, -0.0035, -0.1776, -0.1825, -0.1149, -0.6031,
-0.1993, +1.0660, +0.1942, +0.2916, -0.3240, +0.1672, +0.8438,
+0.0368, +0.2153, -0.2414, -0.5326, -0.1686, -0.8086, +0.2675,
-0.1521, +0.1660
],
[
+0.3672, +0.3659, -0.3833, -0.0773, +0.5133, -0.1099, -0.1455,
-0.2446, +0.1592, +0.1504, +0.1035, +0.3368, -0.3332, -0.2314,
+0.1218, +0.5182, +0.2823, -0.7633, -0.2657, -0.4206, +0.2680,
+0.3045, +0.1340, -0.7384, +0.1212, +0.5130, +0.1806, +0.0665,
+0.3528, +0.3223, -0.1746, +0.2645, +0.0619, +0.4065, +0.5508,
-0.4019, -0.5304, -0.9035, -0.9114, -0.7582, -0.1242, +0.0469,
-1.1396, -1.0376, +0.2146, -0.1570, -1.2924, -0.5072, -0.5510,
+0.8576, -0.6060, -0.5182, -0.4349, +0.5918, -0.2843, +0.2225,
-0.8508, -0.4305, +0.1419, -0.4509, -0.7419, -0.4085, -0.0913,
-0.1617, -0.2415, -0.3566, -0.4031, -0.8209, -1.1396, -0.1332,
+0.2044, +0.0062, -1.3372, -0.4069, -0.3494, +0.0520, -0.4325,
-0.2444, -0.3479, -0.4087, -0.0608, -0.0265, +0.7527, -0.4687,
-0.1593, +0.3562, -0.7702, -0.0494, +0.5192, -0.0127, +0.1519,
+0.4498, +0.2519, -0.3605, -0.8008, +0.2678, +0.4282, -0.2361,
+0.4220, +0.2413, -0.3887, -0.1261, -0.2721, -0.0707, -0.5530,
-0.0053, -0.0583, +0.1979, +0.2960, -0.9714, -0.5201, -0.5804,
-0.0634, +0.4646, +0.0475, -0.9271, -0.1548, -0.4379, -0.7117,
+0.5550, -2.2792, -1.3753, +0.3930, -0.2748, -0.1226, -0.1022,
-0.1620, -0.5356
],
[
+0.2949, -0.0316, -0.3410, -0.8611, -0.3436, -0.0336, -0.5042,
+0.3260, +0.0233, +0.5927, -0.4618, -0.2004, +0.0765, -0.3380,
-0.5127, +0.1943, -0.4830, -0.3109, +0.1185, -0.2727, +0.0741,
+0.4450, -0.2149, +0.2026, -0.2270, -0.4992, -0.2183, +0.3672,
+0.0453, -0.0997, -0.3463, -0.9736, -0.1515, -0.9857, +0.0598,
-0.5932, -0.3548, +0.1416, +0.2225, -0.3457, +0.2285, -0.6075,
+0.1748, +0.3191, -0.2952, +0.2206, +0.1597, +0.0573, +0.0946,
-0.0554, +0.4816, -0.2746, -0.2081, +0.0103, -0.1562, +0.0599,
+0.4590, +0.0938, -0.1300, +0.2554, +0.8053, -0.3771, +0.0845,
+0.1849, -0.2444, -0.3207, -0.2108, +0.4256, -0.4149, +0.3380,
-0.5478, -0.1006, +0.3588, +0.0793, +0.0071, +0.0359, -0.4133,
-0.4701, -0.2979, -0.1489, +0.3917, -0.3694, -0.2860, -0.1781,
+0.4127, -1.5028, -0.7666, -0.4226, +0.0058, -0.2195, +0.2938,
+0.1673, -0.1712, +0.0225, -0.2716, -0.4006, -0.8034, +0.1702,
-0.6867, -0.6487, +0.4672, +0.3390, -0.1677, -0.1081, -0.1951,
-0.6481, -0.0202, -1.5928, +0.2900, -0.1350, -0.1210, +0.1912,
-0.0764, -0.6552, -0.2993, +0.3903, +0.1795, -0.6494, +0.4557,
+0.0359, -0.1020, -0.0776, +0.2410, +0.4618, +0.1899, +0.0519,
-0.1175, -0.4876
],
[
+0.4475, +0.6580, -0.9072, -0.4252, -0.7311, +0.3157, -0.3769,
-0.2521, +0.0655, -0.3300, +0.8021, +0.5316, +0.5242, +0.9494,
-1.3550, -0.1479, -0.6358, -0.7123, -0.3423, +0.0953, -0.2029,
+0.5488, +0.3757, +0.6414, -1.0084, -0.4088, +0.1759, +0.1782,
-0.0354, -0.2070, +0.2549, -0.7897, -0.6122, +0.3156, +0.1283,
+0.1304, +0.2468, -0.3438, -0.1424, -2.6215, -0.1281, +0.5616,
+0.1851, -0.4052, +0.2366, -0.8374, +0.4125, +0.6650, -0.0151,
-0.5108, -1.8219, -0.3488, +1.0016, -0.4113, -0.5853, +0.6533,
-0.1658, +0.1938, -0.1667, +0.2028, +0.5964, -0.0301, +0.1358,
+0.1099, -0.0235, -1.5907, -1.0106, -0.0173, -0.1321, +0.2166,
-0.1253, +0.4326, +0.3080, -0.6533, +0.7349, -0.5500, +0.7962,
+0.5124, +0.0823, +0.9100, +0.1135, -0.2727, +0.8711, -0.1426,
-0.4308, -0.0841, +0.6076, -0.7595, -0.4672, +0.3527, -0.4772,
+0.3365, -0.4806, -1.4707, +0.8538, +0.1737, -0.4534, +0.5326,
+0.1963, -0.4248, +0.3910, +0.6292, +0.2747, +0.5681, -1.2108,
-0.0866, -0.1978, -0.8462, -0.3194, -1.2809, -0.2784, +0.2669,
-0.9332, -0.5646, -0.2407, +0.2679, -0.9020, -0.1795, -0.0125,
-0.8087, -0.9155, -2.1798, +0.0504, -0.3026, -1.3238, -1.7538,
+0.3439, -0.0563
],
[
+0.0319, -1.6628, -0.0173, +0.2989, +0.1676, +0.0364, -0.1573,
-2.2370, -0.2175, -0.9091, -0.6619, +0.0660, -0.5971, +0.3842,
-0.3193, -0.3382, +0.3384, -0.2757, +0.3400, +0.1164, -0.4148,
-0.1678, -0.3672, +0.9148, +0.4749, +0.0187, -0.3610, -0.2145,
+0.2947, +0.1979, -0.1432, -0.1918, +0.1166, +0.2417, -0.0415,
+0.1454, +0.1314, +0.5063, -0.6644, -0.1707, +0.2847, -0.4376,
-0.1103, -0.8536, +0.2727, -0.1896, -0.5441, -0.5398, -0.0445,
+0.0335, +0.0064, -0.4442, -0.0465, +0.3768, -0.8925, -0.2810,
-0.1458, +0.1723, -0.0520, +0.9517, +0.7592, -0.2111, +0.1319,
-0.8783, -0.2669, -0.2436, -0.2715, -0.4499, -0.7418, +0.1686,
+0.0313, -0.0361, -0.3253, +0.2035, +0.0673, -0.2192, +0.0130,
-0.2811, -0.0055, -0.4326, -0.5431, -0.3846, -0.2410, -0.8734,
-0.1606, +0.0070, -0.3290, +0.5137, +0.3075, -0.1952, +0.0826,
+0.1185, -0.0543, -0.1451, +0.4320, +0.5998, +0.4729, -0.3437,
-0.3013, -0.4789, -0.4182, +0.2208, +0.1769, +0.1167, +0.5403,
-0.2031, -1.8206, -0.9715, -0.4390, -1.0862, +0.0155, -0.7855,
-0.5351, -0.0728, -0.4738, +0.0931, +0.4726, -0.0554, +0.2552,
-0.5134, -0.3282, +0.0480, -0.2491, -0.4939, -0.3105, -1.0567,
-0.0065, +0.1648
],
[
+0.3248, -0.1633, +0.5252, -0.4404, -0.2621, +0.2188, +0.2378,
-0.4007, -0.7312, +0.4298, +0.4192, -0.0322, +0.1656, -0.8179,
+0.3438, +0.2162, +0.1376, -0.1415, +0.1829, +0.2162, +0.1823,
+0.0527, +0.4989, -0.1013, +0.0446, +0.2098, +0.0298, +0.4420,
+0.0982, +0.0556, -0.1275, -0.4022, +0.0921, +0.3972, +0.2696,
-0.1864, +0.4565, +0.2358, +0.4417, -0.0330, +0.1875, +0.3638,
-0.6421, +0.0176, -0.4929, +0.2501, -0.4281, -0.0558, +0.0193,
+0.4463, +0.3124, +0.2450, +0.2867, -0.2307, -0.4713, +0.0499,
-0.0854, +0.0547, -1.0298, -0.0193, +0.1035, +0.4370, -0.8156,
+0.0611, -1.1987, -0.1717, -0.4427, +0.0735, -0.4460, -0.1526,
-0.3083, -0.6694, +0.0866, +0.3043, +0.4516, -0.6468, -0.0721,
-0.1257, -0.1034, +0.0416, -0.0325, -0.5287, +0.4402, -0.1051,
-0.0580, -0.1416, -0.1219, +0.4046, +0.4270, +0.2213, +0.4172,
+0.4552, -0.3745, +0.0014, -0.4077, -0.4349, +0.0824, -0.0385,
+0.3813, +0.0135, +0.0187, +0.3953, -0.1952, +0.0045, -0.9718,
-1.0177, +0.1590, -0.6199, +0.4418, -0.4560, -0.0083, -0.1064,
+0.3796, +0.4562, +0.2073, -0.1268, +0.1443, +0.0193, +0.2260,
+0.1600, -0.6394, -0.1616, -0.6255, -0.1362, -0.0162, +0.4192,
+0.0862, -0.0224
],
[
+0.2984, +0.0992, -0.0996, -0.6480, +0.1425, -0.2154, -0.2364,
-0.5196, +0.2569, +0.4777, +0.0330, -0.2675, -0.4374, +0.4110,
+0.2946, -0.1093, +0.6461, -0.2925, -0.1891, +0.3656, -0.1734,
-0.6296, -0.5544, -0.3675, +0.3985, -0.1510, +0.3925, -0.2182,
-0.0117, -0.0585, -0.3104, +0.3286, +0.8725, +0.6009, +0.1548,
+0.1402, -0.0681, -0.3306, -0.2152, +0.1912, -0.1301, -0.6383,
-0.0810, +0.0093, -0.1257, +0.4486, -0.1424, +0.0640, +0.4627,
-0.2530, -0.1157, +0.2227, -0.1777, +0.3090, -0.2005, -0.1327,
+0.3165, +0.0120, +0.4165, -0.3985, +0.1769, -0.0774, -0.1105,
-0.1150, +0.2654, -0.0559, -0.1382, +0.2627, +0.2446, -0.5470,
-0.5101, +0.0276, -0.3068, +0.4351, -0.0344, -0.2234, +0.3508,
-0.8163, +0.2247, -0.7000, -0.3395, +0.0134, -0.1784, -0.2246,
+0.6040, -0.2900, +0.3628, -0.3002, +0.4111, +0.5022, -0.4669,
+0.2574, +0.5707, -0.2343, +0.6138, -0.1966, -1.4395, +0.0095,
-0.4462, -0.1831, +0.0234, -0.5368, +0.2990, -0.5012, +0.1117,
-0.1993, -0.2311, +0.4470, +0.5955, -0.8588, +0.1398, -0.7063,
-0.1499, -0.0807, -0.1804, -0.6344, +0.0708, +0.0311, -0.2556,
+0.2427, +1.2595, -0.2182, -0.0405, -0.1956, -0.4734, +0.0082,
-0.2395, -1.0875
],
[
+0.4931, -0.0139, -0.1785, -1.3003, -0.9315, +0.1883, +0.0783,
+0.1156, +0.2229, +0.4970, -0.3026, -0.0427, +0.3666, +0.0441,
+0.3605, +0.3052, +0.1717, +0.2815, -1.1105, +0.5068, -0.9422,
+0.1006, +0.3818, -1.0538, +0.4193, +0.1306, +0.0993, -0.0268,
+0.6013, -1.2039, -0.1301, -0.2177, -0.2388, +0.0288, -0.2354,
+0.0407, +0.2387, -0.4171, +0.1845, +1.3959, -0.4449, -1.0931,
+0.1117, -1.2900, -0.3455, +0.1650, +0.4779, +0.1619, -0.1346,
+0.5967, +0.1350, +0.1895, -0.1691, +0.0322, -0.5549, -0.4310,
-0.5731, -0.5571, +0.1791, +0.2053, -0.1841, -0.0625, +0.2050,
+0.2110, -0.0270, -1.5258, -0.1127, +0.3542, -0.6700, +0.0764,
+0.3272, -1.6352, +0.5401, -0.1176, -0.6925, -0.1588, +0.1769,
-0.5969, +0.2519, -0.7011, -0.5501, -0.3885, -0.2958, -0.5574,
-0.1361, +0.4964, +0.2261, +0.1588, -0.6176, +0.3896, +0.3587,
-0.1452, -0.4678, -0.4397, -0.6235, -0.2614, -0.3143, -0.7581,
-0.3924, +0.1405, -0.1026, -1.1059, -0.5655, -0.5290, -0.9429,
+0.1219, +0.2150, -0.0211, +0.3210, +0.1728, +0.3014, +0.7363,
-0.6684, -0.2873, -0.2049, -0.3292, -0.9234, +0.3870, -1.4117,
+0.2839, -0.1830, -0.7153, -1.5961, +0.0216, +0.3415, -0.3118,
-0.5493, -0.1679
],
[
+0.3085, +0.1217, +0.2708, +0.1764, +0.0721, -0.5826, +0.0589,
+0.1488, -0.2251, +0.1201, -0.5999, +0.3246, +0.7053, +0.4986,
-0.6902, +0.4752, -0.1896, +0.3343, -0.5747, +0.2491, +0.0376,
+0.0067, -1.5010, +0.0388, +0.0795, -0.4898, +0.2748, -0.7656,
+0.0971, -0.3358, +0.1696, +0.1662, -0.4723, +0.2035, +0.1646,
-0.0822, -0.1929, -0.8202, +0.2350, -0.1611, +0.1259, -1.3991,
+0.2596, -0.0796, +0.3935, +0.1308, +0.0481, +0.0643, -0.5223,
-0.7995, -0.1504, -0.1120, +0.1316, -0.5272, +0.1430, -1.6754,
-0.3763, -0.4128, +0.2002, -0.0846, +0.1848, +0.2289, -0.1495,
-0.2388, +0.1006, -0.4186, +0.6275, -0.8756, -0.2684, +0.1550,
-0.7136, -0.5394, -0.0622, -0.5143, -0.5301, -0.5289, -1.3851,
-0.1833, -0.3752, -1.3665, +0.3135, +0.3702, -0.2620, -0.2681,
-0.1054, -0.5276, -0.8615, +0.1398, +0.5070, +0.0973, -1.3543,
-0.1740, +0.2780, +0.2337, +0.2031, -0.0993, +0.3770, -0.5235,
-0.4920, +0.0583, -0.0661, -0.0447, -0.0847, +0.2940, +0.1105,
-0.4637, -0.0770, +0.1984, -0.1683, +0.3739, -0.3324, -0.1877,
-0.2671, -0.3427, -0.2696, -0.6949, -0.8354, +0.0840, -0.7230,
-0.2395, -0.2272, -0.1010, -0.5552, -0.0120, -1.7305, +0.4488,
-0.7055, -0.0241
],
[
-0.0699, -0.0347, -0.1426, +0.0382, -0.6711, -0.9992, +0.0990,
-0.0885, -0.0089, -0.3321, +0.0392, +0.1791, -0.0517, -0.0319,
+0.0047, -0.0617, -0.2380, +0.3098, -0.0395, -0.2500, -0.0267,
-0.1255, -0.4858, +0.1856, -0.0685, -0.2430, +0.1449, +0.3326,
-0.4269, +0.5182, +0.1142, -0.2007, +0.2764, -0.2299, +0.7191,
-0.7689, -0.1414, -1.1110, -0.1952, +0.0435, -1.4034, +0.0324,
+0.1118, +0.0090, -0.0316, -0.2086, -0.1791, +0.1090, -0.3573,
+0.0272, +0.4384, +0.0325, -0.5804, +0.0563, +0.3496, +0.1126,
-0.6086, -0.1028, -0.6224, +0.6576, +0.4319, -0.4998, +0.1549,
-0.3424, -0.1119, -0.3496, +0.4719, +0.0783, -0.4349, +0.1816,
-0.5880, +0.1618, -0.3262, -0.3673, -0.3076, -0.1708, -0.4230,
-0.5483, +0.4582, -0.7904, -0.5307, -0.5135, -0.5689, -1.2002,
-0.1985, +0.1232, +0.2213, -0.1413, +0.2333, +0.4705, -0.5972,
+0.5658, -0.1857, -0.0892, -0.5555, +0.0480, -0.7166, -0.3275,
-0.3127, +0.2932, -0.2623, -0.6273, -0.2590, -0.1251, -0.2027,
+0.2016, -0.7108, -0.2773, -0.4188, +0.5586, +0.2671, -0.2214,
-0.0762, -0.9201, -0.4880, +1.1728, -0.0636, +0.1197, -0.7996,
-0.1891, -0.7012, -0.5298, +0.4840, -0.2211, +0.3282, -0.3851,
+0.2790, +0.2206
],
[
+0.2515, -0.0460, +0.6307, -0.4469, -1.2297, +0.3137, -0.0871,
-0.9855, +0.3819, -1.2793, -0.4935, +0.2099, -0.2078, +0.5218,
-0.3201, +0.5992, -0.3316, -0.6488, -0.4344, -0.2481, +0.4467,
-0.1658, +0.3424, -0.7730, -0.0601, +0.2052, -0.7356, -0.1673,
+0.0029, -0.1864, +0.4766, +0.4392, -0.5309, -0.2602, -0.4008,
-0.9269, -0.8221, -0.3150, -0.5733, -0.5973, -0.1171, -0.2877,
+0.4653, +0.6510, -0.0373, +0.1813, -0.5940, -0.3138, -0.1951,
-0.2005, +0.3433, +0.3224, +0.0881, -0.2401, -1.2631, -0.0198,
+0.0647, -0.4116, -0.1162, -0.3314, +0.1495, +0.1694, +0.1436,
-0.2433, -0.6034, -0.1160, +0.4647, -1.1555, +0.2582, -0.7255,
-0.8492, +0.1665, +0.0833, +0.7934, +0.0374, +0.1002, -0.1522,
+0.9246, -0.4266, +0.3160, -0.1681, +0.4971, -0.1865, -0.1163,
-0.9149, +0.3540, -0.2281, +0.0785, +0.0523, -0.3920, -0.7295,
-1.5365, -0.3843, +0.2214, +0.5486, -0.1391, -0.0621, +0.6017,
-1.6143, -0.2603, -0.4795, -0.6317, -0.4797, -0.0137, -0.5190,
-0.4591, -1.6069, +0.2246, -0.1321, +1.2048, +0.1243, +0.1225,
+0.1101, -0.2003, -0.8028, +0.2594, -0.0839, +0.9636, -0.1147,
+0.4519, -0.9124, +0.5739, +0.0732, +0.1405, +0.2791, -0.0281,
-0.3491, +0.4972
],
[
+0.2997, -0.0590, +1.5474, -0.2897, -0.0720, +0.4884, +0.3767,
-0.6406, +0.2781, +0.1654, +0.5724, -0.3223, -0.3083, -0.0130,
-0.8242, +0.1416, -1.2836, -0.2536, -0.5389, -0.7150, +0.2057,
+0.3423, +0.1123, -0.4825, -0.3663, -0.0181, -0.9098, -0.8177,
-0.1424, +0.6420, -0.2045, -1.4718, +0.8390, -0.3501, +0.7143,
+0.4790, -0.8828, -0.6263, +0.4958, -0.3797, +0.0561, -0.5050,
-0.7387, +0.1345, +0.0628, -0.0689, -0.3732, -0.4678, -0.5067,
+0.8548, +0.1120, +0.6079, -0.0247, -0.5180, +0.0315, +0.7373,
-0.0192, -0.0424, -0.6181, +0.3579, +0.2907, -1.0741, -0.5571,
-0.4145, +0.6020, +0.1359, -0.1789, -0.7475, +0.2754, +0.3870,
-0.5211, +0.0470, +0.2925, -0.6933, -0.4954, +0.5955, +0.3210,
-0.7589, +0.3018, -0.5384, -1.0367, +0.2131, -0.3080, -0.1733,
+0.0630, -0.5894, -0.3412, +0.9476, +0.0097, -0.6666, -0.1119,
+0.2413, -0.6325, -0.2442, -1.2067, -0.3356, +0.2132, +0.1711,
-0.8029, -0.0991, +0.3307, +0.3498, -0.9837, +0.4930, -0.7711,
-0.9355, -0.3776, +0.2788, -1.0194, -0.4159, -0.3065, -0.5042,
+0.1818, -0.3402, -0.6123, -0.6465, +0.0899, -0.0228, -0.9121,
-0.7620, -1.9192, -0.1273, -0.3641, -0.6793, -0.6107, -0.9946,
-1.3522, -0.0111
],
[
-0.1071, -0.0914, -0.3853, -0.4486, -0.4044, +0.3221, +0.2509,
-0.2483, +0.8798, -0.0691, -0.9215, +0.4125, +0.0706, +0.3623,
-1.1719, -0.1146, -0.1429, +0.2887, -0.0463, -0.1385, +0.0253,
+0.6948, +0.2812, +0.2271, +0.1506, +0.1074, +0.2028, +0.3923,
+0.0775, +0.1088, -0.2068, +0.3874, -0.1533, -0.1300, +0.3297,
-0.5180, -0.0545, +0.3264, +0.1504, +0.4627, -0.6114, +0.1983,
-0.2525, +0.0557, +0.1478, +0.2654, +0.1795, -1.0393, -0.3441,
+0.2040, -0.2679, -0.1513, -0.0434, -0.6975, -1.2178, +0.0540,
+0.0830, -0.4225, -0.1042, +0.3557, -0.4862, +0.4225, +0.0174,
-0.0394, -0.2975, +0.2248, -0.5806, -0.0481, +0.0870, -0.9754,
-0.3492, -0.2499, +0.1434, -0.7345, -0.4206, -0.1391, +0.5547,
-1.5464, +0.2995, -0.2667, +0.2176, -0.0222, -0.8436, +0.3644,
-0.0144, +0.1929, -0.1981, -0.2658, +0.0008, +0.2550, +0.2259,
+0.2459, -0.0549, -0.8412, -0.2304, +0.2651, +0.3952, -0.1507,
+0.7589, +0.1382, +0.3815, +0.4374, +0.5381, -0.2944, -0.7976,
+0.5965, -0.2968, +0.2455, +0.3352, -0.7624, +0.1680, -1.0777,
+0.2098, +0.0263, +0.5142, +0.1962, -0.9001, -0.2288, -0.0602,
-0.3794, -0.0381, -0.3365, +0.5141, -0.2042, -0.2965, -0.2482,
+0.0614, -0.0506
],
[
-0.4827, -0.9625, -0.6821, +0.6980, -0.7364, -1.0121, +0.1778,
-0.2168, -0.5134, +0.0203, -0.1385, +0.3100, -0.4719, -0.0024,
-0.7544, +0.1833, -0.5929, +0.0118, +0.5007, +0.2929, -1.1400,
-0.3093, +0.1488, +0.0664, -0.0794, +0.1037, -0.1451, +0.4052,
+0.6305, +0.0479, +0.6161, -0.2096, +0.4492, +0.1423, -0.8857,
-0.5945, -0.9675, +0.1129, -0.0625, +1.0241, -0.2228, -0.3425,
+0.3905, -0.0610, -0.7793, -0.5432, -0.4786, -0.7494, -0.3886,
-0.0728, -0.3910, -0.3117, -0.6177, +0.7814, +0.1274, -0.1076,
-0.9399, -0.1651, -0.2074, -0.3800, +0.1975, +0.3417, -0.3567,
+0.1682, +0.5643, -0.3865, -0.5320, -0.0927, -0.4048, -0.3455,
+0.4964, +0.2474, +0.6381, -0.2735, +0.1357, +0.0831, -0.7824,
+0.1336, +0.1894, -0.5041, +0.1137, -0.8554, -0.3267, -0.8148,
+0.2027, -0.6058, +0.2357, -0.6815, +0.3592, -1.0961, -0.0851,
-0.3931, -0.3858, -0.4947, +0.3376, -0.4960, -1.2253, -0.2729,
-0.0613, +0.0338, -0.4375, +0.1772, -1.2183, -0.3815, -0.6360,
-0.1212, +0.5667, +0.3882, -0.2186, +0.3979, -0.8886, +0.5915,
-0.1799, -0.2567, -1.1061, +0.2576, +0.0697, +0.5720, -0.7618,
-0.0331, -0.3751, -0.5745, -0.3045, -0.0802, -0.2254, +0.2281,
+0.7876, +0.8453
],
[
-0.5146, -0.1675, +0.4185, +0.3867, -2.0276, -0.3550, -0.0103,
+0.0759, -0.5695, -0.5775, +0.4338, +0.0862, -0.2782, -1.1816,
+0.4527, +0.0245, +0.2384, +0.2595, -0.1889, +0.1669, +0.1330,
-0.5044, +0.1522, +0.1296, +0.1008, -0.1094, -1.0970, +0.2959,
-0.0608, -0.2347, +0.0648, -0.8752, -0.0874, -0.3487, +0.1176,
+0.6469, +0.4873, +0.2115, -0.1896, -0.3099, +0.2538, +0.1531,
-0.0522, +0.1851, -0.4144, -0.0601, -0.1023, +0.0833, +0.0115,
+0.0726, +0.8781, -0.2878, -1.7336, +0.0163, -1.6014, -0.0083,
-0.3536, +0.2323, -0.7565, -0.2184, -0.4687, +0.3105, +0.3376,
+0.4321, -0.3851, +0.0826, -0.2408, +0.0863, -1.4832, +0.0563,
-0.2557, -0.3054, -0.3075, -0.2278, -0.1671, +0.0521, +0.3658,
+0.3719, +0.0804, -0.6263, -0.1550, -0.1028, -0.2334, +0.2369,
-0.4011, +0.9961, -0.0063, +0.9473, +0.2949, -0.1119, -0.6594,
-0.4036, +0.4119, -0.6482, -0.1113, +0.1171, -0.4483, -0.4772,
-0.0929, +1.0857, +0.2080, +0.1390, +0.6034, +0.2009, +0.0470,
+0.1179, -0.3087, -0.2450, -0.3718, +0.1897, -0.0922, +0.5298,
+0.0121, +0.1860, +0.1340, +0.4798, -0.0977, -0.1933, -0.1136,
-0.0081, -0.3595, -0.1116, -0.8788, -0.8181, +0.0373, +0.1917,
+0.0235, +0.4108
],
[
-0.2226, -0.1118, -0.4395, +0.0949, +0.2345, +0.1716, -0.5861,
+0.3246, -0.1655, -0.3465, -0.1213, -0.3040, +0.2176, -0.1509,
+0.1273, +0.2831, -0.2209, -0.0024, -0.1555, +0.2461, -0.4054,
-0.3875, -0.2816, +0.1992, +0.2812, -0.4946, -0.0605, +0.4227,
-1.1630, +0.0745, -0.0690, -0.1638, -0.1808, +0.0094, -0.0691,
-0.2381, -0.8959, -0.2208, +0.3658, +0.5741, +0.2885, -0.0040,
+0.4126, -1.0711, +0.3548, -0.3181, -0.6322, -0.1208, -1.0722,
+0.2538, -0.5078, -0.5311, +0.3811, -0.2693, +0.2067, +0.6466,
+0.0822, -0.1527, +0.1130, -0.3884, -0.3325, -1.1208, +0.1370,
-0.3095, +0.0311, +0.1457, -0.0690, +0.0818, -0.5080, -0.7021,
-1.9342, +0.0178, -0.1540, +0.1579, +0.5999, -0.0807, -0.0241,
+0.4862, -0.1039, -0.0031, -0.5623, +0.0595, +0.1014, -0.7204,
+0.0172, +0.2057, +0.2657, +0.4014, -0.2632, +0.3744, +0.3248,
-0.4278, -0.5524, +0.4031, -0.4441, +0.5158, +0.4152, -0.1849,
+0.5721, +0.3827, +0.2814, -0.0213, -0.1929, -0.2211, +0.2765,
-0.5487, +0.4636, -0.3462, -0.2213, +0.2036, -0.0372, -0.7832,
+0.3035, -0.3648, +0.1918, -0.2024, +0.1873, -0.3389, -0.1560,
+0.2861, +0.5234, +0.1103, -0.0112, -0.1042, +0.5901, -0.1086,
-0.3113, +0.2456
],
[
+0.6437, +0.2104, -0.1146, -0.2029, -0.1413, +0.0463, -0.1361,
-0.3423, -0.5981, -1.3993, -0.2364, -0.6253, -0.0367, +0.0396,
+0.4071, -0.1041, +0.2952, -0.7470, -0.5323, +0.1711, -1.6514,
-0.2995, -0.1617, -0.0444, +0.2066, -1.2562, +0.1500, +0.2685,
+0.0740, -0.6300, -0.1595, -0.2937, -0.1878, -0.1974, +0.0573,
-0.1410, +0.3946, +0.3699, -0.2207, -0.9597, +0.6649, +0.3057,
-0.1968, -1.5333, +0.0333, +0.1745, +0.6678, -0.4441, -0.5636,
-0.0175, -0.8500, -0.1240, -0.6060, +0.2580, +0.1085, +0.0204,
-0.4799, +0.0779, +0.4179, -0.0790, -0.1145, -1.4217, +0.7018,
+0.0990, +0.4478, +0.2631, -1.5982, -0.1057, -0.6651, -0.2051,
+0.9339, -0.7448, -0.7337, -0.1249, +0.1416, +0.3265, +0.4638,
-0.3997, +0.1113, -0.6644, -1.0355, -0.5059, -0.3138, -0.8874,
-0.8188, +0.2271, -0.5940, +0.4332, -0.5486, -0.2080, -0.8829,
-0.1462, -0.7029, -1.0269, -0.4292, +0.6677, -0.6375, +0.2042,
-0.0905, +0.5251, -0.1005, -0.0572, -0.2358, +0.1797, -0.6072,
-0.5522, +0.0565, -0.6040, -0.0751, -0.0251, -0.0597, -0.6340,
-0.4350, -0.4168, +0.1966, -0.3608, -0.3125, -0.1044, +0.2857,
-0.2191, -0.0965, -0.1001, +0.1235, +0.3814, +0.0643, -0.8396,
+0.3091, +0.3177
],
[
-0.0125, +0.3690, -0.0585, -1.0826, -0.8428, -0.1197, -0.1145,
-0.2935, +0.3786, -0.0549, +0.4265, -0.2455, -0.1427, +0.2034,
+0.2929, +0.4222, +0.1811, -0.4978, -0.3388, +0.3621, -0.0699,
-0.4999, -0.5357, +0.0117, -0.5642, +0.0933, +0.1825, +0.6247,
+0.6617, +0.2117, +0.5872, -0.2158, -1.5956, -0.4088, +0.1522,
-0.0365, -0.1444, +0.0480, +0.0145, -0.8867, -0.3696, +0.7545,
+0.0776, -0.4678, +0.2637, +0.0863, +0.4760, -0.7836, -0.8257,
-0.7849, +0.3583, +0.2555, -0.3267, +0.0516, -0.4765, -0.2294,
-0.1335, +0.6383, -0.0482, +0.1919, +0.4054, -0.0641, +0.1350,
-0.6503, +0.2876, -0.1300, -0.0278, +0.1501, -1.4875, +0.3619,
-0.7003, -0.0982, +0.0386, +0.2116, -0.1633, +0.5162, -1.7085,
+0.1229, +0.0061, +0.4792, +0.0814, +0.2878, +0.2448, -0.0493,
+0.2096, +0.8204, -0.0978, +0.1990, -0.0858, +0.1393, +0.5323,
-0.4105, +0.3717, -0.3621, +0.1622, -0.0615, +0.0896, +0.2078,
+0.1755, -0.6073, +0.0121, +0.5078, +0.4294, -0.2070, +0.0555,
-0.3551, -0.2384, +0.9508, +0.9955, +0.3392, -0.1987, -0.4620,
+0.3046, -1.0395, -0.0181, +0.4667, +0.0746, -0.3318, -0.1283,
+0.4452, -0.2630, -0.2656, -0.2240, +0.2857, -0.5009, +0.1492,
-0.1431, -0.1559
],
[
-0.0629, -0.2279, -0.2815, +0.1708, +0.8295, -0.1126, -0.1447,
-0.6849, -0.1887, -0.0727, -0.3118, +0.3364, +0.1892, -0.0240,
+0.6113, +0.3232, -0.1727, -0.1026, -0.1972, -0.1144, -0.1729,
+0.0693, +0.0774, +0.6179, -0.0878, -0.1258, -0.0726, -0.4002,
-0.0009, +0.1317, -0.1568, -0.1211, -0.3489, -0.2788, +0.3074,
+0.1012, +0.4588, +0.5282, -0.4225, -0.3813, +0.3806, +0.0643,
+0.3658, +0.1461, -0.5421, +0.5411, -0.3853, +0.3188, -0.2143,
+0.3654, +0.1371, +0.1195, +0.4107, -1.8788, -0.3085, +0.1691,
+0.3571, +0.1617, -0.1075, -0.0012, -0.0209, +0.1295, +0.0088,
+0.2663, -0.4468, +0.2328, -0.5461, -0.4623, +0.0639, -0.4745,
+0.0185, +0.1748, +0.2751, +0.0289, -0.1511, -1.0610, -0.9342,
+0.1023, -0.5897, +0.4368, +0.4395, +0.3023, +0.0362, +0.0504,
+0.0549, +0.4292, -0.2066, -0.0201, +0.5096, -0.6873, -0.0542,
-0.6261, -0.4679, +0.3975, +0.6687, +0.0149, -0.0495, +0.2126,
+0.3135, -0.4383, +0.2927, -0.2681, +0.4141, +0.0640, +0.5893,
+0.0434, +0.3599, +0.5617, +0.2766, -0.1020, +0.5417, -0.4419,
+0.0661, -0.0103, -0.0789, -0.4222, +0.8133, -0.6130, -0.2251,
-0.3994, +0.0350, +0.4120, -0.0729, +0.1060, -0.1923, +0.2212,
-0.0015, -0.1975
],
[
-0.4066, +0.0110, -0.1552, +0.7796, -0.0239, +0.3462, -0.2415,
+0.0705, +0.2349, -0.0734, -0.3265, -0.1310, +0.2559, +0.5686,
-0.4128, +0.0741, +0.3212, -0.2172, +0.1443, +0.0788, +0.3495,
-0.5490, +0.3459, -0.0580, +0.0692, -0.9546, -0.4976, -0.1698,
+0.1399, -0.4934, -0.6442, +0.6383, +0.4024, -0.2844, +0.1234,
+0.1897, -0.0190, -0.0917, -0.2153, +0.7597, +0.3709, -0.5425,
+0.0875, +0.1794, +0.1240, -0.2885, -0.6782, -0.2058, -0.2110,
+0.3166, +0.2170, +0.2508, -0.0191, -1.8132, -0.4284, -0.4284,
+0.3022, -0.0620, +0.1973, -0.1018, -0.1348, -0.3027, -0.2532,
+0.8243, -0.9928, -0.0993, -0.3451, -0.2089, -0.3335, +0.1183,
+0.3709, -0.6175, +0.7353, -0.6027, +0.2178, +0.1978, -0.3458,
-0.0521, -0.6020, +0.1237, +0.0796, +0.0057, +0.1332, +0.7635,
-0.5412, +0.0156, -0.3960, +0.0878, -0.1451, -0.9331, -0.4365,
+0.1153, -0.0123, +0.1842, -0.1648, -0.3365, -0.0141, +0.2035,
+0.1613, +0.1402, -0.1826, -0.7295, +0.0094, +0.1283, -0.5300,
+0.0302, +0.3494, +0.3142, -0.8464, +0.6401, +0.4011, -0.2069,
+0.0559, -0.3056, +0.1178, +0.0165, +0.0848, +0.2998, -0.0056,
+0.2207, -0.4445, +0.2723, -0.5331, -0.0323, -0.3625, +0.4932,
+0.2006, -0.1021
],
[
-0.0519, +0.6391, -0.9079, -0.4731, +0.1328, -0.9114, +0.1221,
-0.8589, -0.1144, -0.0532, -0.2697, +0.1769, +0.1012, -0.1550,
-0.2779, -0.0026, -0.1059, -0.8324, -0.0975, +0.1698, +0.0020,
-1.0129, -0.0535, +0.0840, -0.6365, +0.3403, -1.0890, +0.2234,
+0.2051, -0.3762, -0.0219, -0.0708, +0.3895, -0.0854, +0.1450,
-0.3397, -1.0253, -0.1690, +0.2384, -0.0361, +0.6280, -0.0232,
-0.4950, +0.0184, -1.0879, -0.7964, -0.4034, -1.1418, -0.2425,
+0.1005, -0.1049, +0.2868, -0.9874, +0.2592, -0.7185, +0.2495,
-0.2086, -0.3738, +0.1108, -0.1768, -0.2887, -0.8102, +0.4337,
+0.1725, -0.2910, -0.0147, -0.0028, -1.5598, +0.5284, +0.1599,
+0.1920, +0.2220, -0.2343, -0.0158, +0.5636, +0.0944, -1.1463,
-0.2529, +0.1844, -0.6164, +0.2345, -0.0869, +0.4677, -0.6338,
+0.0034, -0.4730, -0.2976, +0.0252, -0.4131, -0.3446, -1.0642,
-0.1187, +0.9727, +0.5157, -1.0458, -0.3792, +0.0248, -0.3634,
+0.5012, -0.4836, +0.1707, +0.0261, +0.5114, -0.0316, +0.3451,
-0.0684, -0.3541, -0.2896, +0.2410, -0.7695, -0.7893, -0.2634,
-0.3458, +0.3608, -0.8740, -0.0193, +0.4683, -1.3214, +0.4505,
-0.8586, -0.5350, +0.0255, -1.7029, +0.1839, -0.6434, -0.7443,
+0.2752, -0.1452
],
[
+0.4497, +0.1332, +0.2510, -0.6546, +0.1903, +0.4629, -0.2670,
-0.6217, -0.7900, +0.4055, +0.1048, +0.0932, -1.2687, -0.0471,
+0.3675, -0.0751, +0.1833, +0.0968, +0.2331, +0.0363, -0.2188,
-0.0195, -0.0392, -0.5979, -0.0682, +0.3105, +0.2466, +0.1776,
+0.6357, -0.6630, +0.3737, -0.1215, +0.3028, +0.2282, +0.1548,
-0.5035, -0.1194, -0.6486, +0.1720, -0.1318, +0.0364, -0.4553,
+0.7951, +0.2073, -0.7346, -0.4911, +0.6745, +0.4376, +0.2163,
-0.0280, -0.1423, -0.3016, -1.3744, +0.3040, -0.3974, +0.0085,
+0.0414, +0.3840, +0.2412, -0.0372, +0.5375, -0.5450, +0.5068,
+0.8780, -0.4269, -0.6047, +0.5153, +0.1237, -0.9643, -0.6672,
-0.1510, +0.4786, -0.0686, +0.1789, -0.3273, -0.1779, +0.0177,
+0.1571, +0.1606, +0.1549, -0.4766, -0.9026, +0.0653, +0.0868,
+0.0621, -0.7266, -0.3686, +0.3382, +0.2308, +0.6874, +0.1348,
+0.4977, -0.5859, -0.2626, -0.3469, +0.1516, +0.0895, -0.2519,
-0.2592, -0.1837, -0.2296, +0.1767, -0.5679, -0.1375, -0.5070,
-0.6382, -0.1374, +0.5779, +0.4514, +0.1180, -0.2327, -0.4522,
-0.3484, -0.0035, +0.7310, +0.0454, +0.8697, +0.1059, -0.3448,
-0.2883, +0.6039, -0.3576, +0.1361, -0.1209, -0.4024, +0.1768,
-0.5684, -0.3368
],
[
-0.0751, +0.7339, +0.0408, -0.1337, -0.9369, +0.5861, +0.0524,
+0.0775, +1.0767, +0.3343, +0.3302, +0.5541, -0.4625, +0.0849,
-0.8359, -0.4253, +0.6121, +0.1737, -0.3291, +0.2212, -0.3580,
-0.0990, -0.7006, -0.1041, -0.0072, +0.7889, -0.3767, -0.0867,
-0.1271, -0.1077, -0.1127, -0.2207, -0.8218, -0.3934, -0.2509,
+0.4139, +0.1033, +0.0888, +0.0886, -0.5549, +0.0095, -0.4237,
+0.1985, +0.4558, -0.1326, +0.0703, -2.2189, -0.6442, +0.3691,
-0.7504, -0.5856, +0.0930, -0.8067, +0.1189, +0.1913, +0.1365,
-0.4218, +0.2168, +0.3196, -0.3373, +0.3003, -0.4674, +0.0712,
+0.0123, -0.1566, -0.0622, -0.1992, -0.3691, -0.5824, +0.2877,
-0.2970, -0.2558, -0.3782, -0.2703, +0.0438, -0.6318, -0.3175,
-0.5792, +0.2643, +0.3303, -0.2036, +0.2678, -0.1222, -1.0226,
+0.0067, -0.0072, +0.5945, -0.1497, -0.9653, -0.3418, -0.5937,
-0.4764, +0.5599, -0.0670, +0.3942, +0.1478, -0.6724, -0.4065,
-0.7034, -0.7214, +0.0970, +0.2242, -0.1217, +0.0651, +0.0720,
-0.5892, +0.4075, +0.1377, -0.3125, -0.3514, -0.2487, +0.0044,
-0.1191, +0.6080, +0.3778, -0.2010, -0.0943, -0.0418, -0.7266,
-0.1643, -0.9309, +0.2203, -0.0052, -0.1801, -0.0985, -0.3393,
+0.0116, -0.6730
],
[
+0.1655, +0.5211, +0.1849, +0.2302, +0.0248, +0.0857, +0.4936,
+0.2880, -0.2824, -0.4684, -0.4457, +0.1313, -0.6796, +0.4019,
-0.4805, +0.3073, +0.1115, +0.4333, +0.3163, -0.0816, -0.1883,
-0.7356, +0.2636, +0.0995, -0.4038, -0.1299, +0.0056, -0.2588,
-1.1740, -0.0212, +0.1199, -0.0595, -0.1753, -0.2451, -0.5145,
+0.6284, +0.4209, +0.2919, -0.3954, +0.1508, -0.6492, +0.3991,
-0.2702, +0.1863, -0.3409, -0.2513, +0.2952, -0.5227, +0.2080,
-0.6887, -0.1140, -0.0634, +0.1080, -0.0947, +0.0752, -0.1845,
-0.0627, +0.1679, +0.6477, +0.5071, +0.1089, -0.4251, -0.5890,
-0.2697, -0.4606, +0.1704, -0.2831, -0.7625, -0.6498, +0.1262,
-0.0748, +0.1890, +0.4346, -0.2856, +0.1777, +0.2118, -0.1639,
+0.0903, +0.2122, +0.0529, -0.0736, +0.5409, -0.1846, -0.2421,
+0.4487, +0.2775, +0.5984, -0.2293, -0.7922, +0.2889, -0.0407,
-0.8344, -0.1089, +0.3043, -0.0466, -0.1661, -0.5346, +0.6690,
-0.3531, -0.5414, +0.4480, +0.4577, +0.0423, +0.3368, -1.8731,
+0.0042, +0.0487, -0.2900, +0.0695, -0.9501, -0.4540, +0.2717,
+0.3854, -0.0282, -0.1735, +0.0998, +0.0670, -0.3067, +0.2340,
-0.1544, +0.2871, -0.1017, -0.2588, +0.0652, +0.0129, -0.2747,
+0.4155, +0.3013
],
[
+0.2223, -0.9679, +0.0401, +0.2288, +0.2322, -0.5094, +0.1492,
-0.6846, -1.2302, -1.1388, -0.6790, +0.4103, -0.6791, +0.1317,
-0.0930, -0.3675, +0.0592, -0.1658, -0.6330, +0.3715, +0.4643,
+0.1644, -0.1344, +0.4981, +0.1944, -0.8210, +0.0189, -1.2005,
-0.4496, -0.0847, -0.0119, +0.5595, -0.3832, -0.4543, -0.2933,
-0.1109, +0.0720, -0.0590, -0.2404, +0.0776, +0.5052, -0.0757,
-0.0339, -1.3568, -0.4457, -0.1594, -0.9628, -0.2158, -0.3825,
+0.3444, +0.0402, -0.2147, -0.2908, -0.3348, -0.1453, -0.3531,
+0.2931, +0.1324, +0.0639, +0.3645, +0.2742, -0.4940, +0.0532,
-0.1737, -0.0905, -0.3756, +0.1394, +0.4191, +0.3998, -0.4623,
-0.7704, +0.3675, +0.3252, -0.6931, -0.4788, -0.1673, -0.8348,
-0.3218, -0.0130, -0.3295, +0.2809, -0.2567, -0.1624, -0.4012,
+0.4326, -0.3862, -0.6991, +0.1055, +0.7961, +0.0076, -0.2748,
-0.3902, +0.2232, +0.1281, -0.3812, +0.3097, -0.4672, -0.3840,
-0.6258, +0.5342, +0.2299, -0.6102, -0.1629, +0.4811, +0.4830,
+0.4507, +0.2669, -0.6992, +0.2315, +0.2384, +0.2729, +1.0647,
-0.2825, -0.3955, -0.0508, -0.4677, +0.1125, +0.2005, -2.1221,
-0.4147, +0.7121, +0.0425, +0.1868, +0.1919, +0.2679, -0.3342,
-0.0763, +0.1073
],
[
-0.0654, -0.1073, -0.5421, -0.2829, +0.6467, +0.2590, -0.3870,
-0.8888, -0.1084, +0.5274, +0.1781, -0.0704, -0.4214, +0.2349,
+0.2373, -0.5531, -0.2668, -0.6312, -0.2647, +0.0704, -0.4293,
-0.7401, +0.3813, +0.1831, +0.1679, -0.4934, -0.0593, -0.3502,
-0.2770, -0.0854, +0.0259, -0.1256, -0.5697, -0.1773, -0.3519,
+0.4235, -0.0044, +0.5315, +0.2296, +0.3223, -0.2340, +0.3053,
-0.5114, +0.1810, +0.1124, +0.3993, +0.4845, +0.2141, -0.3139,
-0.0677, +0.3592, +0.1048, -1.2924, -1.0374, +0.8679, +0.5891,
+0.3809, -0.3269, +0.0119, -0.1475, -0.0078, +0.1639, +0.4512,
-1.1933, -0.1273, -0.2329, +0.1390, +0.0104, -0.0103, +0.2769,
+0.0931, -0.2808, +0.2999, -0.0588, -0.3744, -0.3424, -0.2338,
-0.2979, -0.3278, +0.3907, +0.2448, +0.0403, +0.1301, +0.0081,
-0.6344, -0.4866, -0.1352, -0.8317, -0.6158, +0.5084, -0.7984,
+0.3205, +0.1955, -0.3610, +0.1183, +0.1329, -0.3594, +0.3290,
-1.1694, -0.2003, +0.0781, -0.0032, +0.4619, -0.1056, -0.7916,
-0.5779, +0.6348, -0.0596, +0.4251, -0.5653, -0.1052, +0.1622,
-0.4602, -0.6598, +0.1516, +0.2682, +0.4577, +0.1530, -0.2461,
+0.0172, -1.3133, -0.0957, -0.1072, -1.1757, +0.2277, +0.2722,
-0.0490, +0.1136
],
[
-0.2706, -1.2512, +0.2696, -1.0084, +0.3117, -0.5319, -0.3465,
-0.0488, +1.0287, +0.1097, -0.3625, -0.0389, -0.2479, -0.3738,
+0.3461, -1.6060, +0.2601, -0.4393, -0.7228, -0.2388, +0.2520,
-0.0927, -0.0280, -0.1029, +0.3711, -0.3085, +0.0205, -0.1936,
-0.9044, -0.0753, -0.4208, -1.5043, +0.0190, -0.2815, -0.1377,
-0.6180, -0.1372, -0.1862, -1.2507, +0.0372, +0.3842, -0.6174,
+0.4050, +0.6584, -0.4990, -0.1768, -0.1160, -0.8179, -0.2810,
-1.1368, -0.0725, +0.2794, -1.3444, -0.7596, -0.7320, +0.0897,
+0.4123, +0.5142, -0.0750, +0.1239, +0.1268, +0.1203, +0.4617,
+0.0976, +0.1864, -0.0440, +0.0886, +0.1883, -0.5202, +0.3451,
-0.3070, +0.2383, -0.0104, +0.2111, +0.3925, -0.7099, +0.3812,
+0.3015, +0.1315, -1.3199, +0.4941, +0.1706, -0.2051, -0.8561,
+0.1534, -0.1699, +0.0855, -0.2844, +0.1083, -0.0424, -0.8068,
+0.3341, +0.2707, -0.7211, -0.5140, +0.1177, +0.1067, +0.2696,
-0.7588, +0.5373, -0.0774, +0.0413, -0.2777, -0.1816, +0.0809,
-0.3440, -0.0609, +0.1262, -0.7086, +0.0860, -0.6973, +0.0434,
-0.0279, -0.7123, -0.2715, +0.0866, -1.0773, -0.6588, -0.5664,
-0.0048, +0.1432, -0.4712, +0.9000, -0.7647, +0.3333, -0.4572,
-0.0580, -0.3291
],
[
-0.1489, +0.5402, +0.6031, -0.2599, +0.3752, +0.4728, +0.1931,
+0.0208, +0.1361, -0.7378, -0.7793, -0.4287, -0.3272, -0.2295,
+0.1028, +0.1304, -1.4411, -0.3225, -0.1984, -0.0057, +0.3592,
-0.0134, +0.3944, -0.0920, +0.3960, +0.0392, -0.5262, +0.3807,
-0.6193, -0.8345, -0.7070, -0.1297, -0.0611, +0.0396, +0.0453,
+0.6339, +0.5192, +0.4388, +0.4164, -0.2803, +0.1011, -0.2051,
-0.0662, +0.1686, -0.0033, -0.2948, +0.4392, +0.3964, +0.0259,
+0.1745, -0.0610, +0.0383, -0.4387, -0.7948, +0.0140, -0.3424,
+0.6568, -0.3117, -0.8600, +0.0046, -0.7629, -0.2124, -0.1618,
-0.0903, -0.0160, -0.1507, -0.3717, +0.4397, -0.5954, +0.0672,
+0.0663, -0.1411, -0.1637, +0.6175, -0.1576, -0.3223, -0.1278,
-0.3660, +0.1637, +0.7346, +0.4341, +0.3055, +0.1026, +0.0186,
+0.0550, +0.2911, -1.2892, -1.2114, -0.1807, -0.0828, -0.2480,
-0.2462, -0.2581, -0.7757, +0.2902, +0.0266, -0.2626, -1.7307,
-0.3781, -0.1001, +0.3482, -0.8434, +0.2130, +0.0520, -1.4175,
+0.1837, -0.2961, -1.0314, -0.7599, +0.3381, +0.6051, +0.5653,
+0.0906, -0.1182, -0.0302, -0.3867, -1.0201, -0.2550, -0.3505,
+0.4122, -0.0611, -0.1809, -1.3066, -0.4740, +0.0075, -0.2133,
+0.0619, -0.1811
],
[
+0.4725, -0.6903, +0.1069, -0.2764, -0.4829, +0.1424, -0.4064,
-0.7198, +0.0647, -0.1901, +0.5348, -0.2738, -0.2990, -0.3179,
-0.0097, -1.5312, +0.5687, +0.1389, -0.4395, -0.3930, +0.5095,
-0.1703, -0.1827, +0.0805, +0.4156, +0.3704, -0.1195, -0.1711,
+0.2741, +0.3656, -0.2913, -0.8413, +0.1128, -0.0996, +0.5234,
+0.1482, +0.4244, -0.4038, -0.2496, -0.0959, +0.0538, -0.4928,
-0.3236, -0.3246, +0.2523, +0.0080, +0.1540, -0.5401, -0.4358,
+0.5393, -0.7050, +0.5095, -0.1597, -0.6963, -0.5342, -0.4408,
+1.3109, -0.1664, -0.3779, -0.9932, +0.2030, +0.9412, -0.3152,
+0.1530, +0.2447, -0.4332, -0.1578, -0.9264, -1.2485, +0.1622,
+0.5076, -0.6892, -0.0615, +0.1607, -1.5503, +0.0253, +0.6621,
+0.5765, -0.7294, -0.3851, -0.2190, -0.7339, -0.7382, -0.1852,
+0.1626, -1.1072, -0.3674, -0.7856, -0.0389, -0.0732, -1.1486,
-0.4915, +0.3573, -0.0292, +0.4741, +0.2513, +0.5534, +0.3612,
-0.8750, -0.8013, +0.4614, -1.1983, -0.0763, +0.5898, +0.5711,
+0.2504, -0.4979, +0.0021, -0.4765, +0.0824, -0.1976, -0.4295,
+0.3984, -1.3600, -0.1005, -0.4039, +0.1267, +0.2414, -0.3044,
-0.9220, -0.8115, +0.2719, +0.0273, +0.2470, -0.3792, -0.3335,
+0.0905, +0.5115
],
[
+0.2076, +0.0608, +0.1159, -0.2067, -0.4900, -0.1953, -0.1437,
+0.4170, -0.0335, -0.0583, +0.0487, -0.3439, -0.5721, +0.3095,
+0.1239, +0.1339, +0.3199, +0.2520, -0.8602, -0.1184, +0.3002,
+0.1484, -0.0931, -0.2876, -0.4837, -0.2327, -0.3427, -0.1833,
-0.2469, +0.2013, +0.3121, +0.2055, +0.2293, -0.5355, +0.1646,
-0.0459, +0.1082, -0.1895, -0.2310, -1.0031, +0.2505, -0.0454,
-0.3693, -0.1980, +0.6906, -0.0510, -0.2170, +0.3851, +0.3530,
-0.2247, -0.1184, -0.1441, +0.6583, +0.1746, +0.1381, +0.3182,
+0.1797, +0.4545, -0.3327, +0.3556, -0.4037, -0.6595, +0.2673,
-0.0412, +0.4172, +0.3935, -0.0065, +0.0560, +0.2428, +0.3260,
-0.6168, -0.0381, -0.6579, -0.2867, -0.1551, +0.3825, -1.0231,
-0.0084, +0.1505, -0.2083, -0.3050, +0.3750, -0.3108, -0.4509,
-0.0301, +0.2131, -0.3729, +0.3262, -0.0628, -0.2456, -0.1833,
-0.1106, +0.2168, -0.3768, +0.2713, +0.5294, -0.2034, +0.0242,
-0.6079, -0.1576, +0.2150, +0.2100, +0.4975, +0.1428, -0.1845,
+0.3099, +0.2005, +0.0964, -0.2774, -0.1429, -0.2301, -0.0242,
+0.3992, -0.2053, +0.1651, -0.0676, -0.0708, -0.1506, -0.5444,
+0.2067, -0.1700, -0.2707, -0.1692, +0.1861, -0.0041, -0.0532,
+0.0183, +0.2384
],
[
-0.6375, +0.3651, +0.2436, +0.1752, -0.1460, -0.0626, +0.4601,
+0.0446, -0.1151, +0.3228, -0.1859, +0.1703, +0.2150, +0.0290,
-0.0694, +1.0209, +0.8811, +0.0272, -0.0669, +0.1925, +0.0989,
-0.8075, +0.1572, -0.3682, -0.0514, -0.0063, +0.2538, -0.4736,
-0.7416, -0.1268, +0.4847, -0.3042, -0.3548, -0.4044, +0.0744,
-0.0751, -0.0986, +0.1080, +0.1435, -0.8579, +0.0235, -0.4297,
+0.2014, -0.0363, +0.1568, -0.1170, +0.3229, +0.4236, +0.2343,
-0.0142, +0.1517, +0.3264, -0.2739, -0.1967, -0.8886, -0.0304,
+0.4113, +0.2752, -0.2778, -0.3894, -0.3228, -0.1496, +0.3176,
-0.0606, +0.6366, +0.0869, -0.4706, -0.0623, +0.4748, -0.4167,
-0.2650, -0.1523, -0.0372, -0.5838, -0.3944, -0.1141, -0.2120,
-0.1428, -0.8031, +0.7924, +0.0753, +0.2618, -0.6083, +0.0661,
+0.0275, +0.0300, +0.2161, +0.7156, +0.1853, +0.0142, +0.7981,
+0.1379, -0.1876, +0.3035, +0.0018, +1.0314, +0.0758, -0.1061,
+0.1913, -0.1958, -0.0590, -0.5442, +0.6021, +0.3196, +0.2609,
+0.1737, -0.0722, -0.2525, -0.4437, -0.0467, +0.2741, -0.0649,
-0.1994, -0.2601, -1.0840, -0.0605, +0.2686, -0.1046, +0.4438,
-0.5226, +0.1733, -0.4228, -0.5853, +0.3934, -0.0476, -0.3616,
-0.2780, -0.1258
],
[
+0.6867, +0.4724, +0.3292, -1.1454, -0.2058, +0.4164, +0.2246,
+0.0381, -0.5152, +0.8548, +0.4807, +0.2806, -0.2856, -0.3538,
+0.2281, +0.9878, -0.2247, +0.6616, -0.1019, +0.3186, -0.6905,
+0.0708, +0.5357, -0.1032, -0.7422, -1.2644, -0.4052, +0.6192,
-0.1329, -0.2649, +0.5382, +0.4912, -1.2289, +0.0261, -0.3315,
+0.0534, -0.5083, -0.1222, +0.4136, -0.4186, -0.6623, +0.0077,
-0.1184, -0.6798, -0.4292, -1.3118, +0.0811, -0.0773, +0.3190,
-0.6695, +0.5506, +0.6003, -0.5128, -0.4308, -0.2475, +0.8333,
-0.7603, +0.1938, +0.3206, +0.1851, -0.0911, -0.2287, +0.6717,
-0.0226, -0.6560, -0.3719, -0.4950, +0.2135, +0.1312, -0.1219,
+0.2059, +0.4192, +0.5512, +0.4458, +0.0162, -0.1117, +0.2476,
+0.3416, +0.1183, +0.1549, -0.2135, -0.6936, +0.3073, +0.3932,
-0.3390, +0.8693, +0.3992, +0.5370, +0.5329, +0.6200, +0.2953,
+0.0928, +0.3588, -1.3759, +0.1482, -0.0664, +0.3160, +0.4801,
+0.4890, +0.3038, -0.6116, +0.1161, -0.3446, -0.3424, -0.1681,
+0.1677, -0.6294, -0.9320, -0.4076, +0.2937, +0.6234, +0.0829,
-0.8659, -0.2892, -0.2668, -0.7057, +0.5845, +0.2654, -0.6165,
+0.0247, -0.2863, +1.0240, +0.0559, -0.2702, +0.2465, -0.3120,
-0.4070, +0.0242
]])
weights_dense2_b = np.array([
+0.0589, +0.2417, -0.0986, +0.1092, +0.0381, +0.2114, +0.2297, +0.1468,
-0.1559, -0.1759, +0.0344, +0.3008, +0.3244, +0.3597, +0.1691, +0.0277,
+0.0064, +0.2372, +0.0961, +0.3293, +0.2077, +0.1230, +0.1195, +0.4294,
+0.2816, +0.1169, +0.4083, +0.0309, +0.0433, +0.4852, +0.1903, +0.1055,
+0.2894, +0.0468, +0.0669, +0.1655, -0.0712, +0.2791, +0.3653, +0.1661,
+0.0730, +0.4481, +0.2336, +0.4547, +0.3048, +0.3674, +0.1072, +0.2519,
+0.0875, +0.1864, +0.0793, +0.1467, +0.1665, -0.0635, +0.2464, +0.2529,
+0.2301, +0.3551, +0.1974, +0.5505, +0.1776, +0.0702, -0.0460, +0.1697,
-0.0007, +0.0430, +0.2190, +0.3566, +0.2163, +0.2470, +0.1928, -0.0373,
+0.3103, +0.1705, +0.0386, +0.1008, +0.0483, -0.0530, +0.0899, +0.1354,
+0.2905, +0.0686, +0.2512, +0.4373, +0.2106, +0.2685, +0.2320, +0.0750,
+0.4447, +0.2390, -0.0136, +0.0224, +0.1565, -0.0579, -0.0142, +0.0710,
+0.2252, +0.1527, +0.3315, +0.1419, +0.3483, +0.3999, +0.2818, +0.1021,
+0.2659, +0.2742, +0.0814, +0.0849, -0.0068, +0.1339, +0.1216, +0.0148,
+0.0219, -0.0728, +0.2957, +0.0854, +0.0055, -0.0289, +0.0866, +0.0203,
+0.0353, +0.2069, +0.3297, +0.1171, +0.0607, +0.1667, +0.5721, -0.0218
])
weights_final_w = np.array(
[[
+0.1312, +0.2606, +0.0873, -0.2167, -0.0211, -0.2425, -0.2365, +0.0256,
-0.0525, -0.1167, +0.0605, +0.1389, -0.0801, -0.2973, +0.1066, +0.0129,
+0.0549
],
[
+0.0107, +0.1381, +0.0126, -0.0227, +0.0100, -0.1605, -0.4347,
-0.2622, -0.3952, +0.1417, -0.0394, -0.0345, -0.1321, +0.0973,
+0.0614, -0.1857, +0.0101
],
[
+0.0489, +0.0031, -0.2181, +0.2105, +0.2968, -0.2446, +0.0639,
-0.2621, -0.0567, +0.0232, +0.1381, +0.2903, -0.1861, -0.4347,
+0.5889, +0.1608, +0.3169
],
[
+0.2935, +0.0971, -0.1493, +0.0123, -0.3232, +0.1984, -0.1904,
+0.0674, -0.1251, -0.1421, -0.2297, +0.1642, +0.4883, +0.1015,
+0.0046, -0.1664, -0.1933
],
[
+0.0655, -0.0136, +0.1110, -0.0768, +0.0236, -0.0335, +0.2260,
-0.4519, -0.0795, -0.3457, +0.0667, -0.3201, +0.3744, -0.0273,
+0.2743, +0.0971, -0.1144
],
[
-0.3984, +0.0525, -0.0927, -0.1104, +0.0242, +0.2294, -0.0730,
-0.2585, -0.1027, -0.1138, -0.0855, +0.2366, -0.1895, -0.1304,
-0.4098, +0.2590, -0.0564
],
[
-0.0326, +0.0244, +0.3413, +0.2870, +0.1252, -0.0921, +0.4718,
+0.1316, -0.0767, -0.1213, -0.0975, +0.0160, -0.0676, +0.0719,
+0.3433, +0.1957, +0.2411
],
[
+0.1805, -0.0909, +0.1841, +0.0092, +0.0334, -0.0274, -0.1892,
-0.1136, -0.1936, -0.0776, +0.0088, -0.5994, -0.0894, +0.0054,
+0.0222, -0.5486, +0.0794
],
[
+0.1231, -0.1501, +0.1568, -0.0432, +0.2135, -0.0308, -0.1665,
+0.1327, -0.0055, +0.0054, -0.2374, -0.1399, +0.5330, +0.3818,
+0.0111, +0.3729, -0.3116
],
[
+0.0443, +0.0425, -0.2333, -0.0758, +0.0445, -0.0364, -0.1385,
+0.4477, +0.2164, +0.0454, -0.1320, -0.3380, +0.5406, +0.0488,
-0.2379, +0.1216, -0.0559
],
[
-0.4501, +0.1921, +0.0992, -0.2644, +0.0216, -0.0723, +0.0213,
-0.1043, -0.0775, +0.2790, -0.2341, +0.0725, +0.0472, -0.0021,
-0.1217, -0.1285, -0.2625
],
[
-0.0878, -0.1506, -0.2460, +0.1033, -0.0043, -0.1535, -0.0529,
+0.1674, +0.0078, +0.3391, +0.8355, +0.0874, -0.0233, +0.1923,
+0.0359, +0.1081, +0.1508
],
[
-0.3796, +0.0832, -0.2270, +0.1005, -0.1703, +0.0832, +0.0064,
+0.1643, +0.0637, -0.1930, +0.0365, -0.0470, +0.1059, -0.1114,
+0.1208, -0.3628, +0.2056
],
[
+0.2360, +0.0879, -0.1799, -0.0360, +0.1466, -0.3555, +0.0957,
-0.0095, +0.2665, +0.5403, -0.5690, -0.0800, -0.3047, +0.1044,
+0.0053, -0.0098, -0.2020
],
[
-0.1021, -0.0576, +0.5277, +0.3862, -0.2923, -0.1565, +0.3422,
+0.2636, +0.1655, +0.1700, +0.1921, -0.6448, -0.2882, +0.2176,
-0.2975, +0.4688, +0.0548
],
[
+0.1350, -0.0520, -0.0243, -0.1268, +0.2160, +0.0838, -0.2554,
+0.1928, -0.0042, -0.0117, +0.3009, -0.3786, -0.3108, -0.0581,
+0.2374, +0.2102, +0.0998
],
[
+0.1258, +0.1091, +0.3406, +0.3366, +0.2269, -0.1391, -0.1218,
+0.1913, -0.0044, +0.1850, +0.2097, -0.2372, -0.2832, -0.0987,
+0.1170, +0.0516, -0.1875
],
[
-0.0902, +0.2603, +0.3307, +0.1615, -0.0565, -0.3079, +0.0315,
+0.0874, +0.0755, +0.7551, -0.1873, +0.2390, -0.1559, +0.0447,
-0.0154, +0.0343, +0.1431
],
[
-0.1310, -0.0000, +0.1157, -0.4601, -0.2381, +0.1276, -0.1355,
+0.0909, -0.2596, +0.4611, +0.5581, +0.1119, -0.2449, -0.1039,
+0.1356, -0.1378, +0.1854
],
[
-0.2234, +0.1488, -0.0868, -0.0221, -0.2624, +0.5707, -0.4488,
-0.0552, +0.1251, +0.0740, +0.0574, +0.3261, +0.0011, -0.0559,
+0.0850, +0.1478, -0.0575
],
[
-0.1547, -0.2551, -0.2269, -0.0155, +0.2671, +0.2239, +0.0401,
-0.0152, -0.2506, -0.0510, -0.0408, -0.1499, -0.3132, -0.0916,
+0.0558, -0.1317, -0.1131
],
[
-0.3986, +0.0452, +0.0059, +0.0143, +0.3271, -0.2273, -0.1940,
+0.1802, -0.0765, -0.3095, +0.0365, +0.0157, +0.1850, -0.2821,
+0.0505, -0.0141, +0.0334
],
[
-0.1405, -0.1715, +0.2472, +0.0586, +0.2633, +0.0251, +0.0151,
+0.5158, -0.0045, -0.0354, +0.3007, +0.3528, +0.1331, -0.0457,
-0.1532, +0.1718, -0.0002
],
[
-0.2526, +0.0306, -0.2338, +0.0557, -0.0416, -0.2174, +0.2027,
+0.1293, +0.0471, -0.4165, -0.0245, +0.0312, -0.2050, +0.2281,
+0.0194, -0.0756, -0.1017
],
[
+0.2395, -0.0035, -0.0206, +0.2011, -0.0853, +0.1665, -0.2279,
-0.2057, +0.0230, -0.2693, -0.0173, -0.0095, +0.1317, -0.1597,
+0.3547, +0.0186, +0.1017
],
[
-0.1807, +0.0229, +0.1314, -0.0384, +0.1618, +0.1776, +0.2824,
+0.1815, -0.0702, +0.1897, +0.1154, -0.1015, +0.3698, +0.0610,
+0.2967, -0.1962, +0.0017
],
[
-0.0074, +0.2953, -0.2820, +0.1022, +0.3167, +0.4200, -0.0643,
+0.1692, +0.1454, -0.4422, -0.0188, +0.0325, -0.0591, +0.0319,
+0.0870, -0.1527, -0.1347
],
[
-0.2557, +0.0643, -0.0250, -0.0358, -0.2493, -0.1554, +0.1479,
+0.2026, -0.3371, -0.1351, +0.1498, +0.0373, +0.4199, +0.0188,
-0.2702, +0.0521, +0.0950
],
[
+0.1081, -0.4271, +0.0690, -0.3374, -0.0935, -0.1386, -0.0240,
-0.0407, +0.4000, +0.1332, -0.2086, +0.7726, +0.0442, +0.0718,
-0.2187, -0.1867, -0.0762
],
[
-0.2350, -0.1500, +0.1550, +0.1218, +0.0389, +0.1064, -0.0495,
+0.1717, -0.0759, -0.2376, -0.2130, -0.0091, -0.0250, -0.2824,
+0.0911, -0.1364, -0.1061
],
[
-0.1212, +0.4204, -0.0501, +0.0208, -0.2520, +0.2076, +0.0444,
+0.1458, -0.0982, -0.0022, +0.2173, +0.2447, -0.0222, -0.0018,
-0.0566, +0.2430, -0.0905
],
[
+0.1643, -0.2400, -0.1019, -0.1235, +0.2953, +0.1080, -0.0416,
+0.0763, -0.1258, -0.1884, -0.3330, +0.2187, -0.2763, -0.1867,
-0.2056, -0.2271, +0.1966
],
[
-0.4632, +0.1782, +0.2223, -0.3054, -0.0924, -0.2057, +0.0089,
+0.2455, +0.3828, +0.1087, -0.1154, +0.0237, -0.3203, -0.0789,
+0.0756, +0.1260, +0.0117
],
[
-0.0527, -0.1818, +0.0755, +0.0768, -0.0594, +0.1384, +0.0534,
-0.5923, +0.4133, +0.1154, -0.2839, -0.0294, +0.0174, +0.1350,
+0.1564, +0.1622, -0.0221
],
[
-0.1845, -0.0040, +0.0629, +0.0127, +0.2480, +0.3868, -0.1440,
+0.2099, +0.0125, -0.1419, -0.0316, -0.1121, -0.1147, +0.1579,
-0.1398, -0.3699, -0.2407
],
[
-0.3024, -0.4112, +0.3517, -0.1744, +0.0442, +0.1175, +0.1120,
+0.0313, -0.1000, -0.1217, -0.3270, +0.1557, +0.0458, +0.0634,
+0.1686, +0.0292, +0.0342
],
[
+0.0055, +0.0122, +0.0775, +0.0206, -0.2969, +0.1358, -0.2017,
-0.0757, -0.3607, -0.2885, -0.3357, +0.0148, -0.0613, -0.3099,
-0.2110, +0.4725, -0.0229
],
[
-0.0870, +0.2184, -0.0565, +0.1654, -0.1285, +1.1648, +0.5389,
-0.2226, -0.2618, -0.4095, -0.3999, -0.1014, +0.0956, +0.1682,
+0.0492, +0.2601, +0.0474
],
[
-0.2767, +0.0553, +0.0306, +0.0024, -0.0094, +0.1389, -0.1430,
+0.0036, +0.4679, +0.1425, -0.2145, -0.0646, +0.0081, -0.0832,
-0.1137, -0.0659, +0.1728
],
[
-0.3456, +0.2478, -0.0933, -0.2658, +0.0159, +0.0989, -0.1268,
-0.1793, -0.2511, -0.1328, -0.2603, -0.1707, +0.1464, +0.2335,
-0.1048, -0.3861, +0.3799
],
[
+0.0906, -0.2603, +0.1179, +0.1688, -0.2237, -0.0822, +0.0288,
+0.2402, +0.0442, +0.1249, -0.2379, +0.0698, -0.0972, -0.0560,
-0.1527, +0.0879, -0.0982
],
[
+0.1656, -0.1126, -0.3280, -0.1454, -0.1580, -0.2798, +0.0980,
+0.1962, +0.0373, -0.3712, +0.6412, -0.0309, +0.0066, +0.0889,
-0.0277, -0.3184, +0.1719
],
[
-0.3050, +0.1695, -0.2984, +0.3151, -0.1415, +0.0532, +0.0595,
+0.2024, +0.1260, +0.1307, -0.0846, -0.0340, +0.0356, +0.2450,
-0.0064, +0.3211, +0.1223
],
[
+0.2064, +0.0314, -0.7182, -0.0014, +0.4533, -0.0736, -0.4364,
+0.1237, +0.1503, +0.1236, -0.0738, +0.2819, +0.2560, -0.1169,
+0.1758, -0.4164, +0.0911
],
[
+0.1323, +0.0817, -0.0994, -0.3297, +0.0779, -0.0238, +0.1504,
+0.0698, +0.1012, +0.1071, -0.5144, -0.1897, -0.1890, -0.0724,
-0.0617, -0.0274, +0.2011
],
[
-0.0281, -0.2267, -0.3032, +0.2410, +0.0516, -0.2491, -0.3050,
-0.1812, +0.2029, +0.3053, +0.4205, -0.2745, -0.0210, -0.0765,
-0.0071, -0.1102, -0.1442
],
[
+0.0010, -0.0779, +0.3468, +0.4334, +0.1432, +0.1585, -0.1131,
-0.0239, -0.0272, -0.0349, +0.0364, +0.2644, -0.3331, +0.1804,
-0.5429, -0.2642, +0.2148
],
[
-0.1210, +0.6697, -0.0792, -0.2096, -0.0938, +0.1053, -0.3676,
+0.1888, +0.0033, +0.0138, -0.1001, +0.0124, -0.4953, -0.2625,
+0.4315, +0.1741, +0.0748
],
[
+0.3386, -0.0625, +0.2074, -0.0791, +0.4092, -0.1314, +0.0619,
+0.0787, -0.0088, +0.0886, -0.1326, +0.4888, -0.1590, -0.0694,
+0.3429, -0.1274, +0.0160
],
[
-0.0111, -0.2156, +0.1449, +0.2451, +0.1138, +0.0158, -0.5589,
-0.2248, -0.1329, -0.4511, +0.1869, -0.0745, -0.3610, +0.1168,
+0.1185, +0.3343, -0.0786
],
[
-0.1221, -0.4784, -0.2020, -0.1611, -0.0048, +0.1330, -0.3143,
+0.0581, -0.0509, -0.4198, +0.2756, +0.1595, +0.3195, +0.0522,
-0.0918, +0.1170, -0.0423
],
[
+0.1040, +0.0353, -0.3103, +0.0942, +0.0581, +0.0454, -0.1175,
-0.0464, +0.0016, +0.3844, +0.4261, -0.2159, +0.0670, -0.0158,
-0.1561, +0.2365, -0.1176
],
[
-0.1606, -0.4301, +0.0624, -0.4848, +0.0412, -0.3168, -0.0772,
-0.0698, -0.2040, +0.3138, -0.2606, -0.0569, +0.1970, +0.1635,
+0.1485, -0.0893, -0.0093
],
[
-0.0691, +0.7319, -0.1740, +0.1397, +0.0422, +0.1841, +0.0551,
-0.1779, +0.0263, +0.2575, -0.1547, -0.0804, -0.0104, +0.1240,
+0.0396, +0.5363, -0.0852
],
[
-0.0874, +0.0740, -0.2354, +0.0765, +0.2228, -0.2960, -0.2207,
-0.1744, -0.0959, +0.2325, -0.5918, +0.0846, -0.0356, +0.0450,
-0.2203, +0.1262, -0.1228
],
[
-0.3480, +0.1078, +0.0365, +0.4309, -0.1477, +0.0616, -0.0769,
-0.0193, +0.0070, -0.1749, +0.2338, +0.0302, +0.2317, +0.0679,
+0.0253, +0.2287, -0.0449
],
[
-0.0917, -0.1148, -0.5216, +0.5428, +0.0322, -0.0604, -0.1308,
+0.1717, +0.2328, +0.1074, +0.0431, -0.3477, -0.1743, -0.0226,
+0.3206, +0.1525, +0.0347
],
[
+0.1605, +0.0369, +0.2022, +0.0722, -0.2906, +0.3964, +0.7451,
+0.0630, +0.0663, -0.0819, +0.0415, +0.1646, -0.1499, -0.1255,
+0.1425, -0.1912, -0.0421
],
[
+0.0196, +0.0653, -0.1871, -0.3252, -0.4638, +0.4087, -0.2621,
-0.7069, +0.1975, -0.3065, +0.2014, -0.0236, -0.0719, +0.2058,
-0.0210, -0.1895, -0.0760
],
[
+0.1401, +0.2856, -0.0084, -0.4474, -0.1189, +0.1954, +0.1608,
-0.1745, -0.4177, -0.3583, -0.2078, +0.0498, -0.1714, -0.0160,
-0.0649, -0.1105, +0.1325
],
[
+0.0479, +0.2704, -0.0470, +0.3404, -0.1584, -0.0478, -0.0354,
-0.0816, +0.3430, +0.1074, +0.2332, -0.0058, -0.2567, -0.0425,
+0.1168, -0.3646, -0.0250
],
[
+0.4094, -0.5041, +0.2346, -0.1907, +0.2669, -0.2752, -0.1550,
-0.1642, +0.3191, -0.0082, +0.0502, -0.2870, -0.1683, -0.0697,
-0.3527, +0.3189, +0.1332
],
[
-0.2716, -0.0033, -0.2637, +0.0069, -0.2129, +0.2672, +0.0694,
-0.0430, +0.0637, -0.0555, -0.3187, +0.2670, -0.0308, +0.1458,
+0.2477, -0.2574, +0.0914
],
[
+0.2928, -0.0535, -0.0691, -0.0436, +0.0613, -0.0920, -0.1360,
+0.5571, +0.0624, +0.0410, -0.1896, +0.4296, +0.0266, +0.2580,
-0.2728, +0.0482, +0.0613
],
[
+0.0488, +0.2622, +0.1998, +0.2080, +0.1624, +0.2532, -0.2194,
+0.4212, -0.2253, +0.1078, -0.1612, +0.0316, -0.1959, +0.0955,
+0.1069, +0.2729, +0.0990
],
[
+0.2588, +0.0617, -0.0078, -0.3274, +0.1988, +0.0345, -0.0577,
+0.0777, +0.1154, +0.7071, -0.0287, +0.1041, +0.2812, -0.0966,
+0.1488, +0.1547, -0.0200
],
[
+0.1464, +0.1786, +0.1756, -0.0633, -0.1991, +0.0583, +0.3696,
-0.0610, -0.2088, +0.6352, -0.2822, -0.0619, +0.0712, +0.0683,
+0.0591, -0.1966, +0.0573
],
[
-0.0289, +0.5961, +0.2250, +0.1727, -0.1021, +0.0805, -0.6852,
-0.1022, +0.4412, -0.1102, -0.0573, +0.0108, +0.4795, -0.1854,
+0.1619, -0.3180, +0.0826
],
[
-0.1803, +0.0848, -0.1195, +0.2291, +0.0999, +0.1137, -0.2016,
+0.5933, -0.0431, +0.0807, -0.1362, -0.2669, -0.2680, +0.0616,
+0.3381, -0.1365, -0.0133
],
[
-0.0019, +0.1816, -0.2680, +0.0490, -0.1431, -0.0349, +0.1846,
+0.1512, -0.1755, +0.2003, -0.2616, -0.1497, +0.0726, -0.0063,
-0.2029, -0.3561, -0.1127
],
[
-0.0543, -0.0786, -0.1657, +0.3412, +0.2236, -0.0256, +0.1423,
-0.2476, -0.0579, -0.0400, +0.3387, +0.1397, +0.0693, -0.0978,
-0.0721, +0.0494, -0.1215
],
[
+0.3175, -0.0093, -0.0200, -0.1039, +0.1993, -0.2639, +0.2093,
-0.2860, -0.0647, +0.0382, -0.1736, +0.1920, +0.0989, -0.0879,
+0.3887, +0.1490, -0.2475
],
[
+0.1223, -0.0573, -0.1927, +0.0222, -0.2032, -0.1156, +0.1203,
+0.1252, +0.3434, -0.0309, +0.1683, -0.0745, -0.2512, -0.0162,
-0.2847, +0.0018, +0.2597
],
[
-0.2100, -0.0401, +0.3119, +0.0271, +0.0367, -0.2558, +0.3757,
+0.2703, -0.1604, -0.1646, -0.0853, -0.1255, +0.1305, +0.0554,
-0.0657, +0.3253, -0.2175
],
[
-0.1669, -0.1024, +0.3036, +0.1949, -0.4268, +0.2212, +0.2057,
-0.1204, +0.0223, -0.0924, +0.0152, -0.2183, +0.0476, -0.0045,
-0.1289, +0.0776, +0.1919
],
[
-0.1015, +0.1058, +0.3162, +0.2942, +0.2410, +0.1193, +0.1045,
-0.1116, -0.0550, +0.0776, -0.2475, +0.2624, +0.0536, -0.0720,
+0.1367, -0.1158, +0.1027
],
[
-0.3849, +0.3457, +0.0135, +0.0063, +0.2045, -0.0144, -0.0950,
+0.1271, +0.4600, -0.0311, +0.1044, -0.2487, +0.1230, +0.0327,
-0.3978, -0.3512, +0.2497
],
[
+0.2812, -0.0669, +0.1645, -0.0291, -0.0164, -0.1144, +0.2963,
-0.0595, -0.1484, +0.1771, -0.0079, -0.5441, +0.0339, +0.3036,
-0.2279, +0.1066, -0.2468
],
[
-0.0718, +0.5449, -0.0494, +0.1283, +0.2544, +0.1221, +0.0988,
+0.0028, +0.0135, +0.0904, +0.1348, +0.1037, -0.0058, +0.0007,
-0.0455, +0.1696, +0.1674
],
[
-0.2138, -0.3133, -0.3800, +0.0846, -0.0372, -0.2185, +0.0951,
+0.2048, +0.1555, +0.1266, +0.0414, +0.2256, +0.0326, -0.0332,
-0.0807, -0.3547, +0.2416
],
[
-0.0868, -0.0794, -0.2556, -0.3129, +0.0309, +0.1684, +0.3753,
+0.1522, +0.2974, -0.2167, +0.0158, +0.2495, +0.0596, -0.1184,
+0.0521, +0.2815, +0.1270
],
[
+0.0900, -0.1678, -0.0648, -0.0243, -0.1684, -0.2439, +0.0659,
+0.3151, -0.4868, +0.0200, -0.0563, -0.0812, +0.2157, -0.1118,
-0.0546, +0.1121, +0.1330
],
[
-0.1735, -0.0359, +0.1421, -0.0054, +0.1613, -0.0438, +0.7337,
-0.2124, +0.0604, +0.0033, +0.1450, -0.0176, -0.2187, -0.0204,
-0.2695, +0.0197, -0.0461
],
[
+0.0981, +0.2768, +0.0884, +0.5310, +0.1594, +0.4027, +0.1326,
-0.3091, +0.3588, -0.3596, +0.2099, +0.1202, -0.2811, -0.2679,
-0.3697, -0.1143, +0.0364
],
[
+0.1433, -0.1962, +0.1004, -0.0014, -0.1924, -0.2953, +0.0410,
+0.3597, +0.2484, +0.0705, -0.1239, +0.1030, +0.2636, +0.1599,
+0.0982, -0.0888, +0.0597
],
[
-0.0233, +0.0115, -0.6757, +0.2189, -0.0165, -0.4398, +0.5602,
+0.1727, -0.3657, +0.4095, +0.1018, +0.1222, -0.0591, -0.0114,
+0.2174, +0.2068, -0.2059
],
[
+0.1465, +0.3763, +0.2525, -0.0040, +0.1222, +0.0591, -0.2716,
-0.3108, +0.3361, +0.2440, +0.1371, +0.1249, -0.1091, +0.2130,
+0.4761, -0.0394, -0.1550
],
[
+0.0480, -0.4882, -0.0725, -0.3144, -0.2882, -0.0517, -0.0909,
+0.1522, -0.0457, -0.1458, -0.2927, +0.0594, -0.4833, -0.4030,
+0.1000, +0.0829, -0.1583
],
[
+0.2517, -0.1086, +0.2060, +0.1727, +0.0902, -0.1455, +0.1913,
-0.3011, +0.4524, -0.2250, -0.3558, -0.3009, -0.0365, -0.0636,
+0.0852, +0.1678, -0.0045
],
[
-0.0800, +0.2266, -0.0954, +0.0206, +0.1473, +0.6583, -0.4648,
+0.1038, +0.1741, -0.3025, +0.0773, +0.1044, +0.0888, -0.2105,
+0.1827, +0.0543, -0.0055
],
[
-0.0999, +0.0095, -0.0853, +0.0084, -0.2591, -0.0105, -0.3086,
-0.4188, -0.1658, +0.4141, +0.4294, -0.0325, +0.3242, -0.2091,
-0.2607, +0.1492, +0.1504
],
[
+0.0317, -0.0307, +0.3815, +0.0595, -0.1011, -0.0057, -0.1609,
-0.5363, -0.1927, +0.0689, -0.0432, +0.1582, +0.1995, +0.0791,
-0.0799, -0.0426, -0.0398
],
[
+0.2850, +0.2052, -0.0389, -0.0705, +0.3961, +0.0547, +0.0385,
+0.2505, +0.0714, -0.0159, +0.0321, +0.0161, +0.1245, -0.1221,
-0.2063, +0.0359, +0.0904
],
[
+0.2388, -0.0879, +0.0303, -0.1298, -0.2066, +0.2349, -0.1669,
-0.0393, +0.0557, -0.0419, -0.0636, -0.3270, -0.1898, +0.1185,
-0.1003, +0.2182, -0.1358
],
[
+0.1651, -0.2028, -0.3384, -0.5319, +0.2690, +0.0798, +0.3677,
+0.2660, +0.1497, +0.1026, -0.1128, +0.3130, +0.2733, +0.1554,
-0.1325, -0.1619, -0.0860
],
[
+0.1536, +0.0465, -0.1248, -0.0063, -0.1992, +0.0119, +0.0328,
+0.1646, -0.3838, +0.1776, -0.1014, +0.3482, +0.0298, +0.0296,
+0.1838, +0.1373, -0.1523
],
[
+0.0442, +0.1129, -0.3831, -0.1119, +0.0817, +0.1744, +0.2670,
+0.0339, +0.1102, -0.1592, -0.1006, -0.4853, +0.2444, +0.0459,
-0.1019, -0.1361, -0.0604
],
[
+0.1310, -0.0497, +0.2974, +0.2772, -0.2771, -0.2194, -0.0129,
-0.0623, +0.0006, -0.0298, +0.1518, +0.0271, +0.1619, -0.1267,
-0.1727, -0.1988, -0.1210
],
[
+0.3556, -0.3053, -0.3899, +0.2106, -0.1380, -0.2564, -0.0534,
-0.3945, -0.0198, +0.0277, +0.1276, -0.0327, +0.4129, +0.1444,
-0.3541, +0.2025, -0.0395
],
[
+0.1271, -0.1985, +0.3796, +0.0934, +0.2144, +0.0129, -0.2250,
-0.2218, -0.0024, +0.2304, -0.2798, +0.0901, -0.2428, +0.2513,
+0.1104, +0.2521, -0.0836
],
[
-0.0790, +0.3823, -0.1396, -0.3351, -0.1272, -0.2597, +0.0591,
+0.1499, -0.2298, +0.6025, +0.7618, -0.2407, +0.0333, -0.0403,
-0.0925, -0.3186, -0.1176
],
[
-0.1944, -0.2272, +0.3309, -0.0803, +0.3230, -0.1632, +0.6171,
-0.0997, +0.3684, -0.1963, -0.0740, +0.1319, +0.2760, -0.2500,
-0.0141, -0.3381, +0.0988
],
[
+0.2057, -0.1338, -0.1396, +0.1905, -0.0804, +0.0064, -0.1375,
+0.0256, +0.1821, +0.1315, +0.0442, +0.0899, -0.0152, -0.0606,
+0.1378, -0.5944, +0.0902
],
[
+0.3549, -0.1397, -0.1932, +0.0118, +0.0244, +0.0824, +0.1821,
-0.0512, -0.2780, -0.0666, +0.0240, -0.1947, -0.1455, +0.0940,
+0.0609, -0.2599, -0.0573
],
[
+0.0848, +0.0130, -0.3703, +0.1482, -0.0567, -0.2191, +0.0100,
-0.0555, -0.1383, +0.2142, -0.2411, -0.1008, -0.0247, -0.0685,
-0.6440, +0.5464, -0.2950
],
[
+0.0558, +0.0309, +0.2263, +0.4103, +0.0554, +0.3598, +0.1403,
+0.0423, -0.2540, -0.2593, +0.1508, +0.1841, -0.2731, +0.0649,
+0.3406, -0.1161, +0.2166
],
[
+0.0934, -0.2574, +0.1155, -0.0240, +0.0258, +0.1470, -0.0135,
+0.4334, -0.0167, -0.2294, -0.1428, -0.3680, -0.2007, -0.0010,
+0.1405, -0.4045, -0.0752
],
[
+0.3774, -0.3684, -0.1185, +0.3444, -0.0005, -0.2114, +0.1939,
-0.3012, +0.1912, +0.1980, +0.0747, +0.3322, +0.1122, -0.2042,
-0.2741, +0.1889, -0.0508
],
[
+0.1951, +0.0460, -0.2167, -0.2370, +0.1496, -0.2761, +0.2644,
-0.0282, -0.0858, -0.0368, +0.4173, -0.0596, +0.3189, +0.3252,
+0.3012, +0.2653, -0.1094
],
[
-0.0618, -0.1060, -0.0063, -0.1816, -0.0928, +0.1112, +0.2248,
-0.0704, +0.1565, -0.1376, -0.1280, -0.1405, -0.4444, -0.0081,
-0.3809, -0.2760, +0.2826
],
[
-0.2150, -0.2761, -0.4387, +0.0652, -0.0250, -0.1408, -0.1410,
-0.0401, -0.2530, -0.1720, -0.1383, +0.0815, +0.1345, +0.1094,
+0.1165, -0.1125, -0.0680
],
[
+0.4100, -0.2501, -0.1091, -0.1421, -0.1276, -0.0343, -0.2467,
+0.5050, -0.1084, +0.2873, +0.2955, -0.0441, +0.0624, -0.3208,
-0.1248, -0.2148, +0.0624
],
[
+0.2167, -0.2099, +0.1943, -0.0190, +0.1348, +0.0457, -0.0756,
-0.1493, -0.2804, +0.0296, -0.1222, -0.2076, +0.1460, +0.0056,
+0.0618, +0.0620, +0.1078
],
[
-0.1699, +0.1200, +0.0562, -0.2747, +0.2608, -0.3566, -0.2460,
-0.2062, +0.5545, -0.0188, +0.3313, -0.1312, -0.7428, -0.5009,
-0.6418, -0.0003, +0.2460
],
[
-0.2792, +0.1021, -0.2467, -0.0577, -0.1567, +0.0156, +0.0434,
-0.2623, +0.0924, +0.0685, +0.2042, +0.0532, +0.1473, -0.1451,
-0.0592, -0.1645, +0.1258
],
[
+0.0919, -0.0480, +0.3094, +0.1917, +0.0822, +0.0892, +0.0365,
-0.0325, +0.1961, -0.2383, -0.0073, +0.0189, +0.2700, +0.2116,
-0.2724, -0.1682, -0.1288
],
[
+0.1483, +0.0807, +0.2281, -0.3101, -0.0014, +0.0484, -0.2612,
-0.0005, +0.0087, -0.1544, +0.1201, +0.1475, -0.1714, +0.0190,
-0.2971, -0.1113, -0.4718
],
[
-0.1700, -0.1545, +0.2127, -0.1944, +0.3472, +0.2332, +0.2231,
+0.0469, +0.2023, -0.0298, -0.0297, +0.0597, -0.3039, +0.0959,
+0.2502, +0.2281, -0.0078
],
[
+0.3397, +0.2010, +0.6118, +0.2780, -0.0135, +0.4304, -0.2720,
-0.6300, +0.3764, +0.1227, -0.0925, +0.1188, -0.5911, +0.1235,
-0.1578, -0.4889, -0.1543
],
[
+0.4581, +0.2872, +0.1375, +0.2818, +0.2036, -0.0369, +0.0807,
-0.0667, -0.2801, -0.0582, +0.1936, +0.1047, -0.1245, -0.1259,
-0.1373, -0.1140, +0.0582
],
[
+0.2535, +0.5913, +0.2155, -0.0587, -0.0918, -0.0362, +0.7135,
+0.3591, +0.4240, +0.3692, -0.0313, -0.2431, +0.9143, -0.0241,
-0.6210, +0.4646, -0.1512
],
[
+0.3188, +0.2507, -0.2129, -0.4617, +0.1874, -0.1286, +0.0632,
-0.2470, -0.0572, +0.6183, +0.3531, +0.1321, -0.1687, +0.1307,
-0.3712, +0.0199, -0.0776
],
[
-0.1779, +0.3660, +0.2386, +0.2297, +0.1792, -0.1369, +0.2320,
-0.5867, -0.1306, +0.3471, -0.1127, +0.3352, -0.0214, +0.0801,
-0.1368, -0.7262, +0.1757
],
[
+0.0183, -0.0485, +0.1199, +0.2513, +0.2022, -0.0472, +0.3076,
+0.0656, -0.3302, -0.0587, -0.0200, -0.5474, -0.0863, +0.2087,
-0.2581, -0.1750, +0.3391
],
[
-0.2339, +0.0639, +0.2085, -0.1872, -0.2686, +0.1490, +0.0436,
-0.0841, -0.1054, -0.4899, +0.0193, -0.0250, +0.2212, +0.0972,
+0.0329, +0.1328, -0.0545
],
[
-0.0874, -0.1087, -0.1577, +0.0936, -0.3083, -0.1450, +0.0365,
-0.1924, -0.0536, +0.0475, +0.0348, +0.0772, +0.0348, -0.2800,
+0.0864, +0.2640, -0.0215
],
[
+0.0745, +0.4503, -0.4064, +0.6300, -0.1275, -0.0463, -0.4074,
+0.0031, +0.1408, +0.0531, +0.1400, +0.0308, -0.0220, -0.0014,
-0.3056, -0.1551, -0.0096
],
[
+0.1479, +0.1186, +0.1323, -0.3466, -0.0654, -0.1084, -0.2509,
+0.0944, -0.2135, +0.2020, +0.0602, -0.1239, +0.0741, +0.2037,
-0.4462, +0.1065, +0.1710
]])
weights_final_b = np.array([
-0.0274, +0.1314, -0.0578, +0.2965, +0.1318, -0.0622, +0.1158, +0.0643,
+0.2138, -0.1422, +0.1579, +0.0836, -0.0388, -0.0933, +0.2233, -0.2276,
+0.0375
])
# yapf: enable
if __name__ == "__main__":
demo_run()
| [
"[email protected]"
] | |
eaf3840aa3f8986f9ac5af4ac914a14e080bd347 | cc7ad1a2aa5d691c15ff7838d1e5126ab2c2bee0 | /basic_notifications/views.py | b7e1ecc497a68ddf9693738e0e033c9b746371b7 | [] | no_license | demirantay/lingooapp | 9632be8a7d3dd00e7a4ac13618f32975da389729 | c842bb032668ef1bd5e7f4282acd4990843c8640 | refs/heads/master | 2023-03-14T08:00:37.681334 | 2021-01-09T09:36:48 | 2021-01-09T09:36:48 | 285,181,982 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,688 | py | # Main Imports
import json
# Django Imports
from django.shortcuts import render, get_object_or_404, HttpResponse
from django.http import HttpResponseRedirect
from django.core.exceptions import ObjectDoesNotExist
from django.core.files import File
from django.contrib.auth.models import User
from django.utils import timezone
# My Module Imports
from .models import NotificationBase
from profile_settings.models import BasicUserProfile
from teacher_authentication.models import TeacherUserProfile
from utils.session_utils import get_current_user, get_current_user_profile
from utils.session_utils import get_current_teacher_user_profile
from utils.access_control import delete_teacher_user_session
def notifications(request, page):
"""
    On this page the user can see her notifications.
"""
# Deleting admin-typed user session
# Deleting programmer-typed-user session
# Deleting Teacher-typed user sessions
# ACCESS CONTROL
delete_teacher_user_session(request)
# Get the current users
current_basic_user = get_current_user(request, User, ObjectDoesNotExist)
current_basic_user_profile = get_current_user_profile(
request,
User,
BasicUserProfile,
ObjectDoesNotExist
)
# Getting the current teacher profile
current_teacher_profile = get_current_teacher_user_profile(
request,
User,
TeacherUserProfile,
ObjectDoesNotExist
)
# Get all of the notifications
try:
all_notifications = NotificationBase.objects.filter(
notified_user=current_basic_user_profile
).order_by("-id")
except ObjectDoesNotExist:
        all_notifications = []  # keep it iterable for the loops below
    # Get the notifications for this page
    # Each page holds 80 entries, so the slice bounds below are the page
    # number multiplied by 80 and that offset plus 80.
current_page = page
previous_page = page-1
next_page = page+1
post_records_starting_point = current_page * 80
post_records_ending_point = post_records_starting_point + 80
try:
current_page_notifications = NotificationBase.objects.filter(
notified_user=current_basic_user_profile
).order_by('-id')[post_records_starting_point:post_records_ending_point]
except ObjectDoesNotExist:
current_page_notifications = None
# check if the user has unread notifications
has_unread_notifications = False
for notification in all_notifications:
if notification.is_read == False:
has_unread_notifications = True
break
else:
continue
    # Since the page has been visited, mark all of the notifications as read
current_unread_notifications = {}
for notification in all_notifications:
if notification.is_read == False:
current_unread_notifications[notification.id] = False
notification.is_read = True
notification.save()
else:
pass
data = {
"current_basic_user": current_basic_user,
"current_basic_user_profile": current_basic_user_profile,
"current_teacher_profile": current_teacher_profile,
"all_notifications": all_notifications,
"has_unread_notifications": has_unread_notifications,
"current_page": current_page,
"previous_page": previous_page,
"next_page": next_page,
"current_page_notifications": current_page_notifications,
"current_unread_notifications": current_unread_notifications,
}
if current_basic_user == None:
return HttpResponseRedirect("/auth/login/")
else:
return render(request, "basic_notifications/notifications.html", data)
| [
"[email protected]"
] | |
7ec56d1dfd873785b0db9c891aacd95142031aa1 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /sQN3Jb43teMbC7rGJ_18.py | 795c8a4678747c53bbb24bcd6b59c6e238410b4e | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py |
def make_transpose(m):
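  # Element (i, j) of m becomes element (j, i) of the transposed matrix tm.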
dm = len(m)
dn = len(m[0])
tm = [[0] * dm for i in range(dn)]
for i in range(dm):
for j in range(dn):
tm[j][i] = m[i][j]
return tm
| [
"[email protected]"
] | |
c2ea0ec2e21e9047ed990c7351593ad82edc44ad | 536bce6ca78a9a151247b51acb8c375c9db7445f | /chapter1/1.5-interest_rate.py | 15aba2121680fc7d7fffc673afd05db59b2923ce | [] | no_license | clicianaldoni/aprimeronpython | 57de34313f4fd2a0c69637fefd60b0fb5861f859 | a917b62bec669765a238c4b310cc52b79c7df0c9 | refs/heads/master | 2023-01-28T18:02:31.175511 | 2023-01-23T08:14:57 | 2023-01-23T08:14:57 | 112,872,454 | 0 | 0 | null | 2017-12-02T19:55:40 | 2017-12-02T19:55:40 | null | UTF-8 | Python | false | false | 464 | py | p = 5 # Interest rate %
A = 1000 # Initial amount
years = 3 # Number of years to grow
# Formula for calculating sum: A(1 + p/100)^n
# To avoid integer division we convert p to float
sum = A * (1 + (float(p)/100))**years
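# Worked example: 1000 * 1.05**3 = 1157.625, shown rounded to 1157.63 in the sample run below.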
print("After %g years with %g%% interest rate and an initial amount of %g we have %g." % (years, p, A, sum))
"""
Unix>python interest_rate.py
After 3 years with 5% interest rate and an initial amount of 1000 we have 1157.63.
"""
| [
"[email protected]"
] | |
e57bf9dec7e340b0469004ecf5111d0ea081f482 | 674f5dde693f1a60e4480e5b66fba8f24a9cb95d | /armulator/armv6/opcodes/concrete/ldc_ldc2_immediate_a2.py | c145465c0a7b80a8b878d200a1c3998d5b55001d | [
"MIT"
] | permissive | matan1008/armulator | 75211c18ebc9cd9d33a02890e76fc649483c3aad | 44f4275ab1cafff3cf7a1b760bff7f139dfffb07 | refs/heads/master | 2023-08-17T14:40:52.793120 | 2023-08-08T04:57:02 | 2023-08-08T04:57:02 | 91,716,042 | 29 | 7 | MIT | 2023-08-08T04:55:59 | 2017-05-18T16:37:55 | Python | UTF-8 | Python | false | false | 788 | py | from armulator.armv6.arm_exceptions import UndefinedInstructionException
from armulator.armv6.bits_ops import substring, bit_at
from armulator.armv6.opcodes.abstract_opcodes.ldc_ldc2_immediate import LdcLdc2Immediate
class LdcLdc2ImmediateA2(LdcLdc2Immediate):
@staticmethod
def from_bitarray(instr, processor):
imm8 = substring(instr, 7, 0)
coproc = substring(instr, 11, 8)
rn = substring(instr, 19, 16)
index = bit_at(instr, 24)
add = bit_at(instr, 23)
wback = bit_at(instr, 21)
if substring(coproc, 3, 1) == 0b101:
raise UndefinedInstructionException()
else:
imm32 = imm8 << 2
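            # LDC/LDC2 (immediate) scales the 8-bit offset by 4: imm32 = imm8 * 4.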
return LdcLdc2ImmediateA2(instr, cp=coproc, n=rn, add=add, imm32=imm32, index=index, wback=wback)
| [
"[email protected]"
] | |
fea4d5a004cb0d120f3829c1fa2cbf4b2df64e17 | 046333321b2717c6391a111fc2f74b04bbbeb7af | /chapter13(enumrate function)/sorted.py | cbe84261ffe34d30f366d660bdb7c5115a530460 | [] | no_license | jyash28/Python-practice | b0c9df42bc93716d8721a1420ee1f3170b40b18c | cd3a61934618145cbaa20e62194ebb1642ba9941 | refs/heads/main | 2023-07-03T18:06:38.407491 | 2021-07-13T09:47:07 | 2021-07-13T09:47:07 | 314,485,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | guitars= [
{"model1" : 'famaha f310' ,"price": 8400},
{"model2" : 'faith neptune' ,"price": 100000},
{"model3" : 'faith appolo venus' ,"price": 35000},
{"model4" : 'taylor' ,"price": 450000}
]
sorted_guitars = sorted(guitars, key= lambda d: d["price"],reverse = True)
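# sorted() returns a new list; reverse=True puts the most expensive guitar first.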
print(sorted_guitars) | [
"[email protected]"
] | |
cee7caced2bc83a749cecf518d0afbeac3bf528e | 747f759311d404af31c0f80029e88098193f6269 | /addons/project_timesheet_contract/project/__init__.py | 34aa344afd62fd26763d265b1313036fe1245e01 | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | /home/openerp/production/extra-addons/project_timesheet_contract/project/__init__.py | [
"[email protected]"
] | |
e22cf41bebc21fe5ea70c17604946adc4fe9a69e | ef5bde73d58734f5081f127fe344ae85c53b8b68 | /config_modify.py | 8c8255c6e3156d5372724911ccee779d14d2e548 | [] | no_license | ychnlgy/VoxCeleb1 | a3a6337f322ec1c78f926e2f529db001f7ec8349 | 930ce2c5c9f0828705afb096c7ee33bfe4b6b96e | refs/heads/master | 2020-06-11T10:40:35.462721 | 2019-07-09T16:42:24 | 2019-07-09T16:42:24 | 193,934,200 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | import argparse
import voxceleb1
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--path", required=True)
args = parser.parse_args()
config = voxceleb1.training.Config(args.path)
del config.param_dict["_dob"]
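    # Drop the internal "_dob" entry so it is not re-emitted as a command-line flag below.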
kvs = ["--%s %s" % item for item in config.param_dict.items()]
print(" ".join(kvs))
| [
"[email protected]"
] | |
c33973915a1487aa198d9586d9ef07976496fe35 | 9c6dcd6964c0bbbc960106736a3adf83f99ae613 | /Balatarin/bipartiteMongo.py~ | 0ac84299fccd071931a5ee43aa4271ca00d40bdf | [] | no_license | Roja-B/Trajectories | 5ab065991c34ba74b6951ad090401c0cb14f222b | e1ce1c6ac8095f92853e0ebe7a41eb8a82e7eff2 | refs/heads/master | 2016-09-05T17:56:45.643404 | 2013-01-24T03:54:21 | 2013-01-24T03:54:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,701 | #!/usr/lib/python3.0
# This program extracts bipartite edgelist of users and links belonging to a specific time window (both the link and the votes should come from that time window)
# Author: Roja Bandari
# October 2012
from pymongo import Connection
from PARAMETERS import *
import datetime
import time
import sys
#sDate = sys.argv[1]
#delta = sys.argv[2] # in days
#sYear = int(sDate.split('/')[2])
#sMonth = int(sDate.split('/')[0])
#sDay = int(sDate.split('/')[1])
begin = datetime.datetime(2006,9,1)
end = datetime.datetime(2006,11,25)
startDate = begin
difference = datetime.timedelta(days=WINDOW)
slidingWindow = datetime.timedelta(days=SLIDE)
t1 = time.time()
connection = Connection()
balatarindb = connection.Balatarin
links = balatarindb.links
votes = balatarindb.votes
log = open("mongoError.log","a")
while startDate < end:
endDate = startDate + difference
bgraphname = "".join(["bipartite_politics_",str(startDate.month),"_"+str(startDate.day),"_"+str(startDate.year),"_"+str(WINDOW),"_days"])
print bgraphname
f = open(PATH+"/bipartite/"+bgraphname+".txt","w")
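    # Keep only votes on links that were themselves posted inside the window and fall in category "4" (politics, per the output filename).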
for vote in votes.find({"date":{"$gte":startDate,"$lt":endDate}}):
# print vote["linkID"]
linkID = vote["linkID"]
link = links.find_one({"linkID":linkID})
try:
if link["date"] < startDate : continue
except:
log.write(linkID+'\n')
continue
if link["category"] == "4":
f.write(vote["userID"]+'\t'+vote["linkID"]+'\n')
f.close()
startDate += slidingWindow
t2 = time.time()
print "Time Spent: "+str((t2-t1)/60)+" minutes.\n"
log.close()
| [
"[email protected]"
] | ||
a9520d4013f01df3a621233c6de34a7732d48832 | 2a05456121813e2c5c3a0e9a88c0c381a038633b | /euler089.py | b32e61c3f1608a6ae354bef88b3f646d1612cf92 | [] | no_license | Octaith/euler | 022fab72f7d2a72327694ea1970aa3e13a560673 | 457676a99013c7c5fd33697b82be998d07c464d9 | refs/heads/master | 2020-09-26T21:04:08.656499 | 2014-09-14T07:47:51 | 2014-09-14T07:47:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | roman = (
('M', 1000),
('CM', 900),
('D', 500),
('CD', 400),
('C', 100),
('XC', 90),
('L', 50),
('XL', 40),
('X', 10),
('IX', 9),
('V', 5),
('IV', 4),
('I', 1)
)
def roman_to_dec(s):
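    # Greedy left-to-right scan: repeatedly consume the largest numeral that still matches.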
result = 0
index = 0
for numeral, integer in roman:
while s[index:index+len(numeral)] == numeral:
result += integer
index += len(numeral)
return result
def dec_to_roman(n):
result = ""
for numeral, integer in roman:
while n >= integer:
result += numeral
n -= integer
return result
with open('roman.txt') as f:
data = f.read().split('\n')
saved = 0
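# For each line, count the characters saved by rewriting the numeral in its minimal form.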
for r in data:
saved += len(r)
saved -= len(dec_to_roman(roman_to_dec(r)))
print saved
| [
"[email protected]"
] | |
ede98906221ceb5af90a8e165e9a48203a10f212 | a1dae20db0338e735f0b4eb2804a069533bc5a9b | /render.py | f36dcfdfed83a87bd98faa44c513dbe54b05c932 | [] | no_license | thoppe/TwitterSquares | 4d78e80680c3b01673d602c2564811bf42090aa6 | a01dd65456fa70478a0ed03cd7c994c0a678e3ef | refs/heads/master | 2020-03-20T08:17:42.525989 | 2018-06-19T22:05:20 | 2018-06-19T22:05:20 | 137,304,270 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,058 | py | """Render Twitter Squares
Usage:
render.py <term> <n_images> [--resolution=<n>]
Options:
-h --help Show this screen.
-r --resolution=<n> Output resolution [default: 1200]
"""
import glob
import os
import sys
import random
from tqdm import tqdm
import numpy as np
import cv2
from docopt import docopt
dargs = docopt(__doc__)
total_images = int(dargs["<n_images>"])
square_n = int(np.sqrt(total_images))
resolution = int(dargs["--resolution"])
if square_n**2 != total_images:
raise ValueError(f"<n_images={total_images}> must be a square number!")
max_image_row_size = 20
#model_img_size = 224
model_img_size = 299
name = dargs["<term>"]
load_dest = f"data/profile_image/{name}"
subimage_dest = f"data/subimage/{name}"
activations_dest = f"data/activations/{name}"
figure_dest = "figures/"
def resize_and_crop(f0):
# Resize all the images to the base shape of (model_img_size,model_img_size)
# Center crop non-square images
f1 = os.path.join(subimage_dest, os.path.basename(f0)) + '.jpg'
if os.path.exists(f1):
return False
img = cv2.imread(f0)
if img is None:
os.remove(f0)
return False
x,y,c = img.shape
if x > y:
dx = (x - y)//2
img = img[dx:dx+y, :, :]
if y > x:
        dy = (y - x)//2  # centre the crop, mirroring the x > y branch above
img = img[:, dy:dy+x, :]
img = cv2.resize(img, (model_img_size,model_img_size))
x,y,c = img.shape
assert(x==y==model_img_size)
cv2.imwrite(f1, img)
#print ("Saved", f1)
def load_image_data():
F_INPUT = sorted(glob.glob(os.path.join(subimage_dest, '*')))
random.shuffle(F_INPUT)
F_INPUT = F_INPUT[:total_images]
IMG, ACT = [], []
for f0 in tqdm(F_INPUT):
f1 = os.path.join(activations_dest, os.path.basename(f0))+'.txt'
assert(os.path.exists(f1))
img = cv2.imread(f0)
IMG.append(img)
ACT.append(np.loadtxt(f1))
IMG = np.array(IMG)
ACT = np.array(ACT)
return IMG, ACT
_clf = None # Only import the model if we need to score something
def compute_activations(f0):
f1 = os.path.join(activations_dest, os.path.basename(f0)) + '.txt'
if os.path.exists(f1):
return False
global _clf
if _clf is None:
print("Importing classification model")
from model import layer_model
_clf = layer_model()
img = cv2.imread(f0)
img = img[:,:,::-1] # BGR to RGB
ax = _clf.predict(img)
np.savetxt(f1, ax)
if __name__ == "__main__":
# Create any missing directories
for d in [subimage_dest, figure_dest, activations_dest]:
if not os.path.exists(d):
os.system(f'mkdir -p "{d}"')
F_IN = set(sorted(glob.glob(os.path.join(load_dest, '*'))))
# Remove all zero-byte files
for f in list(F_IN):
if os.stat(f).st_size==0:
print(f"Removing zero-byte file {f}")
os.remove(f)
F_IN.remove(f)
for f0 in tqdm(F_IN):
resize_and_crop(f0)
print(f"Largest model possible {int(np.floor(len(F_IN)**0.5)**2)}")
F_IN = set(sorted(glob.glob(os.path.join(subimage_dest, '*'))))
for f0 in tqdm(F_IN):
compute_activations(f0)
# Check to make sure we have enough images
F_IN = set(sorted(glob.glob(os.path.join(activations_dest, '*'))))
if len(F_IN) < total_images:
msg = f"Not enough images for {name}, {len(F_IN)}/{total_images}"
raise ValueError(msg)
IMG, ACT = load_image_data()
from grid import generate_tsne, fit_to_grid
print("Generating tSNE coordinates")
X = generate_tsne(ACT)
print("Running Jonker-Volgenan")
img = fit_to_grid(IMG, X, square_n, out_res=model_img_size)
print("Resizing image")
img = cv2.resize(
img, (resolution, resolution), interpolation=cv2.INTER_CUBIC)
f_img_save = os.path.join(figure_dest, f"{name}.jpg")
cv2.imwrite(
f_img_save, img, [int(cv2.IMWRITE_JPEG_QUALITY), 95])
print (f"Saved output image to {f_img_save}")
os.system(f'eog "figures/{name}.jpg"')
| [
"[email protected]"
] | |
fccc5e04254af51c2fc4a03cdf992b81f31a1d28 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/sql/v20190601preview/__init__.py | 82b3a2004814746567987c5300774fdd220485e0 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,559 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from ._enums import *
from .database import *
from .get_database import *
from .get_managed_database import *
from .get_server import *
from .get_server_azure_ad_administrator import *
from .get_sync_group import *
from .get_sync_member import *
from .get_workload_classifier import *
from .get_workload_group import *
from .managed_database import *
from .server import *
from .server_azure_ad_administrator import *
from .sync_group import *
from .sync_member import *
from .workload_classifier import *
from .workload_group import *
from ._inputs import *
from . import outputs
def _register_module():
import pulumi
from ... import _utilities
class Module(pulumi.runtime.ResourceModule):
_version = _utilities.get_semver_version()
def version(self):
return Module._version
def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
if typ == "azure-native:sql/v20190601preview:Database":
return Database(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:ManagedDatabase":
return ManagedDatabase(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:Server":
return Server(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:ServerAzureADAdministrator":
return ServerAzureADAdministrator(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:SyncGroup":
return SyncGroup(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:SyncMember":
return SyncMember(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:WorkloadClassifier":
return WorkloadClassifier(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-native:sql/v20190601preview:WorkloadGroup":
return WorkloadGroup(name, pulumi.ResourceOptions(urn=urn))
else:
raise Exception(f"unknown resource type {typ}")
_module_instance = Module()
pulumi.runtime.register_resource_module("azure-native", "sql/v20190601preview", _module_instance)
_register_module()
| [
"[email protected]"
] | |
5011a21caf349d8ce94e37300ed1812a3e77ff99 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_25989.py | e0a1a41656b1eecde1b382880dffcadd2189e571 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,842 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
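# Repeated pattern below: create each marker set on first use, then place a marker at (x, y, z) with the given RGB colour and size.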
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((455.091, 548.131, 441.132), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((522.671, 548.429, 441.943), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((603.488, 547.37, 430.019), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((518.447, 575.68, 322.142), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((797.322, 526.929, 442.213), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((500.094, 537.392, 437.867), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((498.73, 536.413, 437.883), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((498.274, 508.872, 446.765), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((502.548, 483.082, 459.011), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((507.427, 454.916, 455.226), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((501.996, 437.831, 432.369), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((483.284, 448.014, 412.33), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((479.04, 548.96, 453.22), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((480.576, 351.176, 373.656), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((673.497, 390.666, 419.207), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((673.497, 390.666, 419.207), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((646.276, 396.064, 413.934), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((628.041, 411.52, 429.302), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((608.845, 430.869, 437.911), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((591.527, 453.567, 442.301), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((574.681, 476.771, 447.418), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((557.879, 500.257, 452.816), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((615.326, 292.629, 312.28), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((486.93, 713.637, 585.084), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((588.597, 521.567, 466.245), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((588.597, 521.567, 466.245), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((591.457, 516.909, 437.6), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((586.606, 526.984, 410.798), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((589.86, 554.228, 399.824), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((468.427, 583.357, 396.507), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((714.296, 539.961, 401.056), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((511.432, 560.558, 418.793), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((511.424, 560.567, 418.769), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((526.333, 544.602, 401.069), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((526.398, 522.405, 418.378), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((528.681, 499.153, 433.922), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((513.118, 484.584, 415.528), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((489.537, 476.382, 428.774), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((476.397, 455.114, 443.081), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((519.426, 501.731, 500.317), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((446.248, 399.583, 381.235), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((539.353, 543.386, 500.727), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((544.576, 549.144, 475.707), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((557.977, 563.579, 421.9), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((571.409, 578.04, 368.107), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((508.62, 629.225, 376.098), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((636.519, 560.093, 288.137), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((479.555, 565.106, 435.982), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((502.064, 579.203, 445.777), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((529.561, 585.617, 448.267), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((558.025, 587.772, 449.749), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((586.625, 589.775, 450.073), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((615.365, 589.726, 448.653), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((539.329, 564.984, 455.17), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((694.073, 613.407, 442.164), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
] | |
8ce1689b4605bab929cceaf30bd0e1e4bc9293a9 | b40d1a26ea04a19ec0da7bf55db84b7ee36cc898 | /leetcode.com/python/1007_Minimum_Domino_Rotations_For_Equal_Row.py | 974ee558096c8fe9a393d9b91f507186e8e356d7 | [
"MIT"
] | permissive | partho-maple/coding-interview-gym | 5e8af7d404c28d4b9b52e5cffc540fd51d8025cf | 20ae1a048eddbc9a32c819cf61258e2b57572f05 | refs/heads/master | 2022-09-11T16:36:01.702626 | 2022-03-14T08:39:47 | 2022-03-14T08:39:47 | 69,802,909 | 862 | 438 | MIT | 2022-08-18T06:42:46 | 2016-10-02T14:51:31 | Python | UTF-8 | Python | false | false | 2,177 | py | # Source: https://tinyurl.com/v3zqer7
# Approach 1
class Solution(object):
def minDominoRotations(self, A, B):
"""
:type A: List[int]
:type B: List[int]
:rtype: int
"""
result = float("inf")
for domino in range(1, 7): # Since each domino can have only 1 to 6 values. So check all values if we can make it
isPossible = True
            topRotationCount, bottomRotationCount = 0, 0
            for a, b in zip(A, B):
                if domino != a and domino != b:  # this value cannot fill either row
                    isPossible = False
                    break
                if domino == a and domino != b:
                    bottomRotationCount += 1
                elif domino != a and domino == b:
                    topRotationCount += 1
            if isPossible:
                result = min(result, min(topRotationCount, bottomRotationCount))
return -1 if result == float("inf") else result
# Source: https://tinyurl.com/v3zqer7
# Approach 2
class Solution(object):
def minDominoRotations(self, A, B):
"""
:type A: List[int]
:type B: List[int]
:rtype: int
"""
rotations = self.checkRotationFor(A, B, A[0])
# If one could make all elements in A or B equal to A[0]
if rotations != -1 or A[0] == B[0]:
return rotations
# If one could make all elements in A or B equal to B[0]
else:
return self.checkRotationFor(A, B, B[0])
def checkRotationFor(self, A, B, num):
"""
Return minimum number of swaps,
if one could make all elements in A or B equal to 'num'.
Else return -1
"""
# How many rotations should be done
# to have all elements in A equal to 'num'
# and to have all elements in B equal to 'num'
length = len(A)
rotations_A, rotations_B = 0, 0
for i in range(length):
if A[i] != num and B[i] != num:
return -1
elif A[i] != num:
rotations_A += 1
elif B[i] != num:
rotations_B += 1
return min(rotations_A, rotations_B)
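# Quick sanity check (hypothetical input borrowed for illustration, not part of
# the original file): with A = [2, 1, 2, 4, 2, 2] and B = [5, 2, 6, 2, 3, 2],
# rotating the dominoes at indices 1 and 3 makes the top row all 2s, so both
# approaches above should return 2.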
| [
"[email protected]"
] | |
d7e949f538a3a780aa931750ee78faf99c3e1323 | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/operations/_ddos_protection_plans_operations.py | d971927ea9d9c040f54655faee5d8e8cf1f6edd5 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 30,324 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DdosProtectionPlansOperations(object):
"""DdosProtectionPlansOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.DdosProtectionPlan"
"""Gets information about the specified DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosProtectionPlan, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlan
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.DdosProtectionPlan"
**kwargs # type: Any
):
# type: (...) -> "_models.DdosProtectionPlan"
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DdosProtectionPlan')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.DdosProtectionPlan"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.DdosProtectionPlan"]
"""Creates or updates a DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:param parameters: Parameters supplied to the create or update operation.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlan
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either DdosProtectionPlan or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlan]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.DdosProtectionPlan"
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.DdosProtectionPlan"]
"""Update a DDoS protection plan tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:param parameters: Parameters supplied to the update DDoS protection plan resource tags.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either DdosProtectionPlan or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlan]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DdosProtectionPlanListResult"]
"""Gets all DDoS protection plans in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DdosProtectionPlanListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlanListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlanListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlanListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ddosProtectionPlans'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DdosProtectionPlanListResult"]
"""Gets all the DDoS protection plans in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DdosProtectionPlanListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_04_01.models.DdosProtectionPlanListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlanListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlanListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans'} # type: ignore
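# Illustrative usage sketch (added note, not part of the generated client; the
# client and credential names below are assumptions about the surrounding
# packages rather than guaranteed APIs):
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.network import NetworkManagementClient
#
#   client = NetworkManagementClient(DefaultAzureCredential(), subscription_id)
#   poller = client.ddos_protection_plans.begin_create_or_update(
#       resource_group_name, ddos_protection_plan_name, parameters)
#   plan = poller.result()  # blocks until the long-running operation finishes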
| [
"[email protected]"
] | |
59ab5667a34c44fdb895072c8f91f93182bc126b | acf314ab0fa399018764b2ebd96e33c66362994e | /0x0F-python-object_relational_mapping/1-filter_states.py | 3db5e1107d420d574d6614b5ae1f741eb6da16ad | [] | no_license | glyif/holbertonschool-higher_level_programming | 98f9c2da0b71a4e9e2dd9f6fde755875e9015f34 | 14c02d79e2008db1b992b08f9faa55b20dbe0691 | refs/heads/master | 2021-01-20T06:53:16.179354 | 2017-09-28T18:14:12 | 2017-09-28T18:14:12 | 89,939,980 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 975 | py | #!/usr/bin/python3
"""
mysqldb filter states
"""
import sys
import MySQLdb
def db_connection(user_name, password, db_name, host="localhost"):
"""
db_connection - connects to db
:param user_name: username
:param password: password
:param db_name: database name
:param host: host - default to localhost
:return: db
"""
db = MySQLdb.connect(host=host,
user=user_name,
passwd=password,
db=db_name)
return db
def db_query(db, query):
"""
db_query - queries database
:param db: database
:param query: query
:return: none
"""
cur = db.cursor()
cur.execute(query)
data = cur.fetchall()
for row in data:
print(row)
if __name__ == "__main__":
db = db_connection(sys.argv[1], sys.argv[2], sys.argv[3])
db_query(db, """SELECT id, name FROM states
WHERE name LIKE 'N%' ORDER BY states.id ASC""")
| [
"[email protected]"
] | |
2b6dbf579ae37711f46b26057e43ff7b642659e2 | 77c8c500d4077ad733fbfe2c6a85a1dd47bd3cb5 | /chelseashin/ProblemSolving/2156_포도주시식.py | 940cea71221cff29f679eb73ae27638dc45e2bad | [] | no_license | chelseashin/AlgorithmStudy2021 | 786f03c4c17bc057518d428481e7d710d24ec98e | 1a4744a621ed25715fc9060c5224f0b1092d9c00 | refs/heads/master | 2023-06-22T22:27:47.289806 | 2021-07-28T02:54:22 | 2021-07-28T02:54:22 | 326,441,667 | 1 | 5 | null | 2021-06-29T01:27:40 | 2021-01-03T15:44:16 | Python | UTF-8 | Python | false | false | 848 | py | # 참고 : https://pacific-ocean.tistory.com/152
# https://claude-u.tistory.com/204
# The maximum of dp[i] is determined by one of three cases:
# 1) OXOO: drink two consecutive glasses (i-1 and i) after skipping glass i-2
# 2) OXO: skip glass i-1 and drink glass i
# 3) X: do not drink glass i at all
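# Worked check of the recurrence (hand-computed on an assumed sample input, not
# taken from this file): for wines [6, 10, 13, 9, 8, 1] the code builds
# dp = [0, 6, 16, 23, 28, 33, 33] and prints dp[6] = 33.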
from sys import stdin
input = stdin.readline
n = int(input())
a = [0] + [int(input()) for _ in range(n)]
dp = [0, a[1]]
if n > 1:
dp.append(a[1] + a[2])
for i in range(3, n+1):
dp.append(max(dp[i-1],
dp[i-3]+a[i-1]+a[i],
dp[i-2]+a[i]))
# print(n, a, dp)
print(dp[n])
# Same approach as above, written with a preallocated dp array
# wine = [0] + [int(input()) for _ in range(n)]
# dp = [0] * (n+1)
# dp[1] = wine[1]
# if n > 1:
# dp[2] = wine[1] + wine[2]
# for i in range(3, n+1):
# dp[i] = max(dp[i-3]+wine[i-1]+wine[i], dp[i-2]+wine[i], dp[i-1])
#
# print(dp[n]) | [
"[email protected]"
] | |
26f00db3c32d9d29465643d06dc02532c017e608 | 8d9318a33afc2c3b5ca8ac99fce0d8544478c94a | /Books/Casandra DB/opscenter-5.1.0/lib/py-debian/2.6/i386/twisted/python/_inotify.py | c002674f122a6bd2a9e80aa2344b47edeff20f78 | [] | no_license | tushar239/git-large-repo | e30aa7b1894454bf00546312a3fb595f6dad0ed6 | 9ee51112596e5fc3a7ab2ea97a86ec6adc677162 | refs/heads/master | 2021-01-12T13:48:43.280111 | 2016-11-01T22:14:51 | 2016-11-01T22:14:51 | 69,609,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | ../../../../../py-unpure/twisted/python/_inotify.py | [
"[email protected]"
] | |
0324dd3dc62f88495cb95ea7424deef660c43536 | e2e39726195c7bc075b9bd56e757acd136527d5c | /typings/vtkmodules/vtkIOXML/__init__.pyi | 5a9a78845d01dba6a9f4391c9438656d0f13da23 | [
"BSD-3-Clause"
] | permissive | gen4438/vtk-python-stubs | a652272183d2d1ee48d4639e86bcffc1ac454af0 | c9abd76362adf387af64ce5ddbd04c5d3bebe9da | refs/heads/main | 2023-04-04T02:13:15.459241 | 2021-04-15T10:47:28 | 2021-04-15T10:53:59 | 358,224,363 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,682 | pyi | """
This type stub file was generated by pyright.
"""
import vtkmodules.vtkCommonCore as __vtkmodules_vtkCommonCore
import vtkmodules.vtkCommonExecutionModel as __vtkmodules_vtkCommonExecutionModel
import vtkmodules.vtkIOXMLParser as __vtkmodules_vtkIOXMLParser
from .vtkXMLReader import vtkXMLReader
from .vtkXMLDataReader import vtkXMLDataReader
from .vtkXMLUnstructuredDataReader import vtkXMLUnstructuredDataReader
from .vtkXMLPolyDataReader import vtkXMLPolyDataReader
from .vtkRTXMLPolyDataReader import vtkRTXMLPolyDataReader
from .vtkXMLCompositeDataReader import vtkXMLCompositeDataReader
from .vtkXMLWriter import vtkXMLWriter
from .vtkXMLCompositeDataWriter import vtkXMLCompositeDataWriter
from .vtkXMLDataObjectWriter import vtkXMLDataObjectWriter
from .vtkXMLDataSetWriter import vtkXMLDataSetWriter
from .vtkXMLFileReadTester import vtkXMLFileReadTester
from .vtkXMLGenericDataObjectReader import vtkXMLGenericDataObjectReader
from .vtkXMLHierarchicalBoxDataFileConverter import vtkXMLHierarchicalBoxDataFileConverter
from .vtkXMLUniformGridAMRReader import vtkXMLUniformGridAMRReader
from .vtkXMLHierarchicalBoxDataReader import vtkXMLHierarchicalBoxDataReader
from .vtkXMLUniformGridAMRWriter import vtkXMLUniformGridAMRWriter
from .vtkXMLHierarchicalBoxDataWriter import vtkXMLHierarchicalBoxDataWriter
from .vtkXMLMultiBlockDataReader import vtkXMLMultiBlockDataReader
from .vtkXMLMultiGroupDataReader import vtkXMLMultiGroupDataReader
from .vtkXMLHierarchicalDataReader import vtkXMLHierarchicalDataReader
from .vtkXMLHyperTreeGridReader import vtkXMLHyperTreeGridReader
from .vtkXMLHyperTreeGridWriter import vtkXMLHyperTreeGridWriter
from .vtkXMLStructuredDataReader import vtkXMLStructuredDataReader
from .vtkXMLImageDataReader import vtkXMLImageDataReader
from .vtkXMLStructuredDataWriter import vtkXMLStructuredDataWriter
from .vtkXMLImageDataWriter import vtkXMLImageDataWriter
from .vtkXMLMultiBlockDataWriter import vtkXMLMultiBlockDataWriter
from .vtkXMLPartitionedDataSetCollectionReader import vtkXMLPartitionedDataSetCollectionReader
from .vtkXMLPartitionedDataSetCollectionWriter import vtkXMLPartitionedDataSetCollectionWriter
from .vtkXMLPartitionedDataSetReader import vtkXMLPartitionedDataSetReader
from .vtkXMLPartitionedDataSetWriter import vtkXMLPartitionedDataSetWriter
from .vtkXMLPDataObjectReader import vtkXMLPDataObjectReader
from .vtkXMLPDataReader import vtkXMLPDataReader
from .vtkXMLPHyperTreeGridReader import vtkXMLPHyperTreeGridReader
from .vtkXMLPStructuredDataReader import vtkXMLPStructuredDataReader
from .vtkXMLPImageDataReader import vtkXMLPImageDataReader
from .vtkXMLUnstructuredDataWriter import vtkXMLUnstructuredDataWriter
from .vtkXMLPolyDataWriter import vtkXMLPolyDataWriter
from .vtkXMLPUnstructuredDataReader import vtkXMLPUnstructuredDataReader
from .vtkXMLPPolyDataReader import vtkXMLPPolyDataReader
from .vtkXMLPRectilinearGridReader import vtkXMLPRectilinearGridReader
from .vtkXMLPStructuredGridReader import vtkXMLPStructuredGridReader
from .vtkXMLPTableReader import vtkXMLPTableReader
from .vtkXMLPUnstructuredGridReader import vtkXMLPUnstructuredGridReader
from .vtkXMLRectilinearGridReader import vtkXMLRectilinearGridReader
from .vtkXMLRectilinearGridWriter import vtkXMLRectilinearGridWriter
from .vtkXMLStructuredGridReader import vtkXMLStructuredGridReader
from .vtkXMLStructuredGridWriter import vtkXMLStructuredGridWriter
from .vtkXMLTableReader import vtkXMLTableReader
from .vtkXMLTableWriter import vtkXMLTableWriter
from .vtkXMLUnstructuredGridReader import vtkXMLUnstructuredGridReader
from .vtkXMLUnstructuredGridWriter import vtkXMLUnstructuredGridWriter
__loader__ = ...
__spec__ = ...
| [
"[email protected]"
] | |
ee9cee9c908ac3278c8545a66f4d96149faae702 | 7ce05272d21c903abc85ebc74544009aacd80c82 | /Advance_Python/Python_Database_Programming/Other/add_user_in_bank.py | 5c2f984a2d9a435280b32ffcf34ffcf45b74ed87 | [] | no_license | sachinyadav3496/PythonInternBatch2018 | 8899a866f60a39b4c7eff4f5bc79ec2586833403 | 8e2610ad80c39ea747e8a6547ebe540e7b019a79 | refs/heads/master | 2021-06-26T09:18:58.178457 | 2020-10-03T09:49:32 | 2020-10-03T09:49:32 | 136,880,809 | 18 | 34 | null | 2020-10-03T09:49:33 | 2018-06-11T05:56:26 | Jupyter Notebook | UTF-8 | Python | false | false | 548 | py | import pymysql as sql
db = sql.connect(host='localhost',port=3306,user='bank_app',password='redhat',database='bank_app')
c = db.cursor()
f = open('bank_data.csv')
data = []
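# Assumed input format (inferred from the parsing below, not documented in the
# original): each line of bank_data.csv holds "name,password,balance", with the
# balance parsed as a float.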
for line in f:
    d = line.rstrip('\n').split(',')
    d[2] = float(d[2])
    data.append(d)
f.close()
for var in data :
name = var[0]
password = var[1]
bal = var[2]
cmd = "insert into bank(user,password,bal) values('{}','{}',{})".format(name,password,bal)
c.execute(cmd)
db.commit()
print("Added data to bank sucessfully")
c.close()
db.close()
| [
"[email protected]"
] | |
073b6b152e0805dbc16dce1d402482e505bd9770 | 9d5723c09148cc353e5339a706ba582a162dceec | /hunkim/lab12-5.py | 822c653469deeadaddde45a16a92e53b9bc3eaab | [] | no_license | SilverQ/dl_study | 424bce279c059c290a4c766e87fadb150fff82da | 663b432abc5afd0eed278368a5fea19ece6a383c | refs/heads/master | 2022-11-14T08:27:10.937535 | 2020-07-02T10:05:04 | 2020-07-02T10:05:04 | 82,505,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,120 | py | '''
The original script shows how to predict the next day's closing stock prices using a basic RNN
https://github.com/hunkim/DeepLearningZeroToAll/blob/master/lab-12-5-rnn_stock_prediction.py
At first, let's understand the original code and prior arts completely
'''
import tensorflow as tf
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import os
tf.set_random_seed(777) # reproducibility
np.set_printoptions(precision=2)
if "DISPLAY" not in os.environ:
# remove Travis CI Error
matplotlib.use('Agg')
def MinMaxScaler(data):
'''
Min Max Normalization
Parameters
----------
data : numpy.ndarray
input data to be normalized
shape: [Batch size, dimension]
    Returns
    ----------
    [data, min, max] : list
        data : numpy.ndarray, the normalized data, shape [Batch size, dimension]
        min, max : numpy.ndarray, the per-column minima and maxima of the input
    References
    ----------
    .. [1] http://sebastianraschka.com/Articles/2014_about_feature_scaling.html
    Originally only the normalized data was returned; min and max are returned
    as well so that the original scale can be recovered later.
'''
numerator = data - np.min(data, 0)
denominator = np.max(data, 0) - np.min(data, 0)
# noise term prevents the zero division
return [numerator / (denominator + 1e-7), np.min(data, 0), np.max(data, 0)]
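# Added note: with the min and max returned above, a scaled value x' can be
# mapped back to the original scale via x ~ x' * (max - min + 1e-7) + min,
# mirroring the normalization formula used in MinMaxScaler.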
# train Parameters
seq_length = 7
data_dim = 5
hidden_dim = 10
output_dim = 1
learning_rate = 0.01
iterations = 500
# Open, High, Low, Volume, Close
xy = np.loadtxt('data-02-stock_daily.csv', delimiter=',')
xy_rev = xy[::-1]  # reverse the row order so the data is chronological (ascending by date)
'''
print('xy: ', xy[-3:])
xy: [[ 566.89 567. 556.93 10800. 556.97]
[ 561.2 566.43 558.67 41200. 559.99]
[ 568. 568. 552.92 13100. 558.46]]
print('xy_rev: ', xy_rev[:3])
xy: [[ 568. 568. 552.92 13100. 558.46]
[ 561.2 566.43 558.67 41200. 559.99]
[ 566.89 567. 556.93 10800. 556.97]]
'''
# split data to train_set/test_set and Scaling
train_size = int(len(xy_rev) * 0.7)
train_set = xy_rev[0:train_size]
test_set = xy_rev[train_size - seq_length:] # Index from [train_size - seq_length] to utilize past sequence
[train_set, min, max] = MinMaxScaler(train_set)
[test_set, min, max] = MinMaxScaler(test_set)
'''
print('train_set: ', train_set[:3])
print('min: ', min) # 컬럼별로 min-max 연산은 따로따로 한 것을 알 수 있음.!!!
train_set: [[0.25 0.25 0.23 0. 0.23]
[0.23 0.24 0.25 0. 0.24]
[0.25 0.24 0.25 0. 0.23]]
min: [ 494.65 495.98 487.56 7900. 492.55]
'''
# build datasets. Create batch for 7-days.
def build_dataset(time_series, seq_length):
dataX = []
dataY = []
for i in range(0, len(time_series) - seq_length):
_x = time_series[i:i + seq_length, :]
_y = time_series[i + seq_length, [-1]] # the next day's closing stock prices
# print(_x, "->", _y)
dataX.append(_x)
dataY.append(_y)
return np.array(dataX), np.array(dataY)
trainX, trainY = build_dataset(train_set, seq_length)
testX, testY = build_dataset(test_set, seq_length)
'''
print('trainX: ', trainX[:4])
print('trainY: ', trainY[:3])
trainX: [[[2.53e-01 2.45e-01 2.34e-01 4.66e-04 2.32e-01]
[2.30e-01 2.40e-01 2.55e-01 2.98e-03 2.37e-01]
[2.49e-01 2.42e-01 2.48e-01 2.60e-04 2.27e-01]
[2.21e-01 2.47e-01 2.55e-01 0.00e+00 2.63e-01]
[3.63e-01 3.70e-01 2.67e-01 1.25e-02 2.62e-01]
[2.59e-01 3.11e-01 2.74e-01 4.56e-01 2.72e-01]
[2.76e-01 2.78e-01 1.98e-01 5.70e-01 1.78e-01]]
[[2.30e-01 2.40e-01 2.55e-01 2.98e-03 2.37e-01]
[2.49e-01 2.42e-01 2.48e-01 2.60e-04 2.27e-01]
[2.21e-01 2.47e-01 2.55e-01 0.00e+00 2.63e-01]
[3.63e-01 3.70e-01 2.67e-01 1.25e-02 2.62e-01]
[2.59e-01 3.11e-01 2.74e-01 4.56e-01 2.72e-01]
[2.76e-01 2.78e-01 1.98e-01 5.70e-01 1.78e-01]
[1.59e-01 1.79e-01 1.42e-01 3.94e-01 1.61e-01]]
[[2.49e-01 2.42e-01 2.48e-01 2.60e-04 2.27e-01]
[2.21e-01 2.47e-01 2.55e-01 0.00e+00 2.63e-01]
[3.63e-01 3.70e-01 2.67e-01 1.25e-02 2.62e-01]
[2.59e-01 3.11e-01 2.74e-01 4.56e-01 2.72e-01]
[2.76e-01 2.78e-01 1.98e-01 5.70e-01 1.78e-01]
[1.59e-01 1.79e-01 1.42e-01 3.94e-01 1.61e-01]
[1.65e-01 2.01e-01 1.93e-01 2.82e-01 2.20e-01]]
[[2.21e-01 2.47e-01 2.55e-01 0.00e+00 2.63e-01]
[3.63e-01 3.70e-01 2.67e-01 1.25e-02 2.62e-01]
[2.59e-01 3.11e-01 2.74e-01 4.56e-01 2.72e-01]
[2.76e-01 2.78e-01 1.98e-01 5.70e-01 1.78e-01]
[1.59e-01 1.79e-01 1.42e-01 3.94e-01 1.61e-01]
[1.65e-01 2.01e-01 1.93e-01 2.82e-01 2.20e-01]
[2.24e-01 2.36e-01 2.34e-01 2.98e-01 2.52e-01]]]
trainY: [[0.16]
[0.22]
[0.25]]
'''
# input place holders
X = tf.placeholder(tf.float32, [None, seq_length, data_dim])
Y = tf.placeholder(tf.float32, [None, 1])
# build a LSTM network
cell = tf.contrib.rnn.BasicLSTMCell(num_units=hidden_dim, state_is_tuple=True, activation=tf.tanh)
outputs, _states = tf.nn.dynamic_rnn(cell, X, dtype=tf.float32)
Y_pred = tf.contrib.layers.fully_connected(
outputs[:, -1], output_dim, activation_fn=None) # We use the last cell's output
# cost/loss
loss = tf.reduce_sum(tf.square(Y_pred - Y)) # sum of the squares
# optimizer
optimizer = tf.train.AdamOptimizer(learning_rate)
train = optimizer.minimize(loss)
# RMSE
targets = tf.placeholder(tf.float32, [None, 1])
predictions = tf.placeholder(tf.float32, [None, 1])
rmse = tf.sqrt(tf.reduce_mean(tf.square(targets - predictions)))
with tf.Session() as sess:
init = tf.global_variables_initializer()
sess.run(init)
# Training step
for i in range(iterations):
_, step_loss = sess.run([train, loss], feed_dict={
X: trainX, Y: trainY})
        if i % 100 == 0:
print("[step: {}] loss: {}".format(i, step_loss))
# Test step
test_predict = sess.run(Y_pred, feed_dict={X: testX})
rmse_val = sess.run(rmse, feed_dict={
targets: testY, predictions: test_predict})
print("RMSE: {}".format(rmse_val))
# Plot predictions
plt.plot(testY)
plt.plot(test_predict)
plt.xlabel("Time Period")
plt.ylabel("Stock Price")
# plt.show()
plt.savefig('Stock_price.png')
| [
"[email protected]"
] | |
36cc5a4f3b24b18cffd92d7046c81ced4ac397e1 | 75089d61a7f985fc23a3e29e6517a744c1b5c76b | /data/__init__.py | ce8ecdf19bf474e1e68babbf0bdf5d154c34d3d5 | [] | no_license | KUR-creative/data-warehouse | e4e85ffa7cd0ec416bb67c62aef624bca6323370 | e1e4720f8ad529291f2c36b1c284a6e4b15ac637 | refs/heads/master | 2023-01-21T14:21:54.533736 | 2020-12-03T06:34:20 | 2020-12-03T06:34:20 | 288,881,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27 | py | from . import (
raw,
)
| [
"[email protected]"
] | |
1f5c2bd6304a0c9d074d8c9541a0eb810b6bf790 | 45006b78675997765c2248ce2944aa24c9cd3787 | /tf_agents/bandits/policies/policy_utilities.py | 9d39be6fd7480a0e6372bfa270c35c47d4db578c | [
"Apache-2.0"
] | permissive | ymodak/agents | 43d2105965db763b07233139c0b87896c956547a | a6ab65605a6910cb3130a500614d006c9271157b | refs/heads/master | 2022-09-24T03:47:05.815845 | 2020-05-18T18:02:40 | 2020-05-18T18:03:03 | 265,031,865 | 0 | 0 | Apache-2.0 | 2020-05-18T18:50:52 | 2020-05-18T18:50:51 | null | UTF-8 | Python | false | false | 8,891 | py | # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for bandit policies."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.utils import common
class InfoFields(object):
"""Strings which can be used in the policy info fields."""
# Mean of predicted rewards (per arm).
PREDICTED_REWARDS_MEAN = 'predicted_rewards_mean'
# Samples of predicted rewards (per arm).
PREDICTED_REWARDS_SAMPLED = 'predicted_rewards_sampled'
# Type of bandit policy (see enumerations in `BanditPolicyType`).
BANDIT_POLICY_TYPE = 'bandit_policy_type'
# Used to store the chosen action for a per-arm model.
CHOSEN_ARM_FEATURES = 'chosen_arm_features'
PolicyInfo = collections.namedtuple( # pylint: disable=invalid-name
'PolicyInfo',
(policy_step.CommonFields.LOG_PROBABILITY,
InfoFields.PREDICTED_REWARDS_MEAN,
InfoFields.PREDICTED_REWARDS_SAMPLED,
InfoFields.BANDIT_POLICY_TYPE))
# Set default empty tuple for all fields.
PolicyInfo.__new__.__defaults__ = ((),) * len(PolicyInfo._fields)
PerArmPolicyInfo = collections.namedtuple( # pylint: disable=invalid-name
'PerArmPolicyInfo',
(policy_step.CommonFields.LOG_PROBABILITY,
InfoFields.PREDICTED_REWARDS_MEAN,
InfoFields.PREDICTED_REWARDS_SAMPLED,
InfoFields.BANDIT_POLICY_TYPE,
InfoFields.CHOSEN_ARM_FEATURES))
# Set default empty tuple for all fields.
PerArmPolicyInfo.__new__.__defaults__ = ((),) * len(PerArmPolicyInfo._fields)
def populate_policy_info(arm_observations, chosen_actions, rewards_for_argmax,
est_rewards, emit_policy_info,
accepts_per_arm_features):
"""Populates policy info given all needed input.
Args:
    arm_observations: In case the policy accepts per-arm features, this is a
Tensor with the per-arm features. Otherwise its value is unused.
chosen_actions: A Tensor with the indices of the chosen actions.
rewards_for_argmax: The sampled or optimistically boosted reward estimates
based on which the policy chooses the action greedily.
est_rewards: A Tensor with the rewards estimated by the model.
    emit_policy_info: A set of policy info keys, specifying which info fields to
      populate.
accepts_per_arm_features: (bool) Whether the policy accepts per-arm
features.
Returns:
A policy info.
"""
if accepts_per_arm_features:
# Saving the features for the chosen action to the policy_info.
chosen_arm_features = tf.gather(
params=arm_observations, indices=chosen_actions, batch_dims=1)
policy_info = PerArmPolicyInfo(
predicted_rewards_sampled=(
rewards_for_argmax if
InfoFields.PREDICTED_REWARDS_SAMPLED in emit_policy_info else ()),
predicted_rewards_mean=(
est_rewards
if InfoFields.PREDICTED_REWARDS_MEAN in emit_policy_info else ()),
chosen_arm_features=chosen_arm_features)
else:
policy_info = PolicyInfo(
predicted_rewards_sampled=(
rewards_for_argmax if
InfoFields.PREDICTED_REWARDS_SAMPLED in emit_policy_info else ()),
predicted_rewards_mean=(
est_rewards
if InfoFields.PREDICTED_REWARDS_MEAN in emit_policy_info else ()))
return policy_info
class BanditPolicyType(object):
"""Enumeration of bandit policy types."""
# No bandit policy type specified.
UNKNOWN = 0
# Greedy decision made by bandit agent.
GREEDY = 1
# Random decision for exploration made by epsilon-greedy agent sampled from
# uniform distribution over actions.
UNIFORM = 2
def create_bandit_policy_type_tensor_spec(shape):
"""Create tensor spec for bandit policy type."""
return tensor_spec.BoundedTensorSpec(
shape=shape, dtype=tf.int32,
minimum=BanditPolicyType.UNKNOWN, maximum=BanditPolicyType.UNIFORM)
@common.function
def masked_argmax(input_tensor, mask, output_type=tf.int32):
"""Computes the argmax where the allowed elements are given by a mask.
If a row of `mask` contains all zeros, then this method will return -1 for the
corresponding row of `input_tensor`.
Args:
input_tensor: Rank-2 Tensor of floats.
mask: 0-1 valued Tensor of the same shape as input.
output_type: Integer type of the output.
Returns:
A Tensor of rank 1 and type `output_type`, with the masked argmax of every
row of `input_tensor`.
"""
input_tensor.shape.assert_is_compatible_with(mask.shape)
neg_inf = tf.constant(-float('Inf'), input_tensor.dtype)
modified_input = tf.compat.v2.where(
tf.cast(mask, tf.bool), input_tensor, neg_inf)
argmax_tensor = tf.argmax(modified_input, axis=-1, output_type=output_type)
# Replace results for invalid mask rows with -1.
reduce_mask = tf.cast(tf.reduce_max(mask, axis=1), tf.bool)
neg_one = tf.constant(-1, output_type)
return tf.compat.v2.where(reduce_mask, argmax_tensor, neg_one)
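# Example (illustrative values): for input_tensor [[1., 5., 3.]] with mask
# [[1, 0, 1]], the 5. is masked out and masked_argmax returns [2]; any row whose
# mask is all zeros yields -1 instead.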
def has_bandit_policy_type(info, check_for_tensor=False):
"""Check if policy info has `bandit_policy_type` field/tensor."""
if info in ((), None):
return False
fields = getattr(info, '_fields', None)
has_field = fields is not None and InfoFields.BANDIT_POLICY_TYPE in fields
if has_field and check_for_tensor:
return isinstance(info.bandit_policy_type, tf.Tensor)
else:
return has_field
def set_bandit_policy_type(info, bandit_policy_type):
"""Sets the InfoFields.BANDIT_POLICY_TYPE on info to bandit_policy_type.
If policy `info` does not support InfoFields.BANDIT_POLICY_TYPE, this method
returns `info` as-is (without any modification).
Args:
info: Policy info on which to set bandit policy type.
bandit_policy_type: Tensor containing BanditPolicyType enums or TensorSpec
from `create_bandit_policy_type_tensor_spec()`.
Returns:
Policy info with modified field (if possible).
"""
if info in ((), None):
return PolicyInfo(bandit_policy_type=bandit_policy_type)
fields = getattr(info, '_fields', None)
if fields is not None and InfoFields.BANDIT_POLICY_TYPE in fields:
return info._replace(bandit_policy_type=bandit_policy_type)
try:
info[InfoFields.BANDIT_POLICY_TYPE] = bandit_policy_type
except TypeError:
pass
return info
@common.function
def bandit_policy_uniform_mask(values, mask):
"""Set bandit policy type tensor to BanditPolicyType.UNIFORM based on mask.
Set bandit policy type `values` to BanditPolicyType.UNIFORM; returns tensor
where output[i] is BanditPolicyType.UNIFORM if mask[i] is True, otherwise it
is left as values[i].
Args:
values: Tensor containing `BanditPolicyType` enumerations.
mask: Tensor of the same shape as `values` with boolean flags indicating
values to set to `BanditPolicyType.UNIFORM`.
Returns:
Tensor containing `BanditPolicyType` enumerations with masked values.
"""
return tf.where(
mask, tf.fill(tf.shape(values), BanditPolicyType.UNIFORM), values)
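# Illustrative sketch (invented values): entries flagged by the mask become
# UNIFORM, the remaining entries keep their original policy type.
def _bandit_policy_uniform_mask_example():
  values = tf.fill([3], BanditPolicyType.GREEDY)
  mask = tf.constant([True, False, True])
  return bandit_policy_uniform_mask(values, mask)  # UNIFORM, GREEDY, UNIFORM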
def get_model_index(arm_index, accepts_per_arm_features):
"""Returns the model index for a specific arm.
The number of models depends on the observation format: If the policy accepts
per-arm features, there is only one single model used for every arm. Otherwise
there is a model for every arm.
Args:
arm_index: The index of the arm for which the model index is needed.
accepts_per_arm_features: (bool) Whether the policy works with per-arm
features.
Returns:
The index of the model for the arm requested.
"""
return 0 if accepts_per_arm_features else arm_index
def compute_feasibility_probability(observation, constraints, batch_size,
num_actions, action_mask=None):
"""Helper function to compute the action feasibility probability."""
feasibility_prob = tf.ones([batch_size, num_actions])
if action_mask is not None:
feasibility_prob = tf.cast(action_mask, tf.float32)
for c in constraints:
# We assume the constraints are independent.
action_feasibility = c.compute_action_feasibility(observation)
feasibility_prob *= action_feasibility
return feasibility_prob
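# Illustrative sketch (all names invented): with independent constraints the
# per-action feasibility probabilities simply multiply.
def _compute_feasibility_probability_example():
  class _HalfFeasible(object):
    def compute_action_feasibility(self, observation):
      return tf.fill([2, 3], 0.5)
  constraints = [_HalfFeasible(), _HalfFeasible()]
  # every entry is 0.5 * 0.5 = 0.25
  return compute_feasibility_probability(
      observation=None, constraints=constraints, batch_size=2, num_actions=3)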
| [
"[email protected]"
] | |
f5b2ea2f20edbedb90a3351960045e897c52f2c3 | db98aeb4883d2aa9969970d353b9d6212c7dbde2 | /lectures/07-python-dictionaries/examples/dna9.py | f71e08b870a9f5ce84946e6c88096ad74de04bfa | [
"MIT"
] | permissive | qianwenluo/biosys-analytics | cec7e84477e01f9aa17e30c1fd8286710deed617 | f936095931fa8f237de8bdf058b960db86effa49 | refs/heads/master | 2020-04-15T20:19:25.669143 | 2019-05-07T17:52:17 | 2019-05-07T17:52:17 | 164,988,099 | 0 | 1 | MIT | 2019-01-10T04:12:20 | 2019-01-10T04:12:20 | null | UTF-8 | Python | false | false | 478 | py | #!/usr/bin/env python3
"""Tetra-nucleotide counter"""
import sys
import os
from collections import defaultdict
args = sys.argv[1:]
if len(args) != 1:
print('Usage: {} DNA'.format(os.path.basename(sys.argv[0])))
sys.exit(1)
arg = args[0]
dna = ''
if os.path.isfile(arg):
dna = ''.join(open(arg).read().splitlines())
else:
dna = arg
count = defaultdict(int)
for base in dna.lower():
count[base] += 1
print(' '.join(map(lambda b: str(count[b]), "acgt")))
| [
"[email protected]"
] | |
908d6b9bdd11c832f27b876675752c230f0dd8e9 | 901bfc797cc369c0bea21167ac471d0311cb93ac | /e3/DiffieHellman.py | bf11a36e45a5949541a91c675a66430dd0b9b984 | [
"MIT"
] | permissive | NigrumAquila/subject_cryptographic_protection | 022216fd1481febc3a010efdfd11ab3398c73d00 | 2b4015b3c1b6d57391e866a70d308e78e5cab719 | refs/heads/master | 2021-03-17T15:58:10.590822 | 2020-05-01T06:30:54 | 2020-05-01T06:30:54 | 247,001,657 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 858 | py | import __main__
if __main__.__file__ != 'main.py':
exit('run main.py')
from .DHlib.DHalg import encrypt, decrypt, getSharedSecret, printAllKeys, printParams
from lib.colors import *
from lib.duty import *
key = getSharedSecret()
printAllKeys()
while True:
    printParams()
    message = typedText('Enter message for Diffie-Hellman encryption: ')
printTextAndValue('Original message: ', message)
encrypted_message = encrypt(key, message)
try:
printTextAndValue('Encrypted message: ', encrypted_message)
except UnicodeError:
warning('\rYour encoding isn\'t UTF-8')
end('Please, restart it with "PYTHONIOENCODING=UTF-8 python main.py" or by IDE with utf8 encoding')
decrypted_message = decrypt(key, encrypted_message)
printTextAndValue('Decrypted message: ', decrypted_message)
repeatProcedure() | [
"[email protected]"
] | |
2ed5006395d6e55cc012484b9d82f09f074e11cf | 8fc2ab3d29a30e603e19b30bb9517928de529167 | /hackerank_whatsnext.py | 2d44363c21597514612dc972cc035e6441f66752 | [] | no_license | rushilchugh/Practise | 35a9861bec6786580dc0a440eb25d78e43cb7bc9 | 98fd593b95dad641bef1d519c6c6ed1daaae630f | refs/heads/master | 2020-03-13T21:14:14.013604 | 2018-04-27T12:23:50 | 2018-04-27T12:23:50 | 131,291,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,765 | py | __author__ = 'Rushil'
#SetCount(x) - Number of ones in a binary number x
#Johnny wants to find a binary number, D, that is the smallest binary number >B where setCount(B) = setCount(D)
#He then wants to compress D into an array of integers, C (in the same way that integer array A contains the compressed form of binary string B).
#Values at even indices represent runs of consecutive 1s
#Values at odd indices represent runs of consecutive 0s
from itertools import groupby
import re
#Given input 4 1 3 2 4
def get_bin_rep(num):
inp_text = num.replace(' ','')
f_str = ''
for index,char in enumerate(inp_text):
if index % 2 == 0:
f_str += '1'*int(char)
else:
f_str += '0'*int(char)
return f_str
def get_other_bin(bin_num):
occ_0 = 0
bin_num = list(bin_num)
if bin_num[-1] == '0':
f1_index = ''.join(bin_num).rfind('1')
bin_num[-1] = '1'
bin_num[f1_index] = '0'
return ''.join(bin_num)
for index,i in enumerate(bin_num):
if i == '0':
occ_0 = index
bin_num[occ_0] = '1'
bin_num[occ_0 + 1] = '0'
return ''.join(bin_num)
def make_rep(bin_num):
#11110111010111
f_str = ''
for i,j in groupby(bin_num):
f_str += str(len(list(j)))
f_str += ' '
return f_str
#
#print(get_other_bin('11110111001111'))
#print(make_rep('11110111001111'))
#print(make_rep(get_other_bin(get_bin_rep('4 1 3 2 4'))))
n = int(input().strip())
m_list = []
for i in range(n):
w_len = input().strip()
m_word = input().strip()
m_list.append(m_word)
for i in m_list:
    # print the number of runs first, then the run lengths themselves
    f_sol = make_rep(get_other_bin(get_bin_rep(i))).split()
    print(len(f_sol))
    print(' '.join(f_sol))
| [
"[email protected]"
] | |
2ae309ab7516c2e17c6d104bf77aa92bce5dbd7d | 26e91aead18d0fad6f5ce8fc4adf7d8e05a2f07f | /byceps/services/board/models/topic.py | ddc5451016c5f326ba92595817d09bd24677a035 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | leathe/byceps | 40c1f8a1aab3521fcac45d88eab6364d448d4e67 | cd0c618af63fed1cd7006bb67da46eac0ddbb1c7 | refs/heads/master | 2020-12-02T09:02:51.087511 | 2019-12-14T17:00:22 | 2019-12-14T17:00:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,529 | py | """
byceps.services.board.models.topic
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import datetime
from sqlalchemy.ext.associationproxy import association_proxy
from ....blueprints.board.authorization import (
BoardPermission,
BoardTopicPermission,
)
from ....database import BaseQuery, db, generate_uuid
from ....typing import UserID
from ....util.instances import ReprBuilder
from ...authentication.session.models.current_user import CurrentUser
from ...user.models.user import User
from ..transfer.models import CategoryID
from .category import Category
class TopicQuery(BaseQuery):
def for_category(self, category_id: CategoryID) -> BaseQuery:
return self.filter_by(category_id=category_id)
def only_visible_for_user(self, user: CurrentUser) -> BaseQuery:
"""Only return topics the user may see."""
if not user.has_permission(BoardPermission.view_hidden):
return self.without_hidden()
return self
def without_hidden(self) -> BaseQuery:
"""Only return topics every user may see."""
return self.filter(Topic.hidden == False)
class Topic(db.Model):
"""A topic."""
__tablename__ = 'board_topics'
query_class = TopicQuery
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
category_id = db.Column(db.Uuid, db.ForeignKey('board_categories.id'), index=True, nullable=False)
category = db.relationship(Category)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
creator_id = db.Column(db.Uuid, db.ForeignKey('users.id'), nullable=False)
title = db.Column(db.UnicodeText, nullable=False)
posting_count = db.Column(db.Integer, default=0, nullable=False)
last_updated_at = db.Column(db.DateTime, default=datetime.utcnow)
last_updated_by_id = db.Column(db.Uuid, db.ForeignKey('users.id'))
last_updated_by = db.relationship(User, foreign_keys=[last_updated_by_id])
hidden = db.Column(db.Boolean, default=False, nullable=False)
hidden_at = db.Column(db.DateTime)
hidden_by_id = db.Column(db.Uuid, db.ForeignKey('users.id'))
hidden_by = db.relationship(User, foreign_keys=[hidden_by_id])
locked = db.Column(db.Boolean, default=False, nullable=False)
locked_at = db.Column(db.DateTime)
locked_by_id = db.Column(db.Uuid, db.ForeignKey('users.id'))
locked_by = db.relationship(User, foreign_keys=[locked_by_id])
pinned = db.Column(db.Boolean, default=False, nullable=False)
pinned_at = db.Column(db.DateTime)
pinned_by_id = db.Column(db.Uuid, db.ForeignKey('users.id'))
pinned_by = db.relationship(User, foreign_keys=[pinned_by_id])
initial_posting = association_proxy('initial_topic_posting_association', 'posting')
posting_limited_to_moderators = db.Column(db.Boolean, default=False, nullable=False)
def __init__(
self, category_id: CategoryID, creator_id: UserID, title: str
) -> None:
self.category_id = category_id
self.creator_id = creator_id
self.title = title
def may_be_updated_by_user(self, user: CurrentUser) -> bool:
return (
(
not self.locked
and user.id == self.creator_id
and user.has_permission(BoardTopicPermission.update)
)
or user.has_permission(BoardPermission.update_of_others)
)
@property
def reply_count(self) -> int:
return self.posting_count - 1
def count_pages(self, postings_per_page: int) -> int:
"""Return the number of pages this topic spans."""
full_page_count, remaining_postings = divmod(
self.posting_count, postings_per_page
)
if remaining_postings > 0:
return full_page_count + 1
else:
return full_page_count
def __eq__(self, other) -> bool:
return self.id == other.id
def __repr__(self) -> str:
builder = ReprBuilder(self) \
.add_with_lookup('id') \
.add('category', self.category.title) \
.add_with_lookup('title')
if self.hidden:
builder.add_custom(f'hidden by {self.hidden_by.screen_name}')
if self.locked:
builder.add_custom(f'locked by {self.locked_by.screen_name}')
if self.pinned:
builder.add_custom(f'pinned by {self.pinned_by.screen_name}')
return builder.build()
| [
"[email protected]"
] | |
a4315be2234838908f42b3d2d0d042647f384c92 | a80884040ce1c178274a3068d216f440dd541844 | /tests/regression/test_tee_map_completion.py | 07635a9b627669bb5320d9242fc4ef3be123bf53 | [
"MIT"
] | permissive | maki-nage/rxsci | a4aae51edc1ef684b55df22e34c11aa1d54ef740 | 915e59ebf593c4b313265bb87cf0e1209ec2ee0f | refs/heads/master | 2023-01-19T14:32:11.638497 | 2023-01-17T08:06:35 | 2023-01-17T08:06:35 | 242,592,973 | 9 | 2 | MIT | 2022-11-08T21:54:16 | 2020-02-23T21:23:56 | Python | UTF-8 | Python | false | false | 464 | py | import rx
import rx.operators as ops
import rxsci as rs
def test_completion():
data = [1, 2, 3]
actual_data = []
actual_completed = []
rx.from_(data).pipe(
rs.ops.tee_map(
ops.count(),
rs.math.sum(reduce=True),
)
).subscribe(
on_next=actual_data.append,
on_completed=lambda: actual_completed.append(True)
)
assert actual_completed == [True]
assert actual_data == [(3, 6)]
| [
"[email protected]"
] | |
28d7d37773b000b74a0651e75715b1992064c925 | 5d5f6ba3bdcb52b4750a5f28afa8a1a1019bfc9e | /python_basics/python_fundamentals/functionsIntermediate/functionsIntermediate1.py | d4c0b59e7de7fe8d6ba8eb493267361224b8c5de | [] | no_license | eDiazGtz/pythonLearning | 06e96f2f5a6e48ac314cb815cf9fbf65d0b7c2c8 | 57d7b2292cf5d9769cce9adf765962c3c0930d6c | refs/heads/master | 2023-06-18T02:16:09.293375 | 2021-05-03T18:09:52 | 2021-05-03T18:09:52 | 335,090,531 | 0 | 0 | null | 2021-05-03T18:09:53 | 2021-02-01T21:35:24 | Python | UTF-8 | Python | false | false | 1,200 | py | import random
# random.random() returns a random floating number between 0.000 and 1.000
# random.random() * 50 returns a random floating number between 0.000 and 50.000
# random.random() * 25 + 10 returns a random floating number between 10.000 and 35.000
# round(num) returns the rounded integer value of num 0.5 round up
#print(randInt()) # should print a random integer between 0 to 100
#print(randInt(max=50)) # should print a random integer between 0 to 50
#print(randInt(min=50)) # should print a random integer between 50 to 100
#print(randInt(min=50, max=500)) # should print a random integer between 50 and 500
def randInt(min=0, max=100):
    span = max - min
    if span < 0:
        return "Min must be less than or equal to Max"
    num = round(random.random() * span + min)
return num
print(randInt()) # should print a random integer between 0 to 100
print(randInt(max=50)) # should print a random integer between 0 to 50
print(randInt(min=50)) # should print a random integer between 50 to 100
print(randInt(min=50, max=500)) # should print a random integer between 50 and 500
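# Quick sanity check of the min + random * span scaling described above
# (illustrative addition, not part of the original assignment): every sample
# should land inside the requested bounds.
samples = [randInt(min=10, max=35) for _ in range(1000)]
print(min(samples) >= 10 and max(samples) <= 35)  # should print True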
| [
"[email protected]"
] | |
4af6502a710b7d7100d9f5e384a09128caf93cb0 | 576cc83449e10fd3f98281970c46016ea7a5aea2 | /Cameo/filter.py | 90ff87c95582c437a4e823c03071524feb09e16e | [] | no_license | HotView/PycharmProjects | 215ab9edd341e3293daebcf86d97537f8cd28d75 | 61393fe5ba781a8c1216a5cbe7e0d06149a10190 | refs/heads/master | 2020-06-02T07:41:53.608742 | 2019-11-13T08:31:57 | 2019-11-13T08:31:57 | 191,085,178 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | import cv2
import numpy as np
import dateutil
| [
"[email protected]"
] | |
c8770ff0014c49e8aef32a4df572380d038204df | 23f3349e8b50f0cb3e461bbd65c1ea8dec792d0b | /2_semestr/lec_05.py | 683d1a2c6fabbc138893e0adaa2d19cb1db944a8 | [] | no_license | JacobLutin/Python_bmstu | d17866dbab0e74f0f9d600c4dbd9d53eb5c5b7be | 66fd8679de7556978b9cd1e9fd8646a8d7d6daa8 | refs/heads/master | 2020-05-29T14:40:09.310602 | 2017-03-27T05:18:58 | 2017-03-27T05:18:58 | 64,742,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,240 | py | import numpy as np
a = np.arange(12)
a1 = np.copy(a)
print("Исходная матрицы")
a2 = np.reshape(a1, (3, 4))
print(a2, '\n')
a2 = a2.T
print("Транспонированная матрица")
print(a2, '\n')
#min, max, sum, сортировка
b = np.array([[2, 8, 0], [6, 1, 3], [4, 7, 5]])
print("Новая исходная матрица\n", b, '\n')
dsum = b.sum()
dmin = b.min()
dmax = b.max()
print('Некоторые значения для всей матрицы')
print('sum=', dsum, ' min=', dmin, ' max=', dmax, '\n')
mincol = b.min(axis=0)
maxrow = b.max(axis=1)
print('Значения min и max для столбцов и строк')
print('min в столбцах = ', mincol, ' max в строках = ', maxrow, '\n')
# Функция sort описание
# sort(axis=-1, kind='quicksort', order=None)
# axis - ось, по которой идет сортировка.
# kind - тпи сортировки. Возможные значения 'quicksort', 'mergesort', 'heapsort'
c = b.copy()
c.sort(axis=0, kind='mergesort')
print('Сортировка столбцов\n', c)
print()
c = b.copy()
c.sort(axis=1, kind='mergesort')
print('Сортировка строк\n', c)
print()
| [
"[email protected]"
] | |
aa0c4ed9935283356909351e267db22a24e3bf0b | 913e24ea110f839c73363bc1aac9673e561fa5f8 | /gym_environments/widowx_env/envs/v41_widowx_pybullet_env_fixedGoal_actionStepCoeff25_noBounds_1joint_reward16.py | 08772e2ee4242328bac40c5c52cdb93a98e9dff2 | [
"MIT"
] | permissive | PierreExeter/WidowX-reacher | 24e2b3f72e9aec24a9a61e6a8958c200e0dbe893 | 560c6779dc91a887191f344c43de24926ba75b4d | refs/heads/master | 2023-03-06T13:48:12.810858 | 2021-02-22T15:36:52 | 2021-02-22T15:36:52 | 264,480,232 | 4 | 0 | MIT | 2021-02-22T15:27:44 | 2020-05-16T16:36:53 | Python | UTF-8 | Python | false | false | 17,140 | py | import gym
from gym import error, spaces, utils
from gym.utils import seeding
from numbers import Number
from collections import OrderedDict
import pybullet as p
import pybullet_data
import os
import numpy as np
import random
# ENVIRONMENT CONFIGURATION
NEUTRAL_VALUES = [0.015339807878856412, -1.4839419194602816,
1.4971652489763858, -0.008369006790373335, -0.08692557798018634, .027]
RESET_VALUES = [0.015339807878856412, -1.2931458041875956,
1.0109710760673565, -1.3537670644267164, -0.07158577010132992, .027]
# AFTER 60 timesteps, moving the joint 2 by 0.01, we reach this tip position: [0.13962698, 0.00214202, 0.31920969]
# action: 0.01
# obs: [ 0.13962698 0.00214202 0.31920969 -0.69314635]
# reward: -0.009033412729705663
# done: False
# info: {'total_distance': 0.0950442672111562, 'goal position': array([0.20422488, 0.00313302, 0.24949928]), 'tip position': array([0.13962698, 0.00214202, 0.31920969]), 'joint position': array([-0.69314635]), 'current_joint_pos': array([-0.70314634], dtype=float32), 'new_joint_pos': array([-0.69314635], dtype=float32), 'joint_vel': array([0.], dtype=float32)}
# timestep: 59
# MINIMUM ACHIEVABLE DISTANCE BY PYBULLET: 4.9471871525143285e-09 m. So it is possible to reach 5e-4 (smallest success ratio)
# action: 0.01
# obs: [ 0.13962698 0.00214202 0.31920969 -0.69314635]
# reward: -2.447466072200283e-17
# done: False
# info: {'total_distance': 4.9471871525143285e-09, 'goal position': array([0.13962698, 0.00214202, 0.31920969]), 'tip position': array([0.13962698, 0.00214202, 0.31920969]), 'joint position': array([-0.69314635]), 'current_joint_pos': array([-0.70314634], dtype=float32), 'new_joint_pos': array([-0.69314635], dtype=float32), 'joint_vel': array([0.], dtype=float32)}
# timestep: 59
# RL BOUNDS
BOUNDS_XMIN = -100
BOUNDS_XMAX = 100
BOUNDS_YMIN = -100
BOUNDS_YMAX = 100
BOUNDS_ZMIN = -100
BOUNDS_ZMAX = 100
# JOINT_MIN = np.array([
# -3.1,
# -1.571,
# -1.571,
# -1.745,
# -2.617,
# 0.003
# ])
# JOINT_MAX = np.array([
# 3.1,
# 1.571,
# 1.571,
# 1.745,
# 2.617,
# 0.03
# ])
# only use joint 2
JOINT_MIN = -1.571
JOINT_MAX = 1.571
JOINT_NAMES = ['joint_1', 'joint_2', 'joint_3',
'joint_4', 'joint_5', 'gripper_joint']
SIM_START_POSITION = np.array([-0.185033226409, 0.00128528, 0.46227163])
class WidowxEnv(gym.Env):
metadata = {'render.modes': ['human']}
def __init__(self):
"""
How to initialize this environment:
        env = gym.make('replab-v0').start_sim(goal_oriented=[GOAL_ORIENTED])
If goal_oriented is true, then the environment's observations become a dict
and the goal is randomly resampled upon every reset
params:
goal_oriented: Changes some aspects of the environment for goal-oriented tasks
        start_sim connects to a pybullet physics server (GUI mode if render_bool
        is True, DIRECT mode otherwise).
self.goal is set to a fixed, randomly drawn goal if goal_oriented = False
"""
# self.obs_space_low = np.array(
# [-.16, -.15, 0.14, -3.1, -1.6, -1.6, -1.8, -3.1, 0])
# self.obs_space_high = np.array(
# [.16, .15, .41, 3.1, 1.6, 1.6, 1.8, 3.1, 0.05])
# observation_space = spaces.Box(
# low=self.obs_space_low, high=self.obs_space_high, dtype=np.float32)
# self.observation_space = observation_space
# pierre: reduce observation space
self.obs_space_low = np.array(
[-.16, -.15, 0.14, -1.6])
self.obs_space_high = np.array(
[.16, .15, .41, 1.6])
observation_space = spaces.Box(
low=self.obs_space_low, high=self.obs_space_high, dtype=np.float32)
self.observation_space = observation_space
# added by Pierre, normalize action space, cf https://stable-baselines.readthedocs.io/en/master/guide/rl_tips.html
# self.action_space = spaces.Box(low=np.array([-0.5, -0.25, -0.25, -0.25, -0.5, -0.005]) / 25,
# high=np.array([0.5, 0.25, 0.25, 0.25, 0.5, 0.005]) / 25, dtype=np.float32)
# changed by Pierre: only move joint 2
self.action_space = spaces.Box(low=np.array([-0.01]), high=np.array([0.01]), dtype=np.float32)
# PB: actions are too big and the robot moves too much
# self.action_space = spaces.Box(low=np.array([-1, -1, -1, -1, -1, -1]),
# high=np.array([1, 1, 1, 1, 1, 1]), dtype=np.float32)
self.current_pos = None
# self.goal = np.array([-.14, -.13, 0.26])
self.goal = np.array([0.13962698, 0.00214202, 0.31920969]) #[.14, .0, 0.26]) # added by Pierre: changed to feasible target by moving only joint 2
# self.set_goal(self.sample_goal_for_rollout())
# print("********goal is : ***********", self.goal)
self.start_sim(goal_oriented=False, render_bool=True)
# re-added by Pierre
def start_sim(self, goal_oriented=False, render_bool=False):
self.render_bool = render_bool
self.goal_oriented = goal_oriented
if self.render_bool:
self.physics_client = p.connect(p.GUI)
else:
self.physics_client = p.connect(p.DIRECT)
if self.goal_oriented:
self.observation_space = spaces.Dict(dict(
desired_goal=spaces.Box(low=np.array(
[-.16, -.15, 0.25]), high=np.array([.16, .15, 0.41]), dtype=np.float32),
achieved_goal=spaces.Box(low=self.obs_space_low[
:3], high=self.obs_space_high[:3], dtype=np.float32),
observation=self.observation_space
))
# p.resetSimulation()
# p.setTimeStep(0.01)
p.resetDebugVisualizerCamera(cameraDistance=0.6, cameraYaw=0, cameraPitch=-30, cameraTargetPosition=[
0.2, 0, 0.1], physicsClientId=self.physics_client) # added by Pierre
p.setAdditionalSearchPath(pybullet_data.getDataPath())
path = os.path.abspath(os.path.dirname(__file__))
self.arm = p.loadURDF(os.path.join(path, "URDFs/widowx/widowx.urdf"), useFixedBase=True)
self.sphere = p.loadURDF(os.path.join(path, "URDFs/sphere.urdf"),
useFixedBase=True) # added by Pierre
self.plane = p.loadURDF('plane.urdf') # added by Pierre
self.reset()
return self
# shared functions between both sim and robot mode
def sample_goal_for_rollout(self):
return np.random.uniform(low=np.array([-.14, -.13, 0.26]), high=np.array([.14, .13, .39]))
def set_goal(self, goal):
self.goal = goal
def step(self, action):
"""
Parameters
----------
        action : [change in joint 2 angle] (joint 2 is the only actuated joint in this environment)
Returns
-------
ob, reward, episode_over, info : tuple
ob (object) :
either current position or an observation object, depending on
the type of environment this is representing
reward (float) :
negative, squared, l2 distance between current position and
goal position
episode_over (bool) :
Whether or not we have reached the goal
info (dict) :
For now, all this does is keep track of the total distance from goal.
This is used for rlkit to get the final total distance after evaluation.
See function get_diagnostics for more info.
"""
action = np.array(action, dtype=np.float32)
self.my_action = action # added by Pierre
# modified by Pierre
self.joint_positions, self.joint_velocities = self._get_current_joint_positions()
self.new_joint_positions = self.joint_positions + action
self.new_joint_positions = np.clip(np.array(self.new_joint_positions), JOINT_MIN, JOINT_MAX)
self._force_joint_positions_training(self.new_joint_positions)
# joint_positions = self._get_current_joint_positions()
# new_joint_positions = joint_positions + action
# new_joint_positions = np.clip(np.array(new_joint_positions), JOINT_MIN, JOINT_MAX)
# self._force_joint_positions(new_joint_positions)
end_effector_pos = self._get_current_end_effector_position()
x, y, z = end_effector_pos[0], end_effector_pos[1], end_effector_pos[2]
conditions = [
x <= BOUNDS_XMAX,
x >= BOUNDS_XMIN,
y <= BOUNDS_YMAX,
y >= BOUNDS_YMIN,
z <= BOUNDS_ZMAX,
z >= BOUNDS_ZMIN
]
violated_boundary = False
for condition in conditions:
if not condition:
violated_boundary = True
break
if violated_boundary:
# if out of boundarie, don't update joint position
self._force_joint_positions_training(self.joint_positions)
self.current_pos = self._get_current_state()
return self._generate_step_tuple()
def _generate_step_tuple(self):
episode_over = False
self.total_distance_from_goal = np.linalg.norm(self.current_pos[:3] - self.goal) # np.sqrt(-reward)
reward = self._get_reward(self.goal)
# self.tip_vel = self._get_current_end_effector_velocity()
# added by Pierre
info = {}
info['total_distance'] = self.total_distance_from_goal
info['goal position'] = self.goal
info['tip position'] = self.current_pos[:3]
info['joint position'] = self.current_pos[3:]
info['current_joint_pos'] = self.joint_positions
info['new_joint_pos'] = self.new_joint_positions
info['joint_vel'] = self.joint_velocities
info['penalty'] = self.penalty
# info['tip_vel'] = self.tip_vel
# if reward > -0.0001:
# if total_distance_from_goal < 0.0005: # added by Pierre
# episode_over = True
if self.goal_oriented:
obs = self._get_obs()
return obs, reward, episode_over, info
return self.current_pos, reward, episode_over, info
def reset(self):
p.resetBasePositionAndOrientation(
self.arm, [0, 0, 0], p.getQuaternionFromEuler([np.pi, np.pi, np.pi]))
p.resetBasePositionAndOrientation(self.sphere, self.goal, p.getQuaternionFromEuler(
[np.pi, np.pi, np.pi])) # added by Pierre: move sphere to self.goal position
self._force_joint_positions(RESET_VALUES)
self.current_pos = self._get_current_state()
# commented by Pierre: don't re-sample new goal
if self.goal_oriented:
# self.set_goal(self.sample_goal_for_rollout())
return self._get_obs()
return self.current_pos
def _get_obs(self):
obs = {}
obs['observation'] = self.current_pos
obs['desired_goal'] = self.goal
obs['achieved_goal'] = self.current_pos[:3]
return obs
def sample_goals(self, num_goals):
sampled_goals = np.array(
[self.sample_goal_for_rollout() for i in range(num_goals)])
goals = {}
goals['desired_goal'] = sampled_goals
return goals
def _get_reward(self, goal):
self.beta = 10
self.penalty = self.beta * np.linalg.norm(self.my_action)
rew = - self.total_distance_from_goal - self.penalty
return rew
def render(self, mode='human', close=False):
pass
def compute_reward(self, achieved_goal, goal, info):
return - (np.linalg.norm(achieved_goal - goal)**2)
def get_diagnostics(self, paths):
"""
This adds the diagnostic "Final Total Distance" for RLkit
"""
def get_stat_in_paths(paths, dict_name, scalar_name):
if len(paths) == 0:
return np.array([[]])
if type(paths[0][dict_name]) == dict:
return [path[dict_name][scalar_name] for path in paths]
return [[info[scalar_name] for info in path[dict_name]] for path in paths]
def create_stats_ordered_dict(
name,
data,
stat_prefix=None,
always_show_all_stats=True,
exclude_max_min=False,
):
if stat_prefix is not None:
name = "{} {}".format(stat_prefix, name)
if isinstance(data, Number):
return OrderedDict({name: data})
if len(data) == 0:
return OrderedDict()
if isinstance(data, tuple):
ordered_dict = OrderedDict()
for number, d in enumerate(data):
sub_dict = create_stats_ordered_dict(
"{0}_{1}".format(name, number),
d,
)
ordered_dict.update(sub_dict)
return ordered_dict
if isinstance(data, list):
try:
iter(data[0])
except TypeError:
pass
else:
data = np.concatenate(data)
if (isinstance(data, np.ndarray) and data.size == 1
and not always_show_all_stats):
return OrderedDict({name: float(data)})
stats = OrderedDict([
(name + ' Mean', np.mean(data)),
(name + ' Std', np.std(data)),
])
if not exclude_max_min:
stats[name + ' Max'] = np.max(data)
stats[name + ' Min'] = np.min(data)
return stats
statistics = OrderedDict()
stat_name = 'total_distance'
stat = get_stat_in_paths(paths, 'env_infos', stat_name)
statistics.update(create_stats_ordered_dict('Final %s' % (stat_name), [
s[-1] for s in stat], always_show_all_stats=True,))
return statistics
# Functions only for sim mode
def _get_current_joint_positions(self):
# joint_positions = []
# joint_velocities = [] # added by Pierre
# for i in range(6):
# joint_positions.append(p.getJointState(self.arm, i)[0]) # check that's the joint angle
# joint_velocities.append(p.getJointState(self.arm, i)[1]) # added by Pierre
# return np.array(joint_positions, dtype=np.float32), np.array(joint_velocities, dtype=np.float32)
joint_positions = []
joint_velocities = [] # added by Pierre
# only return position of joint 2
joint_positions.append(p.getJointState(self.arm, 1)[0]) # check that's the joint angle
joint_velocities.append(p.getJointState(self.arm, 1)[1]) # added by Pierre
return np.array(joint_positions, dtype=np.float32), np.array(joint_velocities, dtype=np.float32)
def _get_current_end_effector_position(self):
real_position = np.array(list(p.getLinkState(self.arm, 5, computeForwardKinematics=1)[4]))
# real_position[2] = -real_position[2] #SIM z coordinates are reversed
# adjusted_position = real_position + SIM_START_POSITION
return real_position
# added by Pierre
def _get_current_end_effector_velocity(self):
real_vel = np.array(
list(p.getLinkState(self.arm, 5, computeLinkVelocity=1, computeForwardKinematics=1)[6]))
return real_vel
def _set_joint_positions(self, joint_positions):
# In SIM, gripper halves are controlled separately
joint_positions = list(joint_positions) + [joint_positions[-1]]
p.setJointMotorControlArray(
self.arm,
[0, 1, 2, 3, 4, 7, 8],
controlMode=p.POSITION_CONTROL,
targetPositions=joint_positions
)
# original function: only used at reset
def _force_joint_positions(self, joint_positions):
for i in range(5):
p.resetJointState(
self.arm,
i,
joint_positions[i]
)
for i in range(7, 9):
p.resetJointState(
self.arm,
i,
joint_positions[-1]
)
def _force_joint_positions_training(self, joint_positions):
p.resetJointState(
self.arm,
1,
joint_positions[0]
)
def _get_current_state(self):
return np.concatenate(
[self._get_current_end_effector_position(),
self._get_current_joint_positions()[0]],
axis=0)
# Functions for pickling
def __getstate__(self):
state = self.__dict__.copy()
return state
def __setstate__(self, state):
self.__dict__.update(state)
if state['render_bool']:
self.start_sim(goal_oriented=state['goal_oriented'], render_bool=False)
else:
self.start_sim(goal_oriented=state['goal_oriented'], render_bool=state['render_bool'])
self.reset()
| [
"[email protected]"
] | |
dcf32321584fe37884e0f4817db5a71e31b2c2c1 | b0c2f67b2878a312c6e6ffa5fe4158bd55dad69c | /chap4/exp4.1.py | 55fe517c4d14c71d71fb13cf69c53d6a324056ee | [] | no_license | loukey/pythonDemo | efda51be07beede0be2a8cdaae9b7e153bc790bc | 9e6f64908ccba64d32ffc58edbb8d6f8ab6bf68d | refs/heads/master | 2021-01-20T04:39:24.054749 | 2017-05-19T05:41:00 | 2017-05-19T05:41:00 | 89,711,134 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | # -*- coding: utf-8 -*-
#Example 4.1: simply printing numbers
#Interestingly, Python has no do..while or while..until loops,
#but we can still emulate them
while True:
print 'Please enter a number:'
number = input()
print number
if number==0:
break
print 'List Ended'
| [
"[email protected]"
] | |
bac598ef7573c63aeb9e39fc13f67f09c8edb748 | 747f759311d404af31c0f80029e88098193f6269 | /addons/training_doc/training_doc.py | e31f2e4105271968edebb810aead63d83aef037f | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | /home/openerp/production/extra-addons/training_doc/training_doc.py | [
"[email protected]"
] | |
e8cb6b230d208935d065fedcf70f0c591e8ba666 | 8bdd86dd0ae6b6f7aae17ff0ef2d887afd06d2fa | /examples/sharecuts.py | e78cac870e69efa96b9030c63a0ef69e72d5fb6a | [
"MIT"
] | permissive | shuxiaokai/looter | b0504600e4d5730eff2aab27fbe19d2fd5fb1f18 | 2be094576e31fd13123719ca94e42cb31475dffa | refs/heads/master | 2023-04-18T01:19:51.827004 | 2020-05-17T08:11:28 | 2020-05-17T08:11:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 849 | py | """
Leaderboard of shortcuts from the Sharecuts community (sharecuts.cn)
"""
from pprint import pprint
import requests
import looter as lt
domain = 'https://sharecuts.cn'
total = []
def crawl(url):
items = requests.get(url, headers=lt.DEFAULT_HEADERS).json()
for item in items:
data = {}
data['name'] = item['name']
data['category'] = item['Category']['name']
data['note'] = item['note']
data['author'] = item['User']['nickname']
data['url'] = item['url']
data['downloads'] = item['downloads_count']
data['votes'] = item['votes_count']
data['comments'] = item['comments_count']
pprint(data)
total.append(data)
if __name__ == '__main__':
task = f'{domain}/api/shortcuts/hot?offset=0&limit=1025'
crawl(task)
lt.save(total, name='sharecuts.csv', sort_by='votes', order='desc')
| [
"[email protected]"
] | |
6075bf7ba52d2c689ce7e8d799b2bdfa2bb43e1b | 4ad06bae18751fd71df145d126e3624ea90e05e6 | /flat_sharp/interpolation.py | b8f5c77d79fd72a53d69363fa13955e41a1408be | [] | no_license | daniellengyel/flat_sharp | 04d82399e44d178e52c56acf1ba2ff3a75e4c27f | 4a1e3f4abbebc7a5342aaa63080493b77aff5677 | refs/heads/master | 2021-04-22T16:14:26.238625 | 2020-07-09T13:03:44 | 2020-07-09T13:03:44 | 249,861,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,933 | py | import numpy as np
from utils import *
from data_getters import *
from postprocessing import *
import copy
import torch
def interpolate_models(model1, model2, beta):
params1 = model1.named_parameters()
params2 = model2.named_parameters()
new_model = copy.deepcopy(model2)
new_params = new_model.named_parameters()
dict_new_params = dict(new_params)
for name1, param1 in params1:
if name1 in dict_new_params:
dict_new_params[name1].data.copy_((1. - beta) * param1.data + beta * dict_new_params[name1].data)
return new_model
def scale_output_model(model1, alpha):
if isinstance(model1, LeNet):
last_layer_names = ["fc3.weight", "fc3.bias"]
else:
last_layer_names = ["fc2.weight", "fc2.bias"]
params1 = model1.named_parameters()
new_model = copy.deepcopy(model1)
new_params = new_model.named_parameters()
dict_new_params = dict(new_params)
for name1, param1 in params1:
if name1 in last_layer_names:
dict_new_params[name1].data.copy_(alpha * param1.data)
return new_model
def T_alpha_models(model, num_inter_models, alpha_range):
    # Returns one copy of `model` per alpha in alpha_range.
    # Assumption: the T_alpha transform used here scales every parameter tensor
    # by alpha; swap in scale_output_model(model, alpha) if only the output
    # layer should be scaled.
    inter_models_arr = []
    alphas = np.linspace(alpha_range[0], alpha_range[1], num_inter_models)
    for alpha in alphas:
        new_model = copy.deepcopy(model)
        dict_new_params = dict(new_model.named_parameters())
        for name1, param1 in model.named_parameters():
            if name1 in dict_new_params:
                dict_new_params[name1].data.copy_(alpha * param1.data)
        inter_models_arr.append(new_model)
    return inter_models_arr
def get_loss_grad(net, criterion, data):
inputs, labels = data
# Compute gradients for input.
inputs.requires_grad = True
net.zero_grad()
# forward + backward + optimize
outputs = net(inputs)
loss = criterion(outputs.float(), labels)
loss.backward(retain_graph=True)
param_grads = get_grad_params_vec(net)
return loss, torch.norm(param_grads)
def get_model_interpolate_arr(model_a, model_b, num_inter_models, beta_bound=None):
inter_models_arr = []
if beta_bound is None:
beta_bound = [0, 1]
betas = np.linspace(beta_bound[0], beta_bound[1], num_inter_models)
for beta in betas:
curr_model = interpolate_models(model_a, model_b, beta)
inter_models_arr.append(curr_model)
return inter_models_arr
def get_model_interpolate_2d(offset, v1, v2, num_inter_models, alpha_bound, beta_bound, func):
X = np.linspace(alpha_bound[0], alpha_bound[1], num_inter_models)
Y = np.linspace(beta_bound[0], beta_bound[1], num_inter_models)
v1_net = vec_to_net(v1, offset)
v2_net = vec_to_net(v2, offset)
v1_dict = dict(v1_net.named_parameters())
v2_dict = dict(v2_net.named_parameters())
val_arr = []
for x in X:
curr_arr = []
for y in Y:
curr_model = copy.deepcopy(offset)
dict_curr_model = dict(curr_model.named_parameters())
for name1, param1 in offset.named_parameters():
dict_curr_model[name1].data.copy_(dict_curr_model[name1].data + x * v1_dict[name1].data + y * v2_dict[name1].data)
to_append = func(curr_model)
curr_arr.append(to_append)
val_arr.append(curr_arr)
return val_arr
def project_onto(net, v1, v2, offset):
v1_norm = v1 / torch.norm(v1)
v2_norm = v2 / torch.norm(v2)
net_vect = get_params_vec(net) - get_params_vec(offset)
alpha = torch.matmul(v1_norm, net_vect)
beta = torch.matmul(v2_norm, net_vect)
return alpha, beta
def take_n_gd_steps(net, optimizer, criterion, data, n=1, get_grad=True, v1=None, v2=None, offset=None):
grads_arr = []
projections = []
if (v1 is not None) and (v2 is not None):
projections.append(project_onto(net, v1, v2, offset))
for _ in range(n):
inputs, labels = data
# Compute gradients for input.
inputs.requires_grad = True
# zero the parameter gradients
optimizer.zero_grad()
# forward + backward + optimize
outputs = net(inputs)
loss = criterion(outputs.float(), labels)
loss.backward(retain_graph=True)
optimizer.step()
if (_ % 100) == 0:
print(_)
print(loss)
print()
if get_grad:
grads_arr.append(get_grad_params_vec(net))
if (v1 is not None) and (v2 is not None):
projections.append(project_onto(net, v1, v2, offset))
return net, grads_arr, projections
def do_the_do(model, optimizer, criterion, data_loader, num_inter_models, num_steps=1, beta_bound=None):
data = next(iter(data_loader))
model_a = copy.deepcopy(model)
    # take_n_gd_steps returns (net, grads_arr, projections); only the updated net is needed here
    model_b, _grads, _projections = take_n_gd_steps(model, optimizer, criterion, data, n=num_steps)
inter_models = get_model_interpolate_arr(model_a, model_b, num_inter_models, beta_bound=beta_bound)
return inter_models
exp_id = "1589992134.56161"
if __name__ == "__main__":
# get data
train_data, test_data = get_postprocessing_data(experiment_folder, vectorized=True)
train_loader = DataLoader(train_data, batch_size=10000, shuffle=True) # fix the batch size
test_loader = DataLoader(test_data, batch_size=len(test_data))
criterion = torch.nn.CrossEntropyLoss()
cfs_dict = exp_dict["stuff"]["configs"].loc[exp_id].to_dict()
nets = get_nets(cfs_dict)
optimizers = get_optimizers(cfs_dict)(nets)
inter_nets = []
for nn_idx in range(len(nets)):
inter_nets.append(do_the_do(nets[nn_idx], optimizers[nn_idx], criterion, train_loader, 20))
for nn_index in range(len(nets)):
y_val = inter_nets[nn_index][1][:, 1]
plt.plot(list(range(len(y_val))), y_val)
plt.show()
| [
"[email protected]"
] | |
3c2914aeeb137940e884af34f7f4ae1b9a1cb124 | 306d2a92fb331aec6ddf0794b538d6e3385a0df9 | /app/api/news/views.py | 4821c74644481851dbbc7b49363e6c122d7dddf6 | [] | no_license | Zarinabonu/ForceApp | f343d3a52aee08890230c5425c9e238df99c5a7f | 13f8e8613999c4850fc6f0bfcec66f897eecbe4a | refs/heads/master | 2020-12-10T08:00:25.072289 | 2020-01-20T13:14:07 | 2020-01-20T13:14:07 | 233,540,795 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | from rest_framework.generics import ListAPIView
from app.api.news.serializers import NewsSerializer
from app.model import News
class NewsListAPIView(ListAPIView):
serializer_class = NewsSerializer
def get_queryset(self):
qs = News.objects.all()
v = self.request.GET.get('-views')
l_seen = self.request.GET.get('last_seen')
if v:
qs = qs.order_by('views')
if l_seen:
qs = qs.order_by('-created')
return qs
| [
"[email protected]"
] | |
40ac13a7d56e1369c096d6b0d9a77961d16430bf | 443585e4fc146308b18bc2f9234d0947da38d3e5 | /input_output/2.py | 8e66671a1fb57677c88f36c9a0d41923421258d1 | [] | no_license | ggyudongggyu/20201208commit | b524c4a7fb241cacaacffa5882c55d1d0ccba11f | fbb58a8ed06f454a2a79a9b8c75deabaec62b317 | refs/heads/master | 2023-02-02T21:59:51.518218 | 2020-12-24T14:32:21 | 2020-12-24T14:32:21 | 319,578,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31 | py | in_str = input()
print(in_str)
| [
"[email protected]"
] | |
d57f83bffdfb4d3b6a1515261b2d121eadb2561f | 8ed86b8e9c451abcb2ce0ddf2f2067c11f3993d8 | /osmnx/simplify.py | 9a7f930496b164cd592daeb1d16d9989edec354f | [
"MIT"
] | permissive | surfcao/osmnx | 65830096c21b8353a536f776dfedba7de20eac4c | 51c9addb42425657fa6b11c7442f79f10b9e3e22 | refs/heads/master | 2021-01-19T23:32:40.068378 | 2017-04-19T20:22:01 | 2017-04-19T20:22:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,568 | py | ###################################################################################################
# Module: simplify.py
# Description: Simplify and correct network topology
# License: MIT, see full license in LICENSE.txt
# Web: https://github.com/gboeing/osmnx
###################################################################################################
import time
import logging as lg
from shapely.geometry import Point, LineString
from .utils import log
def is_endpoint(G, node, strict=True):
"""
Return True if the node is a "real" endpoint of an edge in the network, otherwise False.
OSM data includes lots of nodes that exist only as points to help streets bend around curves.
An end point is a node that either:
1) is its own neighbor, ie, it self-loops.
2) or, has no incoming edges or no outgoing edges, ie, all its incident edges point inward or all its incident edges point outward.
3) or, it does not have exactly two neighbors and degree of 2 or 4.
4) or, if strict mode is false, if its edges have different OSM IDs.
Parameters
----------
G : networkx multidigraph
node : int
the node to examine
strict : bool
if False, allow nodes to be end points even if they fail all other rules but have edges with different OSM IDs
Returns
-------
bool
"""
neighbors = set(list(G.predecessors(node)) + list(G.successors(node)))
n = len(neighbors)
d = G.degree(node)
if node in neighbors:
# if the node appears in its list of neighbors, it self-loops. this is always an endpoint.
return True
# if node has no incoming edges or no outgoing edges, it must be an end point
elif G.out_degree(node)==0 or G.in_degree(node)==0:
return True
elif not (n==2 and (d==2 or d==4)):
# else, if it does NOT have 2 neighbors AND either 2 or 4 directed edges, it is an endpoint
# either it has 1 or 3+ neighbors, in which case it is a dead-end or an intersection of multiple streets
# or it has 2 neighbors but 3 degree (indicating a change from oneway to twoway)
# or more than 4 degree (indicating a parallel edge) and thus is an endpoint
return True
elif not strict:
# non-strict mode
osmids = []
# add all the edge OSM IDs for incoming edges
for u in G.predecessors(node):
for key in G.edge[u][node]:
osmids.append(G.edge[u][node][key]['osmid'])
# add all the edge OSM IDs for outgoing edges
for v in G.successors(node):
for key in G.edge[node][v]:
osmids.append(G.edge[node][v][key]['osmid'])
# if there is more than 1 OSM ID in the list of edge OSM IDs then it is an endpoint, if not, it isn't
return len(set(osmids)) > 1
else:
# if none of the preceding rules returned true, then it is not an endpoint
return False
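def _is_endpoint_example():
    """Illustrative sketch on a toy graph (not OSM data): in a directed path
    a -> b -> c, the interstitial node b is not an endpoint while a and c are."""
    import networkx as nx
    G = nx.MultiDiGraph()
    G.add_edge('a', 'b', osmid=1)
    G.add_edge('b', 'c', osmid=1)
    return [is_endpoint(G, node) for node in ['a', 'b', 'c']]  # [True, False, True]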
def build_path(G, node, endpoints, path):
"""
Recursively build a path of nodes until you hit an endpoint node.
Parameters
----------
G : networkx multidigraph
node : int
the current node to start from
endpoints : set
the set of all nodes in the graph that are endpoints
path : list
the list of nodes in order in the path so far
Returns
-------
paths_to_simplify : list
"""
# for each successor in the passed-in node
for successor in G.successors(node):
if not successor in path:
# if this successor is already in the path, ignore it, otherwise add it to the path
path.append(successor)
if not successor in endpoints:
# if this successor is not an endpoint, recursively call build_path until you find an endpoint
path = build_path(G, successor, endpoints, path)
else:
# if this successor is an endpoint, we've completed the path, so return it
return path
if (not path[-1] in endpoints) and (path[0] in G.successors(path[-1])):
# if the end of the path is not actually an endpoint and the path's first node is a successor of the
# path's final node, then this is actually a self loop, so add path's first node to end of path to close it
path.append(path[0])
return path
def get_paths_to_simplify(G, strict=True):
"""
Create a list of all the paths to be simplified between endpoint nodes.
The path is ordered from the first endpoint, through the interstitial nodes,
to the second endpoint.
Parameters
----------
G : networkx multidigraph
strict : bool
if False, allow nodes to be end points even if they fail all other rules but have edges with different OSM IDs
Returns
-------
paths_to_simplify : list
"""
# first identify all the nodes that are endpoints
start_time = time.time()
endpoints = set([node for node in G.nodes() if is_endpoint(G, node, strict=strict)])
log('Identified {:,} edge endpoints in {:,.2f} seconds'.format(len(endpoints), time.time()-start_time))
start_time = time.time()
paths_to_simplify = []
# for each endpoint node, look at each of its successor nodes
for node in endpoints:
for successor in G.successors(node):
if not successor in endpoints:
# if the successor is not an endpoint, build a path from the endpoint node to the next endpoint node
try:
path = build_path(G, successor, endpoints, path=[node, successor])
paths_to_simplify.append(path)
except RuntimeError:
log('Recursion error: exceeded max depth, moving on to next endpoint successor', level=lg.WARNING)
# recursion errors occur if some connected component is a self-contained ring in which all nodes are not end points
# handle it by just ignoring that component and letting its topology remain intact (this should be a rare occurrence)
# RuntimeError is what Python <3.5 will throw, Py3.5+ throws RecursionError but it is a subtype of RuntimeError so it still gets handled
log('Constructed all paths to simplify in {:,.2f} seconds'.format(time.time()-start_time))
return paths_to_simplify
def is_simplified(G):
"""
Determine if a graph has already had its topology simplified.
If any of its edges have a geometry attribute, we know that it has previously been simplified.
Parameters
----------
G : networkx multidigraph
Returns
-------
bool
"""
edges_with_geometry = [d for u, v, k, d in G.edges(data=True, keys=True) if 'geometry' in d]
return len(edges_with_geometry) > 0
def simplify_graph(G_, strict=True):
"""
Simplify a graph's topology by removing all nodes that are not intersections or dead-ends.
Create an edge directly between the end points that encapsulate them,
but retain the geometry of the original edges, saved as attribute in new edge
Parameters
----------
G_ : graph
strict : bool
if False, allow nodes to be end points even if they fail all other rules but have edges with different OSM IDs
Returns
-------
networkx multidigraph
"""
if is_simplified(G_):
raise Exception('This graph has already been simplified, cannot simplify it again.')
log('Begin topologically simplifying the graph...')
G = G_.copy()
initial_node_count = len(list(G.nodes()))
initial_edge_count = len(list(G.edges()))
all_nodes_to_remove = []
all_edges_to_add = []
# construct a list of all the paths that need to be simplified
paths = get_paths_to_simplify(G, strict=strict)
start_time = time.time()
for path in paths:
# add the interstitial edges we're removing to a list so we can retain their spatial geometry
edge_attributes = {}
for u, v in zip(path[:-1], path[1:]):
# there shouldn't be multiple edges between interstitial nodes
edges = G.edge[u][v]
if not len(edges) == 1:
log('Multiple edges between "{}" and "{}" found when simplifying'.format(u, v), level=lg.WARNING)
# the only element in this list as long as above assertion is True (MultiGraphs use keys (the 0 here), indexed with ints from 0 and up)
edge = edges[0]
for key in edge:
if key in edge_attributes:
# if this key already exists in the dict, append it to the value list
edge_attributes[key].append(edge[key])
else:
# if this key doesn't already exist, set the value to a list containing the one value
edge_attributes[key] = [edge[key]]
for key in edge_attributes:
# don't touch the length attribute, we'll sum it at the end
if len(set(edge_attributes[key])) == 1 and not key == 'length':
# if there's only 1 unique value in this attribute list, consolidate it to the single value (the zero-th)
edge_attributes[key] = edge_attributes[key][0]
elif not key == 'length':
# otherwise, if there are multiple values, keep one of each value
edge_attributes[key] = list(set(edge_attributes[key]))
# construct the geometry and sum the lengths of the segments
edge_attributes['geometry'] = LineString([Point((G.node[node]['x'], G.node[node]['y'])) for node in path])
edge_attributes['length'] = sum(edge_attributes['length'])
# add the nodes and edges to their lists for processing at the end
all_nodes_to_remove.extend(path[1:-1])
all_edges_to_add.append({'origin':path[0],
'destination':path[-1],
'attr_dict':edge_attributes})
# for each edge to add in the list we assembled, create a new edge between the origin and destination
for edge in all_edges_to_add:
G.add_edge(edge['origin'], edge['destination'], **edge['attr_dict'])
# finally remove all the interstitial nodes between the new edges
G.remove_nodes_from(set(all_nodes_to_remove))
msg = 'Simplified graph (from {:,} to {:,} nodes and from {:,} to {:,} edges) in {:,.2f} seconds'
log(msg.format(initial_node_count, len(list(G.nodes())), initial_edge_count, len(list(G.edges())), time.time()-start_time))
return G
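def _simplify_graph_example():
    """Illustrative usage sketch (assumes this package's graph_from_place with a
    simplify=False option is available): build a raw graph, then collapse its
    interstitial nodes so each street segment becomes one edge with a geometry."""
    from .core import graph_from_place
    G_raw = graph_from_place('Piedmont, California, USA', simplify=False)
    return simplify_graph(G_raw)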
| [
"[email protected]"
] | |
fa4627fc540dc0fe3e22751b5e32ea7167da0399 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/agc029/B/4327177.py | e0009a3dbc5483db0e2de0f6c57fab864e879af1 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | from collections import Counter
input()
A=list(map(int,input().split()))
A.sort(reverse=True)
C=Counter(A)
ans=0
for a in A:
if C[a]==0:
continue
C[a]-=1
t=2**a.bit_length()-a
if C[t]:
C[t]-=1
ans+=1
print(ans) | [
"[email protected]"
] | |
955d81f88ec11f246f6d9dcdd6f9b8a4d2744fe8 | c8dc80a0082b2d59de0e1df24b25483f55510c57 | /0x0F-python-object_relational_mapping/11-model_state_insert.py | 55472dd9df2fb8c159784fb95664ea3259b05a3a | [] | no_license | Daransoto/holbertonschool-higher_level_programming | 51d81fac1dc3a6bd0799283332a3bcf5e2480330 | 4fa5f95b462f0e22b1e87189d162f0cb8c5625b6 | refs/heads/master | 2020-07-22T23:03:01.184032 | 2020-02-13T21:36:18 | 2020-02-13T21:36:18 | 207,358,507 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | #!/usr/bin/python3
""" Inserts Louisiana state. """
import sys
from sqlalchemy import create_engine
from model_state import Base, State
from sqlalchemy.orm import sessionmaker
if __name__ == "__main__":
engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'
.format(sys.argv[1], sys.argv[2], sys.argv[3]),
pool_pre_ping=True)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
New = State(name="Louisiana")
session.add(New)
session.commit()
print(New.id)
session.close()
| [
"[email protected]"
] | |
d5b2d0a9e571234c680d803851735c7c32986bee | 62912bea20c56093f27fb2826e0f5f4a26a3ed0b | /symphony/cli/pyinventory/api/user.py | 26b7cb869b430045ee70020b452c8fdb9a7edcd2 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | sijad/magma | 5c2b6520e207f05c29f29248627b90629f1f4088 | 78b5f16432d7070a84da74b90d4f1e3f8348fa37 | refs/heads/master | 2021-04-09T06:34:15.295104 | 2020-03-20T19:28:42 | 2020-03-20T19:31:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,129 | py | #!/usr/bin/env python3
# Copyright (c) 2004-present Facebook All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from typing import List
from ..client import SymphonyClient
from ..consts import Entity, User
from ..exceptions import EntityNotFoundError
from ..graphql.edit_user_input import EditUserInput
from ..graphql.edit_user_mutation import EditUserMutation
from ..graphql.user_query import UserQuery
from ..graphql.user_status_enum import UserStatus
from ..graphql.users_query import UsersQuery
USER_ROLE = 1
def get_user(client: SymphonyClient, email: str) -> User:
"""Returns `pyinventory.consts.User` object by its email
Args:
email: the email address the user registered with
Returns:
pyinventory.consts.User object
Raises:
EntityNotFoundError: the user was not found
FailedOperationException: internal inventory error
Example:
```
user = client.get_user("[email protected]")
```
"""
result = UserQuery.execute(client, email)
user = result.user
if user is None:
raise EntityNotFoundError(entity=Entity.User, entity_name=email)
return User(
id=user.id,
auth_id=user.authID,
email=user.email,
status=user.status,
role=user.role,
)
def add_user(client: SymphonyClient, email: str, password: str) -> User:
"""Adds new user to inventory with its email and password
Args:
email: the email address of the user
password: password the user would connect with
Returns:
pyinventory.consts.User object
Raises:
EntityNotFoundError: the user was not created properly
FailedOperationException: internal inventory error
AssertionError: The user was not created for some known reason
HTTPError: Error with connection
Example:
```
user = client.add_user("[email protected]", "P0ssW!rd0f43")
```
"""
resp = client.post(
"/user/async/",
{"email": email, "password": password, "role": USER_ROLE, "networkIDs": []},
)
if not resp.ok:
error_message = resp.json().get("error", None)
if error_message is not None:
raise AssertionError(error_message)
raise
return get_user(client, email)
def deactivate_user(client: SymphonyClient, user: User) -> None:
"""Deactivate the user which would prevent the user from login in to symphony
Users in symphony are never deleted. Only de-activated.
Args:
user: user to deactivate
Raises:
FailedOperationException: internal inventory error
Example:
```
user = client.get_user("[email protected]")
client.deactivate_user(user)
```
"""
EditUserMutation.execute(
client, input=EditUserInput(id=user.id, status=UserStatus.DEACTIVATED)
)
def activate_user(client: SymphonyClient, user: User) -> None:
"""Activate the user which would allow the user to login again to symphony
Args:
user: user to activate
Raises:
FailedOperationException: internal inventory error
Example:
```
user = client.get_user("[email protected]")
client.activate_user(user)
```
"""
EditUserMutation.execute(
client, input=EditUserInput(id=user.id, status=UserStatus.ACTIVE)
)
def get_users(client: SymphonyClient) -> List[User]:
"""Get the list of users in the system (both active and deactivate)
Returns:
list of `pyinventory.consts.User` objects
Raises:
FailedOperationException: internal inventory error
Example:
```
users = client.get_users()
for user in users:
print(user.email)
```
"""
result = UsersQuery.execute(client).users
if result is None:
return []
users = []
for edge in result.edges:
node = edge.node
if node is not None:
users.append(
User(
id=node.id,
auth_id=node.authID,
email=node.email,
status=node.status,
role=node.role,
)
)
return users
def get_active_users(client: SymphonyClient) -> List[User]:
"""Get the list of the active users in the system
Returns:
list of `pyinventory.consts.User` objects
Raises:
FailedOperationException: internal inventory error
Example:
```
users = client.get_active_users()
for user in users:
print(user.email)
```
"""
users = get_users(client)
return [user for user in users if user.status == UserStatus.ACTIVE]
| [
"[email protected]"
] | |
fda194aff772871c7c4b2ea781497dc72cf05c8a | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_cryings.py | 62b48a889fcdd5f58f940b8aca110dd0c8ff2b83 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py |
from xai.brain.wordbase.verbs._cry import _CRY
#calss header
class _CRYINGS(_CRY, ):
def __init__(self,):
_CRY.__init__(self)
self.name = "CRYINGS"
self.specie = 'verbs'
self.basic = "cry"
self.jsondata = {}
| [
"[email protected]"
] | |
30df20c459875066299b819277787ea6cd268ad7 | 0fd506e2651fde07ff65ae80c12226a18d7778a2 | /wildfire/pandas_cut_test.py | d3bbc09ac55c3f2995b8e3a3325024673de427c4 | [] | no_license | MillerWu2014/remote_project | af69e20d84809ea3d1e121e5cac57715073f70d6 | 7458ec6571b5b046f07ce2f89dcb393e0dd2e478 | refs/heads/master | 2020-08-03T15:43:36.028810 | 2019-09-30T07:45:06 | 2019-09-30T07:45:06 | 211,804,058 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | import pandas as pd
x = pd.Series(range(100))
print(pd.cut(x, 50, labels=range(50))) | [
"[email protected]"
] | |
ca010878792d0bc73fec72213f7db9f251dfd0e5 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_bandaged.py | 9ef009f331d8a4fe83aa934179c391e624957156 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py |
from xai.brain.wordbase.nouns._bandage import _BANDAGE
#calss header
class _BANDAGED(_BANDAGE, ):
def __init__(self,):
_BANDAGE.__init__(self)
self.name = "BANDAGED"
self.specie = 'nouns'
self.basic = "bandage"
self.jsondata = {}
| [
"[email protected]"
] | |
cd846f89d90d6f2f5ce61fa895e49409d4e39009 | 604ffaf79c5f9c816bb1a2151ae33fbf29bca52b | /cloudstoragetui/keypress.py | 6eaf37d5320d02c08c43e2c2b52b735c33eabb6f | [
"MIT"
] | permissive | joeyism/cloud-storage-tui | 1069092b51f1d11daa033ea5896b625e42e55691 | 8fda9bc8551756e88db706944489f1bbcc95a52c | refs/heads/master | 2023-05-31T00:33:07.979555 | 2021-06-07T17:19:26 | 2021-06-07T17:19:26 | 352,346,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,773 | py | import curses
from typing import List
from cloudstoragetui.constants import KEY_QUIT, KEY_UP, KEY_DOWN, KEY_LEFT, KEY_RIGHT, KEY_ENTER, ESC, UP, DOWN, LEFT, RIGHT
from cloudstoragetui.draw import DrawnBox
from cloudstoragetui.cursor_state import CursorState
from cloudstoragetui.debug import log
def _extract_min_max(box):
min_y = box.top_left_y + 1
min_x = box.top_left_x + 1
max_y = box.length_y + box.top_left_y - 2
max_x = (box.index + 1) * box.length_x - 1
return (min_y, min_x, max_y, max_x)
def _eval_keypress(screen, key, boxes, cursor_state):
curs_y, curs_x = curses.getsyx()
box = boxes[cursor_state.column]
min_y, min_x, max_y, max_x = _extract_min_max(box)
action = None
if key in KEY_QUIT:
action = ESC
elif key in KEY_UP:
cursor_state.move_row_up(min_y)
screen.move(max(curs_y - 1, min_y), curs_x)
action = UP
elif key in KEY_DOWN:
cursor_state.move_row_down(max_y)
screen.move(min(curs_y + 1, max_y), curs_x)
action = DOWN
elif key in KEY_LEFT:
if curs_x == min_x:
cursor_state.move_column_left()
box = boxes[cursor_state.column]
min_y, min_x, max_y, max_x = _extract_min_max(box)
screen.move(min_y, min_x)
else:
screen.move(curs_y, max(curs_x - 1, min_x))
action = LEFT
elif key in KEY_RIGHT + KEY_ENTER:
cursor_state.move_column_right()
box = boxes[cursor_state.column]
screen.move(box.top_left_y + 1, box.top_left_x + 1)
action = RIGHT
screen.refresh()
return action
def eval_keypress(screen, key: int, boxes: List[DrawnBox], cursor_state: CursorState):
return _eval_keypress(screen, key, boxes, cursor_state)
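# Hypothetical usage sketch (kept as comments; `draw_boxes` and the zero-argument
# CursorState() call are assumptions, not part of this module): eval_keypress is
# meant to be driven from a curses event loop, roughly like this:
#
#     def main(screen):
#         cursor_state = CursorState()
#         boxes = draw_boxes(screen)  # some helper returning List[DrawnBox]
#         while True:
#             action = eval_keypress(screen, screen.getch(), boxes, cursor_state)
#             if action == ESC:
#                 break
#
#     curses.wrapper(main)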
| [
"[email protected]"
] | |
8020760bc5cd0a1d148739c5991cea3a09beb85f | 5aadc1f06bdb68a73bb003b23cc85af528d61bf4 | /detection_network/src/rl/ppo.py | b71a185fbb72e8e16bc734d17634f6bdea14b165 | [] | no_license | zideajang/zi_full_self_drive_system | 81dca2ca0541dfab7c021c6e3a0e58701bbf1693 | fee2e4057619a19a585fbd8b9622f69c25946be1 | refs/heads/master | 2023-09-03T02:41:35.720600 | 2021-09-27T02:37:41 | 2021-09-27T02:37:41 | 358,083,188 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,307 | py |
import torch
import torch.optim as optim
class RolloutStorage:
def __init__(self,num_steps,num_processes,action_size):
pass
class PPO(object):
def __init__(
self,
controller,
clip_param,
lr,
baseline_decay,
action_size = 18,
ppo_epoch=1,
num_mini_batch=100,
max_grad_norm=2.0,
entropy_coef=0,
num_steps=100,
num_processes=1
):
self.ppo_epoch = ppo_epoch
self.controller = controller
self.optimizer = optim.Adam(controller.parameters(),lr=lr)
self.num_mini_batch = num_mini_batch
self.clip_param = clip_param
self.max_grad_norm = max_grad_norm
self.entropy_coef = entropy_coef
self.rollouts = RolloutStorage(num_steps,num_processes,action_size)
self.baseline = None
self.decay = baseline_decay
def state_dict(self):
return {
"baseline":self.baseline,
"rollouts":self.controller.state_dict(),
"optimizer:":self.optimizer.state_dict()
}
def load_state_dict(self,states):
pass
def update(self, sample, is_train=True):
reward, action, log_prob = sample
if self.baseline is None:
self.baseline = reward
else:
self.baseline = self.decay * self.baseline + (1 - self.decay) * reward
self.rollouts.insert(action, log_prob, reward)
if not is_train:
return -1,-1
advantages = self.rollouts.rewards - self.baseline
loss_epoch = 0
entropy_epoch = 0
for _ in range(self.ppo_epoch):
data_generator = self.rollouts.generator(advantages, self.num_mini_batch)
for sample in data_generator:
(
actions_batch,
reward_batch,
old_actions_log_probs_batch,
adv_targ,
) = sample
action_log_probs, entropy = self.controller.evaluate_actions(
actions_batch
)
ratio = torch.exp(
action_log_probs - torch.from_numpy(adv_targ)
)
adv_targ_th = torch.from_numpy(adv_targ).float()
| [
"[email protected]"
] | |
1fe3fb6fa971710011542bc58df695cb0c6d7730 | c3082eb2adc43b311dd3c9ff16fd3ed9df85f266 | /python/examples/fastapi/dynamic-response/main.py | e9ecf608f59322153963fae72ce85a28b0f05e1f | [] | no_license | szabgab/slides | 78818c7138331b3ba9e221c81da3678a46efe9b3 | 63bba06678554db737602f2fbcd6510c36037e8a | refs/heads/main | 2023-08-31T07:13:51.536711 | 2023-08-29T13:17:59 | 2023-08-29T13:17:59 | 122,212,527 | 87 | 69 | null | 2023-05-19T06:55:11 | 2018-02-20T14:57:03 | Python | UTF-8 | Python | false | false | 164 | py | from fastapi import FastAPI
import datetime
app = FastAPI()
@app.get("/")
async def root():
return {"message": f"Hello World at {datetime.datetime.now()}"}
| [
"[email protected]"
] | |
a9582fe1ff3a16c1aa108f54b5ff1ae3984f5ccb | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_171/ch65_2019_06_07_01_24_18_525767.py | 79aa0ff8a8f6334244edcac2ba434f9ec46d556f | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | def acha_bigramas(string):
lista=[]
i=0
while i<len(string):
        if string[i:i+2] not in lista and len(string[i:i+2])==2:
lista.append(string[i:i+2])
i+=1
return lista | [
"[email protected]"
] | |
c5248a3cae6dcafab9c6ad505abc712db1980a72 | 74ec860957869ea48af8535bf32f9fd87cc81011 | /dna-methylation/scripts/develop/illumina450k/residuals/plot/scatter_comparison.py | 22a26b4bf6f95cc4078b5e99f2a407784322a5a3 | [] | no_license | GillianGrayson/dna-methylation | f1a0878f4aa8c917bee9e5230387d6145826fb3a | e602ba91f3d275d92aadf0f874ac6f189adf547b | refs/heads/master | 2022-02-08T03:31:22.423781 | 2022-02-01T16:50:37 | 2022-02-01T16:50:37 | 164,105,085 | 0 | 1 | null | 2020-03-20T18:08:24 | 2019-01-04T12:30:29 | Python | UTF-8 | Python | false | false | 2,721 | py | import pydnameth as pdm
import pandas as pd
import os.path
from scripts.develop.routines import *
max_rows = 10
fn = 'scatter_comparison_rows.xlsx'
rows_dict = {}
if os.path.isfile(fn):
df = pd.read_excel(fn)
tmp_dict = df.to_dict()
for key in tmp_dict:
curr_dict = tmp_dict[key]
rows_dict[key] = list(curr_dict.values())
fn = 'scatter_comparison_cols.xlsx'
cols_dict = {}
if os.path.isfile(fn):
df = pd.read_excel(fn)
tmp_dict = df.to_dict()
for key in tmp_dict:
curr_dict = tmp_dict[key]
cols_dict[key] = list(curr_dict.values())
data_bases = cols_dict['data_bases']
data_list = []
annotations_list = []
attributes_list = []
observables_list = []
data_params_list = []
for data_base in data_bases:
data = pdm.Data(
path='',
base=data_base
)
data_list.append(data)
annotations = pdm.Annotations(
name='annotations',
type='450k',
exclude='bad_cpgs',
select_dict={
'CHR': ['-X', '-Y']
}
)
annotations_list.append(annotations)
observables = pdm.Observables(
name='observables',
types={}
)
cells = pdm.Cells(
name='cells',
types='any'
)
target = get_target(data.base)
obs = get_observables_list(data.base)
data_params = get_data_params(data.base)
data_params['cells'] = ['Bcell', 'CD4T', 'CD8T', 'Gran', 'NK']
data_params['observables'] = ['gender']
attributes = pdm.Attributes(
target='age',
observables=observables,
cells=cells
)
attributes_list.append(attributes)
observables_list.append(obs)
data_params_list.append(data_params)
for run_id in range(0, len(rows_dict['items']), max_rows):
s_id = run_id
f_id = min(s_id + max_rows, len(rows_dict['items']))
curr_dict = {}
for key in rows_dict:
curr_dict[key] = rows_dict[key][s_id:f_id][::-1]
pdm.residuals_plot_scatter_comparison(
data_list=data_list,
annotations_list=annotations_list,
attributes_list=attributes_list,
observables_list=observables_list,
data_params_list=data_params_list,
rows_dict=curr_dict,
cols_dict=cols_dict,
method_params={
'line': 'no',
'fit': 'yes',
'semi_window': 4,
'box_b': 'Q1',
'box_t': 'Q99',
'legend_size': 1,
'add': 'none'
}
# method_params = {
# 'line': 'no',
# 'fit': 'no',
# 'semi_window': 4,
# 'box_b': 'Q1',
# 'box_t': 'Q99',
# 'legend_size': 1,
# 'add': 'none'
# }
)
| [
"[email protected]"
] | |
ce19c5dcc0781b0f973bef61f84f4c439f3f4947 | 924c65166eee1da93c0a0c85f067c028b1d7c6be | /deepforest/deepforest.py | 19e80e8a67f29e7e8a2266c1c25eca30a205a678 | [
"MIT"
] | permissive | geo-py/DeepForest | 4cf8e1fd742c6a52b67cb57d2f6825e149a0903b | 39cb1db4b57ca7fbb64f0f87fee0f74487e4d7e3 | refs/heads/master | 2021-04-08T09:48:58.526848 | 2020-03-19T03:04:01 | 2020-03-19T03:04:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,798 | py | """
Deepforest main module. This module holds the deepforest class for model building and training
"""
import os
import csv
import warnings
from PIL import Image
with warnings.catch_warnings():
#Suppress some of the verbose tensorboard warnings, compromise to avoid numpy version errors
warnings.filterwarnings("ignore", category=FutureWarning)
import tensorflow as tf
import pandas as pd
import cv2
import numpy as np
from matplotlib import pyplot as plt
from deepforest import get_data
from deepforest import utilities
from deepforest import predict
from deepforest import preprocess
from deepforest.retinanet_train import main as retinanet_train
from deepforest.retinanet_train import parse_args
from keras_retinanet import models
from keras_retinanet.models import convert_model
from keras_retinanet.bin.train import create_models
from keras_retinanet.preprocessing.csv_generator import CSVGenerator, _read_classes
from keras_retinanet.utils.eval import evaluate
from keras_retinanet.utils.eval import _get_detections
from keras_retinanet.utils.visualization import draw_box
class deepforest:
'''
Class for training and predicting tree crowns in RGB images
Args:
weights (str): Path to model saved on disk from keras.model.save_weights(). A new model is created and weights are copied. Default is None.
saved_model: Path to a saved model from disk using keras.model.save(). No new model is created.
Attributes:
model: A keras training model from keras-retinanet
'''
def __init__(self, weights=None, saved_model=None):
self.weights = weights
self.saved_model = saved_model
#Read config file - if a config file exists in local dir use it, if not use installed.
if os.path.exists("deepforest_config.yml"):
config_path = "deepforest_config.yml"
else:
try:
config_path = get_data("deepforest_config.yml")
except Exception as e:
raise ValueError(
"No deepforest_config.yml found either in local directory or in installed package location. {}"
.format(e))
print("Reading config file: {}".format(config_path))
self.config = utilities.read_config(config_path)
#Create a label dict, defaults to "Tree"
self.read_classes()
#release version id to flag if release is being used
self.__release_version__ = None
#Load saved model if needed
if self.saved_model:
print("Loading saved model")
#Capture user warning, not relevant here
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=UserWarning)
self.model = utilities.load_model(saved_model)
if self.weights is not None:
print("Creating model from weights")
backbone = models.backbone(self.config["backbone"])
self.model, self.training_model, self.prediction_model = create_models(
backbone.retinanet, num_classes=1, weights=self.weights)
else:
print(
"A blank deepforest object created. To perform prediction, either train or load an existing model."
)
self.model = None
def read_classes(self):
"""Read class file in case of multi-class training. If no file has been created, DeepForest assume there is 1 class, Tree"""
# parse the provided class file
self.labels = {}
try:
with open(self.classes_file, 'r') as file:
self.classes = _read_classes(csv.reader(file, delimiter=','))
for key, value in self.classes.items():
self.labels[value] = key
        except Exception:
self.labels[0] = "Tree"
def train(self,
annotations,
input_type="fit_generator",
list_of_tfrecords=None,
comet_experiment=None,
images_per_epoch=None):
'''Train a deep learning tree detection model using keras-retinanet.
        This is the main entry point for training a new model based on either existing weights or from scratch
Args:
annotations (str): Path to csv label file, labels are in the format -> path/to/image.png,x1,y1,x2,y2,class_name
comet_experiment: A comet ml object to log images. Optional.
list_of_tfrecords: Ignored if input_type != "tfrecord", list of tf records to process
input_type: "fit_generator" or "tfrecord"
images_per_epoch: number of images to override default config of # images in annotations file / batch size. Useful for debug
Returns:
model (object): A trained keras model
prediction model: with bbox nms
trained model: without nms
'''
#Test if there is a new classes file in case # of classes has changed.
self.classes_file = utilities.create_classes(annotations)
self.read_classes()
arg_list = utilities.format_args(annotations, self.classes_file, self.config,
images_per_epoch)
print("Training retinanet with the following args {}".format(arg_list))
#Train model
self.model, self.prediction_model, self.training_model = retinanet_train(
forest_object=self,
args=arg_list,
input_type=input_type,
list_of_tfrecords=list_of_tfrecords,
comet_experiment=comet_experiment)
def use_release(self, gpus=1):
        '''Use the latest DeepForest model release from GitHub and load the model, downloading the release first if it is not already on disk
        Args:
            gpus: number of gpus to parallelize, default to 1
        Returns:
            model (object): A trained keras model
'''
#Download latest model from github release
release_tag, self.weights = utilities.use_release()
#load saved model and tag release
self.__release_version__ = release_tag
print("Loading pre-built model: {}".format(release_tag))
if gpus == 1:
with warnings.catch_warnings():
                #Suppress compile warning, not relevant here
warnings.filterwarnings("ignore", category=UserWarning)
self.model = utilities.read_model(self.weights, self.config)
#Convert model
self.prediction_model = convert_model(self.model)
elif gpus > 1:
backbone = models.backbone(self.config["backbone"])
n_classes = len(self.labels.keys())
self.model, self.training_model, self.prediction_model = create_models(
backbone.retinanet,
num_classes=n_classes,
weights=self.weights,
multi_gpu=gpus)
#add to config
self.config["weights"] = self.weights
def predict_generator(self,
annotations,
comet_experiment=None,
iou_threshold=0.5,
max_detections=200,
return_plot=False):
"""Predict bounding boxes for a model using a csv fit_generator
Args:
annotations (str): Path to csv label file, labels are in the format -> path/to/image.png,x1,y1,x2,y2,class_name
iou_threshold(float): IoU Threshold to count for a positive detection (defaults to 0.5)
max_detections (int): Maximum number of bounding box predictions
            comet_experiment(object): A comet experiment object used to track results
return_plot: Whether to return prediction boxes (False) or Images (True). If True, files will be written to current working directory if model.config["save_path"] is not defined.
Return:
boxes_output: If return_plot=False, a pandas dataframe of bounding boxes for each image in the annotations file
None: If return_plot is True, images are written to save_dir as a side effect.
"""
#Format args for CSV generator
classes_file = utilities.create_classes(annotations)
arg_list = utilities.format_args(annotations, classes_file, self.config)
args = parse_args(arg_list)
#create generator
generator = CSVGenerator(
args.annotations,
args.classes,
image_min_side=args.image_min_side,
image_max_side=args.image_max_side,
config=args.config,
shuffle_groups=False,
)
if self.prediction_model:
boxes_output = []
#For each image, gather predictions
for i in range(generator.size()):
#pass image as path
plot_name = generator.image_names[i]
image_path = os.path.join(generator.base_dir, plot_name)
result = self.predict_image(image_path,
return_plot=return_plot,
score_threshold=args.score_threshold)
if return_plot:
if not self.config["save_path"]:
print(
"model.config['save_path'] is None, saving images to current working directory"
)
save_path = "."
else:
save_path = self.config["save_path"]
#Save image
fname = os.path.join(save_path, plot_name)
cv2.imwrite(fname, result)
continue
else:
#Turn boxes to pandas frame and save output
box_df = pd.DataFrame(result)
#use only plot name, not extension
box_df["plot_name"] = os.path.splitext(plot_name)[0]
boxes_output.append(box_df)
else:
raise ValueError(
"No prediction model loaded. Either load a retinanet from file, download the latest release or train a new model"
)
if return_plot:
return None
else:
#if boxes, name columns and return box data
boxes_output = pd.concat(boxes_output)
boxes_output.columns = [
"xmin", "ymin", "xmax", "ymax", "score", "label", "plot_name"
]
boxes_output = boxes_output.reindex(
columns=["plot_name", "xmin", "ymin", "xmax", "ymax", "score", "label"])
return boxes_output
def evaluate_generator(self,
annotations,
comet_experiment=None,
iou_threshold=0.5,
max_detections=200):
""" Evaluate prediction model using a csv fit_generator
Args:
annotations (str): Path to csv label file, labels are in the format -> path/to/image.png,x1,y1,x2,y2,class_name
iou_threshold(float): IoU Threshold to count for a positive detection (defaults to 0.5)
max_detections (int): Maximum number of bounding box predictions
            comet_experiment(object): A comet experiment object used to track results
Return:
mAP: Mean average precision of the evaluated data
"""
#Format args for CSV generator
classes_file = utilities.create_classes(annotations)
arg_list = utilities.format_args(annotations, classes_file, self.config)
args = parse_args(arg_list)
#create generator
validation_generator = CSVGenerator(
args.annotations,
args.classes,
image_min_side=args.image_min_side,
image_max_side=args.image_max_side,
config=args.config,
shuffle_groups=False,
)
average_precisions = evaluate(validation_generator,
self.prediction_model,
iou_threshold=iou_threshold,
score_threshold=args.score_threshold,
max_detections=max_detections,
save_path=args.save_path,
comet_experiment=comet_experiment)
# print evaluation
total_instances = []
precisions = []
for label, (average_precision, num_annotations) in average_precisions.items():
print('{:.0f} instances of class'.format(num_annotations),
validation_generator.label_to_name(label),
'with average precision: {:.4f}'.format(average_precision))
total_instances.append(num_annotations)
precisions.append(average_precision)
if sum(total_instances) == 0:
print('No test instances found.')
return
print('mAP using the weighted average of precisions among classes: {:.4f}'.format(
sum([a * b for a, b in zip(total_instances, precisions)]) /
sum(total_instances)))
mAP = sum(precisions) / sum(x > 0 for x in total_instances)
print('mAP: {:.4f}'.format(mAP))
return mAP
def predict_image(self,
image_path=None,
numpy_image=None,
return_plot=True,
score_threshold=0.05,
show=False,
color=None):
"""Predict tree crowns based on loaded (or trained) model
Args:
image_path (str): Path to image on disk
numpy_image (array): Numpy image array in BGR channel order following openCV convention
color (tuple): Color of bounding boxes in BGR order (0,0,0) black default
show (bool): Plot the predicted image with bounding boxes. Ignored if return_plot=False
return_plot: Whether to return image with annotations overlaid, or just a numpy array of boxes
Returns:
predictions (array): if return_plot, an image. Otherwise a numpy array of predicted bounding boxes, with scores and labels
"""
#Check for model save
if (self.prediction_model is None):
raise ValueError(
"Model currently has no prediction weights, either train a new model using deepforest.train, loading existing model, or use prebuilt model (see deepforest.use_release()"
)
#Check the formatting
if isinstance(image_path, np.ndarray):
raise ValueError(
"image_path should be a string, but is a numpy array. If predicting a loaded image (channel order BGR), use numpy_image argument."
)
#Check for correct formatting
#Warning if image is very large and using the release model
if numpy_image is None:
numpy_image = cv2.imread(image_path)
#Predict
prediction = predict.predict_image(self.prediction_model,
image_path=image_path,
raw_image=numpy_image,
return_plot=return_plot,
score_threshold=score_threshold,
color=color,
classes=self.labels)
#cv2 channel order to matplotlib order
if return_plot & show:
plt.imshow(prediction[:, :, ::-1])
plt.show()
return prediction
def predict_tile(self,
raster_path=None,
numpy_image=None,
patch_size=400,
patch_overlap=0.15,
iou_threshold=0.15,
return_plot=False):
"""
        For images too large to input into the model, predict_tile cuts the image into overlapping windows, predicts trees on each window and reassembles them into a single array.
Args:
raster_path: Path to image on disk
numpy_image (array): Numpy image array in BGR channel order following openCV convention
            iou_threshold: Minimum iou overlap among predictions between windows to be suppressed. Defaults to 0.15. Lower values suppress more boxes at edges.
return_plot: Should the image be returned with the predictions drawn?
Returns:
            boxes (array): if return_plot, an image. Otherwise a pandas DataFrame of predicted bounding boxes, scores and labels
"""
        if numpy_image is None:
            #Load raster as image
            raster = Image.open(raster_path)
            numpy_image = np.array(raster)
#Compute sliding window index
windows = preprocess.compute_windows(numpy_image, patch_size, patch_overlap)
#Save images to tmpdir
predicted_boxes = []
for index, window in enumerate(windows):
#Crop window and predict
crop = numpy_image[windows[index].indices()]
#Crop is RGB channel order, change to BGR
crop = crop[..., ::-1]
boxes = self.predict_image(numpy_image=crop,
return_plot=False,
score_threshold=self.config["score_threshold"])
#transform coordinates to original system
xmin, ymin, xmax, ymax = windows[index].getRect()
boxes.xmin = boxes.xmin + xmin
boxes.xmax = boxes.xmax + xmin
boxes.ymin = boxes.ymin + ymin
boxes.ymax = boxes.ymax + ymin
predicted_boxes.append(boxes)
predicted_boxes = pd.concat(predicted_boxes)
        #Non-max suppression for overlapping boxes among windows
if patch_overlap == 0:
mosaic_df = predicted_boxes
else:
with tf.Session() as sess:
print("{} predictions in overlapping windows, applying non-max supression".
format(predicted_boxes.shape[0]))
new_boxes, new_scores, new_labels = predict.non_max_suppression(
sess,
predicted_boxes[["xmin", "ymin", "xmax", "ymax"]].values,
predicted_boxes.score.values,
predicted_boxes.label.values,
max_output_size=predicted_boxes.shape[0],
iou_threshold=iou_threshold)
#Recreate box dataframe
image_detections = np.concatenate([
new_boxes,
np.expand_dims(new_scores, axis=1),
np.expand_dims(new_labels, axis=1)
],axis=1)
mosaic_df = pd.DataFrame(
image_detections,
columns=["xmin", "ymin", "xmax", "ymax", "score", "label"])
mosaic_df.label = mosaic_df.label.str.decode("utf-8")
print("{} predictions kept after non-max suppression".format(
mosaic_df.shape[0]))
if return_plot:
#Draw predictions
for box in mosaic_df[["xmin", "ymin", "xmax", "ymax"]].values:
draw_box(numpy_image, box, [0, 0, 255])
            #Maintain consistency with predict_image
return numpy_image
else:
return mosaic_df
def plot_curves(self):
"""Plot training curves"""
if self.history:
# Plot training & validation regression loss values
            fig, axes = plt.subplots(nrows=1, ncols=3)
axes = axes.flatten()
#Regression Loss
axes[0].plot(self.history.history['regression_loss'])
axes[0].set_title('Bounding Box Loss')
axes[0].set_ylabel('Loss')
axes[0].set_xlabel('Epoch')
#Classification Loss
axes[1].plot(self.history.history['classification_loss'])
axes[1].set_title('Classification Loss')
axes[1].set_ylabel('Loss')
axes[1].set_xlabel('Epoch')
# Plot validation mAP
if "mAP" in self.history.history.keys():
axes[2].plot(self.history.history['mAP'])
axes[2].set_title('Validation: Mean Average Precision')
axes[2].set_ylabel('mAP')
axes[2].set_xlabel('Epoch')
plt.show()
else:
print("No training history found.")
return None
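# Illustrative usage sketch (kept as comments; assumes network access for the release
# weights and an RGB image "sample.jpg" on disk):
#
#     from deepforest import deepforest
#     model = deepforest.deepforest()
#     model.use_release()
#     boxes = model.predict_image(image_path="sample.jpg", return_plot=False)
#     tile_boxes = model.predict_tile(raster_path="large_tile.tif", patch_size=400)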
| [
"[email protected]"
] | |
8df0a5de30770486d65f5750ddf7332158529917 | 385ce240ae264a1449079c21bd0c4cbe7c0fe3b8 | /myowntests/ifelseladder.py | 6be8802a946907017b902d6c6c70418b5968deb2 | [] | no_license | Maxcousin123/Python-workspace | 3ed60ae80d790b5c055bf47872ff0fdd39f4ec58 | 326b023190a12e082dcb35ae5ab8ef644c32159b | refs/heads/master | 2022-11-24T11:05:08.707003 | 2020-07-29T06:32:08 | 2020-07-29T06:32:08 | 283,415,557 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 576 | py | maths=int(input('fill your math grade'))
physics=int(input('fill your physics grade'))
chemistry=int(input('fill your chemistry grade'))
av=(maths+physics+chemistry)/3
if maths<35:
print('Exam Failed')
else:print('Exam passed')
if physics<35:
print('Exam failed')
else:print('Exam passed')
if chemistry<35:
print('Exam failed')
else:print('Exam passed')
if maths<35 and physics<35 and chemistry<35:
print('Exams failed')
elif av<=59:
print('your grade is c')
elif 59<av<=69:
print('your grade is b')
else:
print('your grade is a')
#69
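# Worked example (matches the ladder above): grades 70, 80 and 90 pass every subject,
# the average is 80, so the final branch prints 'your grade is a'.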
| [
"[email protected]"
] | |
e730f6b2b0eca72e7e22623e746eb8a5f9fc2013 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/I_to_M_Gk3_no_pad/pyramid_2side/bce_s001_tv_s0p1_L8/step09_2side_L8.py | 150ed37ed776a944b01f8ce846b25ba5de404b7b | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,319 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
from tkinter import S
code_exe_path = os.path.realpath(__file__) ### 目前執行 step10_b.py 的 path
code_exe_path_element = code_exe_path.split("\\") ### 把 path 切分 等等 要找出 kong_model 在第幾層
kong_layer = code_exe_path_element.index("kong_model2") ### 找出 kong_model2 在第幾層
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### 定位出 kong_model2 的 dir
import sys ### 把 kong_model2 加入 sys.path
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
from step08_b_use_G_generate_I_to_M import I_Generate_M_see
from step09_c_train_step import train_step_Single_output_I_to_M
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME
import time
start_time = time.time()
###############################################################################################################################################################################################
###############################################################################################################################################################################################
########################################################### Block1
### Block1
#########################################################################################
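### Reading guide (an interpretation inferred from the names below, not from the original
### comments): each 17-entry list gives the conv-block count per level, matching
### depth_level=8 (8 encoder levels, a bottleneck, 8 decoder levels), symmetric around the
### bottleneck; "1side_N" keeps N non-zero levels from each end and "2side_M" raises the
### outermost M of those levels from 1 block to 2 blocks.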
pyramid_1side_1__2side_0 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
pyramid_1side_1__2side_1 = [2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2]
pyramid_1side_2__2side_0 = [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1]
pyramid_1side_2__2side_1 = [2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2]
pyramid_1side_2__2side_2 = [2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2]
pyramid_1side_3__2side_0 = [1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1]
pyramid_1side_3__2side_1 = [2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2]
pyramid_1side_3__2side_2 = [2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2]
pyramid_1side_3__2side_3 = [2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2]
pyramid_1side_4__2side_0 = [1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1]
pyramid_1side_4__2side_1 = [2, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2]
pyramid_1side_4__2side_2 = [2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2]
pyramid_1side_4__2side_3 = [2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2]
pyramid_1side_4__2side_4 = [2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2]
pyramid_1side_5__2side_0 = [1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
pyramid_1side_5__2side_1 = [2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2]
pyramid_1side_5__2side_2 = [2, 2, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2]
pyramid_1side_5__2side_3 = [2, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2]
pyramid_1side_5__2side_4 = [2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2]
pyramid_1side_5__2side_5 = [2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2]
pyramid_1side_6__2side_0 = [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]
pyramid_1side_6__2side_1 = [2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2]
pyramid_1side_6__2side_2 = [2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2]
pyramid_1side_6__2side_3 = [2, 2, 2, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2]
pyramid_1side_6__2side_4 = [2, 2, 2, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 2]
pyramid_1side_6__2side_5 = [2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 2]
pyramid_1side_6__2side_6 = [2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2]
pyramid_1side_7__2side_0 = [1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1]
pyramid_1side_7__2side_1 = [2, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 2]
pyramid_1side_7__2side_2 = [2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2]
pyramid_1side_7__2side_3 = [2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2]
pyramid_1side_7__2side_4 = [2, 2, 2, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2]
pyramid_1side_7__2side_5 = [2, 2, 2, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 2, 2, 2]
pyramid_1side_7__2side_6 = [2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 2, 2, 2]
pyramid_1side_7__2side_7 = [2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2]
pyramid_1side_8__2side_0 = [1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1]
pyramid_1side_8__2side_1 = [2, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2]
pyramid_1side_8__2side_2 = [2, 2, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 2, 2]
pyramid_1side_8__2side_3 = [2, 2, 2, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 2, 2, 2]
pyramid_1side_8__2side_4 = [2, 2, 2, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 2, 2]
pyramid_1side_8__2side_5 = [2, 2, 2, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 2, 2, 2]
pyramid_1side_8__2side_6 = [2, 2, 2, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 2, 2, 2]
pyramid_1side_8__2side_7 = [2, 2, 2, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 2, 2, 2]
pyramid_1side_8__2side_8 = [2, 2, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 2]
pyramid_1side_9__2side_0 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
pyramid_1side_9__2side_1 = [2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2]
pyramid_1side_9__2side_2 = [2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]
pyramid_1side_9__2side_3 = [2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2]
pyramid_1side_9__2side_4 = [2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2]
pyramid_1side_9__2side_5 = [2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2]
pyramid_1side_9__2side_6 = [2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2]
pyramid_1side_9__2side_7 = [2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2]
pyramid_1side_9__2side_8 = [2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2]
pyramid_1side_9__2side_9 = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
#########################################################################################
ch032_pyramid_1side_1__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_1__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_2__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_2__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_3__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_3__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_3__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_4__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_4__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_4__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_4__2side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_5__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_5__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_5__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_5__2side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_5__2side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_6__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_6__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_6__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_6__2side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_6__2side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_6__2side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_7__2side_7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_8__2side_8 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_1, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_2, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_3, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_4, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_5, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_6, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_7, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_8 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_8, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
ch032_pyramid_1side_9__2side_9 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=8, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_9__2side_9, ch_upper_bound= 2 ** 14).set_gen_op(I_Generate_M_see).set_train_step(train_step_Single_output_I_to_M)
#########################################################################################
###############################################################################################################################################################################################
if(__name__ == "__main__"):
import numpy as np
print("build_model cost time:", time.time() - start_time)
data = np.zeros(shape=(1, 512, 512, 1))
use_model = ch032_pyramid_1side_4__2side_2
use_model = use_model.build()
result = use_model.generator(data)
print(result.shape)
from kong_util.tf_model_util import Show_model_weights
Show_model_weights(use_model.generator)
use_model.generator.summary()
| [
"[email protected]"
] | |
a30c882225f0729f7727634a091398bc4b341d00 | a58fcf9467749de7d269c5b17430773069e29791 | /designate/exceptions.py | bd807f15966c85208c564dccc08126f802c00c8e | [
"Apache-2.0"
] | permissive | Woody89/designate-private | 586df6c28a2da573663487e4728c3fddfef095af | 0a6ed5a1d7cdac5cb1e9dec8fd3ddfb9a77c58f5 | refs/heads/master | 2021-01-22T19:22:49.391876 | 2017-08-19T06:16:53 | 2017-08-19T06:16:53 | 100,774,211 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,905 | py | # Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
class Base(Exception):
error_code = 500
error_type = None
error_message = None
errors = None
def __init__(self, *args, **kwargs):
self.errors = kwargs.pop('errors', None)
self.object = kwargs.pop('object', None)
super(Base, self).__init__(*args, **kwargs)
if len(args) > 0 and isinstance(args[0], six.string_types):
self.error_message = args[0]
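# Minimal illustration (hypothetical usage, not taken from designate itself): subclasses
# override error_code/error_type, and the first positional argument becomes
# error_message via this __init__:
#
#     try:
#         raise Base("something went wrong")
#     except Base as e:
#         print(e.error_code, e.error_type, e.error_message)
#     # -> 500 None something went wrong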
class Backend(Exception):
pass
class RelationNotLoaded(Base):
error_code = 500
error_type = 'relation_not_loaded'
def __init__(self, *args, **kwargs):
self.relation = kwargs.pop('relation', None)
super(RelationNotLoaded, self).__init__(*args, **kwargs)
self.error_message = "%(relation)s is not loaded on %(object)s" % \
{"relation": self.relation, "object": self.object.obj_name()}
def __str__(self):
return self.error_message
class AdapterNotFound(Base):
error_code = 500
error_type = 'adapter_not_found'
class NSD4SlaveBackendError(Backend):
pass
class NotImplemented(Base, NotImplementedError):
pass
class XFRFailure(Base):
pass
class ConfigurationError(Base):
error_type = 'configuration_error'
class UnknownFailure(Base):
error_code = 500
error_type = 'unknown_failure'
class CommunicationFailure(Base):
error_code = 504
error_type = 'communication_failure'
class NeutronCommunicationFailure(CommunicationFailure):
"""
Raised in case one of the alleged Neutron endpoints fails.
"""
error_type = 'neutron_communication_failure'
class NoFiltersConfigured(ConfigurationError):
error_code = 500
error_type = 'no_filters_configured'
class NoServersConfigured(ConfigurationError):
error_code = 500
error_type = 'no_servers_configured'
class MultiplePoolsFound(ConfigurationError):
error_code = 500
error_type = 'multiple_pools_found'
class NoPoolTargetsConfigured(ConfigurationError):
error_code = 500
error_type = 'no_pool_targets_configured'
class OverQuota(Base):
error_code = 413
error_type = 'over_quota'
expected = True
class QuotaResourceUnknown(Base):
error_type = 'quota_resource_unknown'
class InvalidObject(Base):
error_code = 400
error_type = 'invalid_object'
expected = True
class BadRequest(Base):
error_code = 400
error_type = 'bad_request'
expected = True
class EmptyRequestBody(BadRequest):
error_type = 'empty_request_body'
expected = True
class InvalidUUID(BadRequest):
error_type = 'invalid_uuid'
class InvalidRecord(BadRequest):
error_type = 'invalid_record'
class NetworkEndpointNotFound(BadRequest):
error_type = 'no_endpoint'
error_code = 403
class MarkerNotFound(BadRequest):
error_type = 'marker_not_found'
class NotEqual(Base):
error_type = 'udn_record_count not equals record in db'
class NoChange(Base):
error_type = 'No changes'
class ValueError(BadRequest):
error_type = 'value_error'
class InvalidMarker(BadRequest):
error_type = 'invalid_marker'
class InvalidSortDir(BadRequest):
error_type = 'invalid_sort_dir'
class InvalidLimit(BadRequest):
error_type = 'invalid_limit'
class InvalidSortKey(BadRequest):
error_type = 'invalid_sort_key'
class InvalidJson(BadRequest):
error_type = 'invalid_json'
class NoneIpAddress(BadRequest):
error_type = 'none_ip_address'
class InvalidOperation(BadRequest):
error_code = 400
error_type = 'invalid_operation'
class UnsupportedAccept(BadRequest):
error_code = 406
error_type = 'unsupported_accept'
class UnsupportedContentType(BadRequest):
error_code = 415
error_type = 'unsupported_content_type'
class InvalidZoneName(Base):
error_code = 400
error_type = 'invalid_zone_name'
expected = True
class InvalidAclName(Base):
error_code = 400
error_type = 'invalid_acl_name'
expected = True
class InvalidRecordSetName(Base):
error_code = 400
error_type = 'invalid_recordset_name'
expected = True
class InvalidRecordSetLocation(Base):
error_code = 400
error_type = 'invalid_recordset_location'
expected = True
class InvaildZoneTransfer(Base):
error_code = 400
error_type = 'invalid_zone_transfer_request'
class InvalidTTL(Base):
error_code = 400
error_type = 'invalid_ttl'
class ZoneHasSubZone(Base):
error_code = 400
error_type = 'zone_has_sub_zone'
class Forbidden(Base):
error_code = 403
error_type = 'forbidden'
expected = True
class IllegalChildZone(Forbidden):
error_type = 'illegal_child'
class IllegalParentZone(Forbidden):
error_type = 'illegal_parent'
class IncorrectZoneTransferKey(Forbidden):
error_type = 'invalid_key'
class Duplicate(Base):
expected = True
error_code = 409
error_type = 'duplicate'
class DuplicateServiceStatus(Duplicate):
error_type = 'duplicate_service_status'
class DuplicateQuota(Duplicate):
error_type = 'duplicate_quota'
class DuplicateServer(Duplicate):
error_type = 'duplicate_server'
class DuplicateTsigKey(Duplicate):
error_type = 'duplicate_tsigkey'
class DuplicateZone(Duplicate):
error_type = 'duplicate_zone'
class DuplicateAcl(Duplicate):
error_type = 'duplicate_acl'
class DuplicateTld(Duplicate):
error_type = 'duplicate_tld'
class DuplicateRecordSet(Duplicate):
error_type = 'duplicate_recordset'
class DuplicateRecord(Duplicate):
error_type = 'duplicate_record'
class DuplicateBlacklist(Duplicate):
error_type = 'duplicate_blacklist'
class DuplicatePoolManagerStatus(Duplicate):
error_type = 'duplication_pool_manager_status'
class DuplicatePool(Duplicate):
error_type = 'duplicate_pool'
class DuplicatePoolAttribute(Duplicate):
error_type = 'duplicate_pool_attribute'
class DuplicatePoolNsRecord(Duplicate):
error_type = 'duplicate_pool_ns_record'
class DuplicatePoolNameserver(Duplicate):
error_type = 'duplicate_pool_nameserver'
class DuplicatePoolTarget(Duplicate):
error_type = 'duplicate_pool_target'
class DuplicatePoolTargetOption(Duplicate):
error_type = 'duplicate_pool_target_option'
class DuplicatePoolTargetMaster(Duplicate):
error_type = 'duplicate_pool_target_master'
class DuplicatePoolAlsoNotify(Duplicate):
error_type = 'duplicate_pool_also_notify'
class DuplicateZoneImport(Duplicate):
error_type = 'duplicate_zone_import'
class DuplicateZoneExport(Duplicate):
error_type = 'duplicate_zone_export'
class DuplicateViewDuplicate(Duplicate):
error_type = 'duplicate_view_export'
class DuplicateZdnsViewInfo(Duplicate):
error_type = 'duplicate_zdns_view_info'
class DuplicateViewZdnsView(Duplicate):
error_type = 'duplicate_view_zdns_view_association'
class DuplicateView(Duplicate):
error_type = 'duplicate_view'
class NeedView(BadRequest):
error_type = 'attributes_need_view'
class MethodNotAllowed(Base):
expected = True
error_code = 405
error_type = 'method_not_allowed'
class DuplicateZoneTransferRequest(Duplicate):
error_type = 'duplicate_zone_transfer_request'
class DuplicateZoneTransferAccept(Duplicate):
error_type = 'duplicate_zone_transfer_accept'
class DuplicateZoneAttribute(Duplicate):
error_type = 'duplicate_zone_attribute'
class DuplicateZoneMaster(Duplicate):
    error_type = 'duplicate_zone_master'
class NotFound(Base):
expected = True
error_code = 404
error_type = 'not_found'
class Failed(Base):
expected = True
error_code = 500
error_type = 'create_failed'
class ServiceStatusNotFound(NotFound):
error_type = 'service_status_not_found'
class QuotaNotFound(NotFound):
error_type = 'quota_not_found'
class ServerNotFound(NotFound):
error_type = 'server_not_found'
class TsigKeyNotFound(NotFound):
error_type = 'tsigkey_not_found'
class BlacklistNotFound(NotFound):
error_type = 'blacklist_not_found'
class ZoneNotFound(NotFound):
error_type = 'zone_not_found'
class AclNotFound(NotFound):
error_type = 'acl_not_found'
class ZoneMasterNotFound(NotFound):
error_type = 'zone_master_not_found'
class ZoneAttributeNotFound(NotFound):
error_type = 'zone_attribute_not_found'
class TldNotFound(NotFound):
error_type = 'tld_not_found'
class RecordSetNotFound(NotFound):
error_type = 'recordset_not_found'
class RecordNotFound(NotFound):
error_type = 'record_not_found'
class AllFailed(Failed):
error_type = 'all record-create failed'
class PartlyFailed(Failed):
error_type = 'some record-create failed'
class ReportNotFound(NotFound):
error_type = 'report_not_found'
class PoolManagerStatusNotFound(NotFound):
error_type = 'pool_manager_status_not_found'
class PoolNotFound(NotFound):
error_type = 'pool_not_found'
class NoValidPoolFound(NotFound):
error_type = 'no_valid_pool_found'
class PoolAttributeNotFound(NotFound):
error_type = 'pool_attribute_not_found'
class PoolNsRecordNotFound(NotFound):
error_type = 'pool_ns_record_not_found'
class PoolNameserverNotFound(NotFound):
error_type = 'pool_nameserver_not_found'
class PoolTargetNotFound(NotFound):
error_type = 'pool_target_not_found'
class PoolTargetOptionNotFound(NotFound):
error_type = 'pool_target_option_not_found'
class PoolTargetMasterNotFound(NotFound):
error_type = 'pool_target_master_not_found'
class PoolAlsoNotifyNotFound(NotFound):
error_type = 'pool_also_notify_not_found'
class ZoneTransferRequestNotFound(NotFound):
error_type = 'zone_transfer_request_not_found'
class ZoneTransferAcceptNotFound(NotFound):
error_type = 'zone_transfer_accept_not_found'
class ZoneImportNotFound(NotFound):
error_type = 'zone_import_not_found'
class ZoneExportNotFound(NotFound):
error_type = 'zone_export_not_found'
class ViewNotFound(NotFound):
error_type = 'view_not_found'
class ViewAclNotFound(NotFound):
error_type = 'view_acl_not_found'
class AclsIsNone(NotFound):
error_type = 'acl_ids_is_none'
class ParamsIsNotLegal(NotFound):
error_type = 'params_is_not_legal'
class AclidsMustBeList(NotFound):
error_type = 'acl_ids_must_be_list'
class CreateViewFailed(NotFound):
error_type = 'create_view_failed'
class LastServerDeleteNotAllowed(BadRequest):
error_type = 'last_server_delete_not_allowed'
EZDNS = {
"1": "any or none acl is read only",
"2": "acl already exists",
"3": "operate non-exist acl",
"4": "dns64 prefix should be a ipv6 addr",
"5": "invalid dns64 prefix netmask",
"6": "suffix is needed if netmask of prefix smaller than 96",
"7": "DNS64 setting already exists",
"8": "operate non-exist DNS64 setting",
"9": "tsig key already exists",
"10": "delete acl is using by view",
"11": "operate non-exist zone",
"12": "cache file not exist",
"13": "cache size too large",
"14": "operate non-exist view",
"15": "get zone from backend server failed",
"16": "zone already exists",
"17": "unsupported meta data type",
"18": "view already exists",
"19": "delete default view",
"20": "cann't modify acl of default view",
"21": "operate non-exist rr",
"22": "conflict key secret",
"23": "not supported zone type",
"24": "operate non-exist shared rr",
"25": "cann't delete the last shared rr",
"26": "operate non-exist tsig key",
"27": "reconfig dns server failed",
"28": "no rndc-confgen installed",
"29": "lack/white list already exists",
"30": "operate non-exist back/white list",
"31": "zone owner doesn't has view owner",
"32": "unsupport acl action",
"33": "no pine-control installed",
"34": "server already started",
"35": "RR format error",
"36": "zone transfer failed",
"37": "more than one ad zone owner",
"38": "update zone failed",
"39": "shared rr already exists",
"40": "add duplicate rr",
"41": "add exclusive rr",
"42": "short of glue rr",
"43": "conflict with exists cname",
"44": "delete unknown rr",
"45": "can't delete soa rr",
"46": "no ns left after delete",
"47": "delete glue needed by other rr",
"48": "reverse zone doesn't exist",
"49": "rdata is valid",
"50": "rr is out of zone",
"51": "onfigure value isn't valid",
"52": "unknown forward style",
"53": "duplicate zone master",
"54": "forwarder exists",
"55": "operate non-exist forwarder",
"56": "operate non-exist view on node",
"57": "already exists root zone",
"58": "only A/AAAA NS is allowed in hint zone",
"59": "already has root configuration",
"60": "rr type isn't supported",
"61": "can't update slave zone",
"62": "duplicate local domain policy",
"63": "zone name isn't valid",
"64": "add duplicate host",
"65": "soa serial number degraded",
"66": "root isn't support in local policy",
"67": "auth zone with same name already exists",
"68": "stub zone with same name already exists",
"69": "forward zone with same name already exists",
"70": "acl is used by view",
"71": "acl is used by AD zone",
"72": "rrl policy already exist",
"73": "non-exist rrl policy",
"74": "delete monitor strategy in use",
"75": "monitor strategy already exist",
"76": "non exist monitor strategy",
"77": "node's view querysource already exists",
"78": "node's view querysource not exist",
"79": "too much rrls(over 999)",
"100": "version is unknown",
"101": "patch file broken",
"102": "source code isn't a release version",
"103": "binding different iface with same ip address",
"104": "ntp interval out of range",
"105": "send a test mail failed, check the configuration",
"300": "invalid ip address",
"301": "no dns server installed",
"302": "not enough params",
"303": "not supported backup method",
"304": "not supported command method",
"305": "service hasn't been init",
"306": "not supported ha type",
"307": "member is not accessible",
"308": "wrong username and password",
"309": "nic config failed",
"310": "service hasn't been started",
"311": "init params is required",
"312": "invalid port",
"313": "verify node failed",
"314": "request body json format error",
"315": "connect backup server timeout",
"316": "data recovery failed",
"317": "data backup failed",
"318": "lower limit bigger than upper limit",
"319": "execute command timeout",
"320": "password/role failed",
"404": "Wrong url, please check it",
"421": "Equipment internal error !",
"600": "operate non-exist group",
"601": "member with same ip alreasy exists",
"602": "member with same name alreasy exists",
"603": "operate non-exist member",
"604": "not supported service type",
"605": "member command queue is full",
"606": "member is performing data recovery",
"607": "group already exists",
"608": "cann't operate local group",
"609": "user already exists",
"610": "operate non-exist user",
"611": "init member service failed",
"612": "owners is required",
"613": "cann't delete the last owner for resource",
"614": "add duplicate owners",
"615": "old password is wrong",
"616": "cann't delete local group",
"617": "cann't delete local member",
"618": "permission denied",
"619": "unkown authority rule",
"620": "authority rule already exist",
"621": "invalid backup data",
"622": "device already under management",
"623": "some devices don't exist any more",
"624": "cann't operation inactive cloud",
"625": "cann't add multi backup devices",
"626": "no backup device",
"627": "not master device",
"628": "not backup device",
"629": "not slave device",
"630": "hasn't managed by cloud yet",
"631": "node can't communicate with master",
"632": "invalid exception handle method",
"800": "time out while sending alarm msg"
}
class ZdnsErrMessage(Base):
error_type = "Equipment Internal Error"
expected = True
    def __init__(self, *args, **kwargs):
self.errors = kwargs.pop('errors', None)
self.object = kwargs.pop('object', None)
super(Base, self).__init__(*args, **kwargs)
if len(args) > 0 and isinstance(args[0], six.string_types):
self.error_message = str(args[0]) + ": " + EZDNS[args[0]]
# @staticmethod
# def getmsg(cord):
# msg = str(cord) + ": " + EZDNS[cord]
# return msg
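# --- Illustrative sketch (not part of the original module) ---------------------
# How a caller might turn a ZDNS numeric code into a raised exception. The helper
# name and the code "11" ("operate non-exist zone") are made-up examples; unknown
# codes are mapped to "421" because EZDNS[...] raises KeyError otherwise.
def _raise_for_zdns_code(code):
    if code not in EZDNS:
        code = "421"  # generic "Equipment internal error !" entry
    raise ZdnsErrMessage(code)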
class AclUsedByView(Base):
error_type = 'acl used by view' | [
"[email protected]"
] | |
c737af5d1ff073a22f5a3aaaf91937cb8797fb95 | 8164fd930d78efbd3885198efbfd9692c585319b | /week7/CrawWeb/craw_web.py | b837f0305e260bb319dc6622a4866529aa9c6f96 | [] | no_license | kobso1245/Hack_BG | 7a7b7524b20fada3d9856a583e02c6959d442e66 | 7ffdb8ccefd67aeca5a49c9a9354e65c77149ad4 | refs/heads/master | 2020-05-31T17:43:10.316633 | 2015-10-05T21:43:20 | 2015-10-05T21:43:20 | 30,460,888 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | from Crawler import *
from Histogram import *
from Plotter import *
if __name__ == '__main__':
craw("http://register.start.bg/", "histogram2")
plot("path_to_database_file/websites.db")
| [
"[email protected]"
] | |
414d29786eb51284f28473d7090b7778c546c6c3 | dd860973103347b382d8a04ef68a9376561725ea | /wazimap_ng/profile/serializers/highlights_serializer.py | 003ee147dc77b5daa5f28a8c58f772775589eb56 | [
"Apache-2.0"
] | permissive | mauricemojito/wazimap-ng | 7a7da6c9fc653054c376d77c22df120ed0abb653 | e03748cb1258cbafb43faba441bbc37dd0556a2a | refs/heads/master | 2023-03-12T01:45:53.173039 | 2020-09-30T13:55:29 | 2020-09-30T13:55:29 | 342,342,503 | 0 | 0 | Apache-2.0 | 2021-02-25T18:55:09 | 2021-02-25T18:34:39 | Python | UTF-8 | Python | false | false | 2,545 | py | from wazimap_ng.datasets.models import IndicatorData
from wazimap_ng.utils import mergedict
def get_subindicator(highlight):
subindicators = highlight.indicator.subindicators
idx = highlight.subindicator if highlight.subindicator is not None else 0
return subindicators[idx]
def sibling(highlight, geography):
siblings = geography.get_siblings()
indicator_data = IndicatorData.objects.filter(indicator__profilehighlight=highlight, geography__in=siblings)
subindicator = get_subindicator(highlight)
numerator = None
denominator = 0
for datum in indicator_data:
if datum.geography == geography:
numerator = datum.data["subindicators"].get(subindicator, 0)
s = datum.data["subindicators"][subindicator]
denominator += s
if denominator > 0 and numerator is not None:
return numerator / denominator
return None
def absolute_value(highlight, geography):
indicator_data = IndicatorData.objects.filter(indicator__profilehighlight=highlight, geography=geography)
if indicator_data.count() > 0:
subindicator = get_subindicator(highlight)
data = indicator_data.first().data # TODO what to do with multiple results
return data["subindicators"].get(subindicator, 0)
return None
def subindicator(highlight, geography):
indicator_data = IndicatorData.objects.filter(indicator__profilehighlight=highlight, geography=geography)
if indicator_data.count() > 0:
indicator_data = indicator_data.first() # Fix this need to cater for multiple results
subindicator = get_subindicator(highlight)
numerator = indicator_data.data["subindicators"].get(subindicator, 0)
denominator = 0
for datum, count in indicator_data.data["subindicators"].items():
denominator += count
if denominator > 0 and numerator is not None:
return numerator / denominator
return None
algorithms = {
"absolute_value": absolute_value,
"sibling": sibling,
"subindicators": subindicator
}
def HighlightsSerializer(profile, geography):
highlights = []
profile_highlights = profile.profilehighlight_set.all().order_by("order")
for highlight in profile_highlights:
denominator = highlight.denominator
method = algorithms.get(denominator, absolute_value)
val = method(highlight, geography)
if val is not None:
highlights.append({"label": highlight.label, "value": val, "method": denominator})
return highlights
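# --- Illustrative sketch (not part of the original module) ---------------------
# HighlightsSerializer returns a plain list of dicts; the values below are made-up
# sample output, shown only to document the shape consumed by the profile API.
_EXAMPLE_HIGHLIGHTS_OUTPUT = [
    {"label": "Population", "value": 1234567, "method": "absolute_value"},
    {"label": "Female share", "value": 0.51, "method": "subindicators"},
]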
| [
"[email protected]"
] | |
229bb954ad637b3c780475f96f2f202e84d0a4db | c8cf1bdacdbf6de75e61cc6a2ce8617479c19ec6 | /test/mobile/test_bytecode.py | 95baa86d5763e5e308c727fa87d6d77972080125 | [
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | Afonso-2403/pytorch | 7f5ddf8370de938045b4ec412b98bef9dfc193ed | e35e6237d24b6c96b122deb21f015c0fe3eccb13 | refs/heads/master | 2023-08-21T18:43:43.019194 | 2021-09-13T17:58:00 | 2021-09-13T17:58:00 | 363,847,561 | 1 | 0 | NOASSERTION | 2021-07-08T19:06:16 | 2021-05-03T07:16:49 | C++ | UTF-8 | Python | false | false | 13,355 | py | import fnmatch
import io
import shutil
import tempfile
import torch
import torch.utils.show_pickle
# from torch.utils.mobile_optimizer import optimize_for_mobile
from torch.jit.mobile import (
_load_for_lite_interpreter,
_get_model_bytecode_version,
_get_model_ops_and_info,
_backport_for_mobile_to_buffer,
_backport_for_mobile)
from torch.testing._internal.common_utils import TestCase, run_tests
from pathlib import Path
pytorch_test_dir = Path(__file__).resolve().parents[1]
# script_module_v4.ptl and script_module_v5.ptl source code
# class TestModule(torch.nn.Module):
# def __init__(self, v):
# super().__init__()
# self.x = v
# def forward(self, y: int):
# increment = torch.ones([2, 4], dtype=torch.float64)
# return self.x + y + increment
# output_model_path = Path(tmpdirname, "script_module_v5.ptl")
# script_module = torch.jit.script(TestModule(1))
# optimized_scripted_module = optimize_for_mobile(script_module)
# exported_optimized_scripted_module = optimized_scripted_module._save_for_lite_interpreter(
# str(output_model_path))
SCRIPT_MODULE_V4_BYTECODE_PKL = '''
(4,
('__torch__.*.TestModule.forward',
(('instructions',
(('STOREN', 1, 2),
('DROPR', 1, 0),
('LOADC', 0, 0),
('LOADC', 1, 0),
('MOVE', 2, 0),
('OP', 0, 0),
('LOADC', 1, 0),
('OP', 1, 0),
('RET', 0, 0))),
('operators', (('aten::add', 'int'), ('aten::add', 'Scalar'))),
('constants',
(torch._utils._rebuild_tensor_v2(pers.obj(('storage', torch.DoubleStorage, '0', 'cpu', 8),),
0,
(2, 4),
(4, 1),
False,
collections.OrderedDict()),
1)),
('types', ()),
('register_size', 2)),
(('arguments',
((('name', 'self'),
('type', '__torch__.*.TestModule'),
('default_value', None)),
(('name', 'y'), ('type', 'int'), ('default_value', None)))),
('returns',
((('name', ''), ('type', 'Tensor'), ('default_value', None)),)))))
'''
SCRIPT_MODULE_V5_BYTECODE_PKL = '''
(5,
('__torch__.*.TestModule.forward',
(('instructions',
(('STOREN', 1, 2),
('DROPR', 1, 0),
('LOADC', 0, 0),
('LOADC', 1, 0),
('MOVE', 2, 0),
('OP', 0, 0),
('LOADC', 1, 0),
('OP', 1, 0),
('RET', 0, 0))),
('operators', (('aten::add', 'int'), ('aten::add', 'Scalar'))),
('constants',
(torch._utils._rebuild_tensor_v2(pers.obj(('storage', torch.DoubleStorage, 'constants/0', 'cpu', 8),),
0,
(2, 4),
(4, 1),
False,
collections.OrderedDict()),
1)),
('types', ()),
('register_size', 2)),
(('arguments',
((('name', 'self'),
('type', '__torch__.*.TestModule'),
('default_value', None)),
(('name', 'y'), ('type', 'int'), ('default_value', None)))),
('returns',
((('name', ''), ('type', 'Tensor'), ('default_value', None)),)))))
'''
SCRIPT_MODULE_V6_BYTECODE_PKL = '''
(6,
('__torch__.*.TestModule.forward',
(('instructions',
(('STOREN', 1, 2),
('DROPR', 1, 0),
('LOADC', 0, 0),
('LOADC', 1, 0),
('MOVE', 2, 0),
('OP', 0, 0),
('OP', 1, 0),
('RET', 0, 0))),
('operators', (('aten::add', 'int', 2), ('aten::add', 'Scalar', 2))),
('constants',
(torch._utils._rebuild_tensor_v2(pers.obj(('storage', torch.DoubleStorage, '0', 'cpu', 8),),
0,
(2, 4),
(4, 1),
False,
collections.OrderedDict()),
1)),
('types', ()),
('register_size', 2)),
(('arguments',
((('name', 'self'),
('type', '__torch__.*.TestModule'),
('default_value', None)),
(('name', 'y'), ('type', 'int'), ('default_value', None)))),
('returns',
((('name', ''), ('type', 'Tensor'), ('default_value', None)),)))))
'''
SCRIPT_MODULE_BYTECODE_PKL = {
4: {
"bytecode_pkl": SCRIPT_MODULE_V4_BYTECODE_PKL,
"model_name": "script_module_v4.ptl"},
}
# The minimum version a model can be backported to
# Need to be updated when a bytecode version is completely retired
MINIMUM_TO_VERSION = 4
class testVariousModelVersions(TestCase):
def test_get_model_bytecode_version(self):
def check_model_version(model_path, expect_version):
actual_version = _get_model_bytecode_version(model_path)
assert(actual_version == expect_version)
for version, model_info in SCRIPT_MODULE_BYTECODE_PKL.items():
model_path = pytorch_test_dir / "cpp" / "jit" / model_info["model_name"]
check_model_version(model_path, version)
def test_bytecode_values_for_all_backport_functions(self):
# Find the maximum version of the checked in models, start backporting to the minimum support version,
# and comparing the bytecode pkl content.
# It can't be merged to the test `test_all_backport_functions`, because optimization is dynamic and
# the content might change when optimize function changes. This test focuses
# on bytecode.pkl content validation. For the content validation, it is not byte to byte check, but
# regular expression matching. The wildcard can be used to skip some specific content comparison.
maximum_checked_in_model_version = max(SCRIPT_MODULE_BYTECODE_PKL.keys())
current_from_version = maximum_checked_in_model_version
with tempfile.TemporaryDirectory() as tmpdirname:
while current_from_version > MINIMUM_TO_VERSION:
# Load model v5 and run forward method
model_name = SCRIPT_MODULE_BYTECODE_PKL[current_from_version]["model_name"]
input_model_path = pytorch_test_dir / "cpp" / "jit" / model_name
# A temporary model file will be export to this path, and run through bytecode.pkl
# content check.
tmp_output_model_path_backport = Path(tmpdirname, "tmp_script_module_backport.ptl")
current_to_version = current_from_version - 1
backport_success = _backport_for_mobile(input_model_path, tmp_output_model_path_backport, current_to_version)
assert(backport_success)
expect_bytecode_pkl = SCRIPT_MODULE_BYTECODE_PKL[current_to_version]["bytecode_pkl"]
buf = io.StringIO()
torch.utils.show_pickle.main(
["", tmpdirname + "/" + tmp_output_model_path_backport.name + "@*/bytecode.pkl"],
output_stream=buf)
output = buf.getvalue()
acutal_result_clean = "".join(output.split())
expect_result_clean = "".join(expect_bytecode_pkl.split())
isMatch = fnmatch.fnmatch(acutal_result_clean, expect_result_clean)
assert(isMatch)
current_from_version -= 1
shutil.rmtree(tmpdirname)
# Please run this test manually when working on backport.
# This test passes in OSS, but fails internally, likely due to missing step in build
# def test_all_backport_functions(self):
# # Backport from the latest bytecode version to the minimum support version
# # Load, run the backport model, and check version
# class TestModule(torch.nn.Module):
# def __init__(self, v):
# super().__init__()
# self.x = v
# def forward(self, y: int):
# increment = torch.ones([2, 4], dtype=torch.float64)
# return self.x + y + increment
# module_input = 1
# expected_mobile_module_result = 3 * torch.ones([2, 4], dtype=torch.float64)
# # temporary input model file and output model file will be exported in the temporary folder
# with tempfile.TemporaryDirectory() as tmpdirname:
# tmp_input_model_path = Path(tmpdirname, "tmp_script_module.ptl")
# script_module = torch.jit.script(TestModule(1))
# optimized_scripted_module = optimize_for_mobile(script_module)
# exported_optimized_scripted_module = optimized_scripted_module._save_for_lite_interpreter(str(tmp_input_model_path))
# current_from_version = _get_model_bytecode_version(tmp_input_model_path)
# current_to_version = current_from_version - 1
# tmp_output_model_path = Path(tmpdirname, "tmp_script_module_backport.ptl")
# while current_to_version >= MINIMUM_TO_VERSION:
# # Backport the latest model to `to_version` to a tmp file "tmp_script_module_backport"
# backport_success = _backport_for_mobile(tmp_input_model_path, tmp_output_model_path, current_to_version)
# assert(backport_success)
# backport_version = _get_model_bytecode_version(tmp_output_model_path)
# assert(backport_version == current_to_version)
# # Load model and run forward method
# mobile_module = _load_for_lite_interpreter(str(tmp_input_model_path))
# mobile_module_result = mobile_module(module_input)
# torch.testing.assert_close(mobile_module_result, expected_mobile_module_result)
# current_to_version -= 1
# # Check backport failure case
# backport_success = _backport_for_mobile(tmp_input_model_path, tmp_output_model_path, MINIMUM_TO_VERSION - 1)
# assert(not backport_success)
# # need to clean the folder before it closes, otherwise will run into git not clean error
# shutil.rmtree(tmpdirname)
# Check just the test_backport_bytecode_from_file_to_file mechanism but not the function implementations
def test_backport_bytecode_from_file_to_file(self):
maximum_checked_in_model_version = max(SCRIPT_MODULE_BYTECODE_PKL.keys())
script_module_v5_path = pytorch_test_dir / "cpp" / "jit" / SCRIPT_MODULE_BYTECODE_PKL[
maximum_checked_in_model_version]["model_name"]
if (maximum_checked_in_model_version > MINIMUM_TO_VERSION):
with tempfile.TemporaryDirectory() as tmpdirname:
tmp_backport_model_path = Path(tmpdirname, "tmp_script_module_v5_backported_to_v4.ptl")
# backport from file
success = _backport_for_mobile(
script_module_v5_path,
tmp_backport_model_path,
maximum_checked_in_model_version - 1)
assert(success)
buf = io.StringIO()
torch.utils.show_pickle.main(
["", tmpdirname + "/" + tmp_backport_model_path.name + "@*/bytecode.pkl"],
output_stream=buf)
output = buf.getvalue()
expected_result = SCRIPT_MODULE_V4_BYTECODE_PKL
acutal_result_clean = "".join(output.split())
expect_result_clean = "".join(expected_result.split())
isMatch = fnmatch.fnmatch(acutal_result_clean, expect_result_clean)
assert(isMatch)
# Load model v4 and run forward method
mobile_module = _load_for_lite_interpreter(str(tmp_backport_model_path))
module_input = 1
mobile_module_result = mobile_module(module_input)
expected_mobile_module_result = 3 * torch.ones([2, 4], dtype=torch.float64)
torch.testing.assert_close(mobile_module_result, expected_mobile_module_result)
shutil.rmtree(tmpdirname)
# Check just the _backport_for_mobile_to_buffer mechanism but not the function implementations
def test_backport_bytecode_from_file_to_buffer(self):
maximum_checked_in_model_version = max(SCRIPT_MODULE_BYTECODE_PKL.keys())
script_module_v5_path = pytorch_test_dir / "cpp" / "jit" / SCRIPT_MODULE_BYTECODE_PKL[
maximum_checked_in_model_version]["model_name"]
if (maximum_checked_in_model_version > MINIMUM_TO_VERSION):
# Backport model to v4
script_module_v4_buffer = _backport_for_mobile_to_buffer(
script_module_v5_path, maximum_checked_in_model_version - 1)
buf = io.StringIO()
# Check version of the model v4 from backport
bytesio = io.BytesIO(script_module_v4_buffer)
backport_version = _get_model_bytecode_version(bytesio)
assert(backport_version == maximum_checked_in_model_version - 1)
# Load model v4 from backport and run forward method
bytesio = io.BytesIO(script_module_v4_buffer)
mobile_module = _load_for_lite_interpreter(bytesio)
module_input = 1
mobile_module_result = mobile_module(module_input)
expected_mobile_module_result = 3 * torch.ones([2, 4], dtype=torch.float64)
torch.testing.assert_close(mobile_module_result, expected_mobile_module_result)
def test_get_model_ops_and_info(self):
# TODO update this to be more in the style of the above tests after a backport from 6 -> 5 exists
script_module_v6 = pytorch_test_dir / "cpp" / "jit" / "script_module_v6.ptl"
ops_v6 = _get_model_ops_and_info(script_module_v6)
assert(ops_v6["aten::add.int"].num_schema_args == 2)
assert(ops_v6["aten::add.Scalar"].num_schema_args == 2)
if __name__ == '__main__':
run_tests()
| [
"[email protected]"
] | |
a6269498158572202304da939470fc4fdd2e3b1f | 060ce17de7b5cdbd5f7064d1fceb4ded17a23649 | /fn_github/tests/test_releases.py | afd754d1f6b234e0108ff323b9fb987cfd825a66 | [
"MIT"
] | permissive | ibmresilient/resilient-community-apps | 74bbd770062a22801cef585d4415c29cbb4d34e2 | 6878c78b94eeca407998a41ce8db2cc00f2b6758 | refs/heads/main | 2023-06-26T20:47:15.059297 | 2023-06-23T16:33:58 | 2023-06-23T16:33:58 | 101,410,006 | 81 | 107 | MIT | 2023-03-29T20:40:31 | 2017-08-25T14:07:33 | Python | UTF-8 | Python | false | false | 4,464 | py | # -*- coding: utf-8 -*-
"""Tests using pytest_resilient_circuits"""
import pytest
from .common_config import github_config, TS
from resilient_circuits.util import get_config_data, get_function_definition
from resilient_circuits import SubmitTestFunction, FunctionResult
PACKAGE_NAME = "fn_github"
FUNCTION_NAME = "github_create_release"
# Read the default configuration-data section from the package
config_data = get_config_data(PACKAGE_NAME)
# Provide a simulation of the Resilient REST API (uncomment to connect to a real appliance)
resilient_mock = "pytest_resilient_circuits.BasicResilientMock"
def call_function(circuits, function_name, function_params, timeout=5):
# Create the submitTestFunction event
evt = SubmitTestFunction(function_name, function_params)
# Fire a message to the function
circuits.manager.fire(evt)
# circuits will fire an "exception" event if an exception is raised in the FunctionComponent
# return this exception if it is raised
exception_event = circuits.watcher.wait("exception", parent=None, timeout=timeout)
if exception_event is not False:
exception = exception_event.args[1]
raise exception
# else return the FunctionComponent's results
else:
event = circuits.watcher.wait(f"{function_name}_result", parent=evt, timeout=timeout)
assert event
assert isinstance(event.kwargs["result"], FunctionResult)
pytest.wait_for(event, "complete", True)
return event.kwargs["result"].value
def call_github_create_release_function(circuits, function_params, timeout=5):
# Create the submitTestFunction event
evt = SubmitTestFunction("github_create_release", function_params)
# Fire a message to the function
circuits.manager.fire(evt)
# circuits will fire an "exception" event if an exception is raised in the FunctionComponent
# return this exception if it is raised
exception_event = circuits.watcher.wait("exception", parent=None, timeout=timeout)
if exception_event is not False:
exception = exception_event.args[1]
raise exception
# else return the FunctionComponent's results
else:
event = circuits.watcher.wait("github_create_release_result", parent=evt, timeout=timeout)
assert event
assert isinstance(event.kwargs["result"], FunctionResult)
pytest.wait_for(event, "complete", True)
return event.kwargs["result"].value
class TestGithubCreateRelease:
""" Tests for the github_create_release function"""
def test_function_definition(self):
""" Test that the package provides customization_data that defines the function """
func = get_function_definition(PACKAGE_NAME, FUNCTION_NAME)
assert func is not None
@pytest.mark.livetest
def test_create_release(self, circuits_app):
""" Test calling with sample values for the parameters """
create_release_setup = github_config('create_release')
create_release_setup['github_release_name'] = f"{create_release_setup['github_release_name']}_{TS.strftime('%Y%m%d_%H%M%S')}"
create_release_setup['github_release_tag'] = f"{create_release_setup['github_release_tag']}_{TS.strftime('%Y%m%d_%H%M%S')}"
results = call_function(circuits_app, "github_create_release", create_release_setup)
assert(results['success'])
@pytest.mark.livetest
def test_get_release(self, circuits_app):
""" Test calling with sample values for the parameters """
get_release_setup = github_config('get_release')
get_release_setup['github_release_tag'] = f"{get_release_setup['github_release_tag']}_{TS.strftime('%Y%m%d_%H%M%S')}"
results = call_function(circuits_app, "github_get_release", get_release_setup)
assert(results['success'])
assert(results['content'])
@pytest.mark.livetest
def test_get_releases(self, circuits_app):
get_releases_setup = github_config('get_releases')
results = call_function(circuits_app, "github_get_releases", get_releases_setup)
assert(results['success'])
assert(results['content'])
@pytest.mark.livetest
def test_get_latest_release(self, circuits_app):
get_releases_setup = github_config('get_latest_release')
results = call_function(circuits_app, "github_get_latest_release", get_releases_setup)
assert(results['success'])
assert(results['content'])
| [
"[email protected]"
] | |
8ebabb8929c847e3c9edcd7a71bcd0940adfa0c2 | d44bfb67b8b19f3773558870a71a42e0cd3ec002 | /telemetry-library/telemetry/telemetry_mqtt.py | cc84776095c2500d7a5842bdd8449b8635c5956f | [
"Apache-2.0"
] | permissive | Abstract-Horizon/pyros-telemetry | 764cdbb8cc98b7d72b1b2a04490c4989c003cbd2 | 7ecb5deaf266689555cbf0721f9c156e4dfe28d7 | refs/heads/master | 2023-08-17T19:52:24.684594 | 2021-10-11T17:46:40 | 2021-10-11T17:46:40 | 272,370,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,580 | py | ################################################################################
# Copyright (C) 2016-2020 Abstract Horizon
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License v2.0
# which accompanies this distribution, and is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# Contributors:
# Daniel Sendula - initial API and implementation
#
#################################################################################
import paho.mqtt.client as mqtt
import random
import re
import sys
import threading
import time
import traceback
from telemetry.telemetry_logger import TelemetryLogger, LocalPipeTelemetryLoggerDestination, PubSubTelemetryLoggerClient
from telemetry.telemetry_client import PubSubTelemetryClient
class MQTTLocalPipeTelemetryLogger(TelemetryLogger):
def __init__(self, stream_name, host="localhost", port=1883, topic='telemetry'):
self.mqtt = MQTTWrapper(host, port)
super(MQTTLocalPipeTelemetryLogger, self).__init__(stream_name,
destination=LocalPipeTelemetryLoggerDestination(),
telemetry_client=PubSubTelemetryLoggerClient(topic, self.mqtt.publish, self.mqtt.subscribe))
def init(self):
while not self.mqtt.is_connected():
self.mqtt.loop(0.02)
super(MQTTLocalPipeTelemetryLogger, self).init()
while not self.stream_ready and self.registration_error == 0:
self.mqtt.loop(0.02)
class MQTTTelemetryClient(PubSubTelemetryClient):
def __init__(self, host="localhost", port=1883, topic='telemetry'):
self.mqtt = MQTTWrapper(host, port)
super(MQTTTelemetryClient, self).__init__(topic, self.mqtt.publish, self.mqtt.subscribe)
class MQTTWrapper:
def __init__(self, host="localhost", port=1883, auto_init=True):
self.client = None
self.host = host
self.port = port
self.name = "telemetry-server-" + str(random.randint(10000, 99999))
self._subscribers = []
self._regexToLambda = {}
self._received = False
self.connected = False
if auto_init:
self.init()
def init(self, wait_to_connect=True):
self.client = mqtt.Client(self.name)
self.client.on_disconnect = self._on_disconnect
self.client.on_connect = self._on_connect
self.client.on_message = self._on_message
if self.host is not None:
self._connect()
if wait_to_connect:
print(" " + self.name + " waiting to connect to broker...")
while not self.connected:
self.loop(0.02)
print(" " + self.name + " connected to broker.")
def _connect(self):
self.connected = False
if self.client is not None:
try:
self.client.disconnect()
except Exception:
pass
self.client.connect_async(self.host, self.port, 60)
thread = threading.Thread(target=self._reconnect)
thread.daemon = True
thread.start()
def _on_disconnect(self, _mqtt_client, _data, _rc):
self._connect()
def _on_connect(self, mqtt_client, _data, _flags, rc):
if rc == 0:
self.connected = True
for subscriber in self._subscribers:
mqtt_client.subscribe(subscriber, 0)
else:
print("ERROR: Connection returned error result: " + str(rc))
sys.exit(rc)
def _on_message(self, _mqtt_client, _data, msg):
        self._received = True
topic = msg.topic
try:
for regex in self._regexToLambda:
matching = regex.match(topic)
if matching:
method = self._regexToLambda[regex]
method(topic, msg.payload)
return
except Exception as ex:
print("ERROR: Got exception in on message processing; " + str(ex) + "\n" + ''.join(traceback.format_tb(ex.__traceback__)))
def _reconnect(self):
try:
self.client.reconnect()
except Exception:
pass
def publish(self, topic, message):
if self.connected:
self.client.publish(topic, message)
def subscribe(self, topic, method):
self._subscribers.append(topic)
regex_string = "^" + topic.replace("+", "([^/]+)").replace("#", "(.*)") + "$"
regex = re.compile(regex_string)
self._regexToLambda[regex] = method
if self.connected:
self.client.subscribe(topic, 0)
def is_connected(self):
return self.connected
def sleep(self, delta_time):
        self.loop(delta_time)
    def loop(self, delta_time, inner=None):
current_time = time.time()
self._received = False
self.client.loop(0.0005) # wait for 0.5 ms
until = current_time + delta_time
while current_time < until:
if self._received:
self._received = False
                self.client.loop(0.0005) # wait for 0.5 ms
current_time = time.time()
else:
time.sleep(0.002) # wait for 2 ms
current_time = time.time()
if current_time + 0.0005 < until:
                    self.client.loop(0.0005) # wait for 0.5 ms
current_time = time.time()
def forever(self, delta_time, outer=None, inner=None):
current_time = time.time()
next_time = current_time
while True:
next_time = next_time + delta_time
try:
if outer is not None:
outer()
except BaseException as ex:
print("ERROR: Got exception in main loop; " + str(ex) + "\n" + ''.join(traceback.format_tb(ex.__traceback__)))
current_time = time.time()
sleep_time = next_time - current_time
if sleep_time < 0.002:
next_time = current_time
self._received = False
                self.client.loop(0.0005) # wait for 0.5 ms
                count = 10 # drain up to 10 queued messages
                while count > 0 and self._received:
                    self._received = False
                    count -= 1
                    self.client.loop(0.0005) # wait for 0.5 ms
else:
self.loop(sleep_time, inner=inner)
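# --- Illustrative usage sketch (not part of the original module) ---------------
# How MQTTWrapper is typically wired up. subscribe() turns MQTT wildcards into a
# regular expression ("sensor/+/temp" becomes r"^sensor/([^/]+)/temp$"), so the
# handler fires for every matching topic. Broker host and topic names below are
# made-up examples.
def _example_usage():
    wrapper = MQTTWrapper("localhost", 1883)
    wrapper.subscribe("telemetry/#", lambda topic, payload: print(topic, payload))
    wrapper.publish("telemetry/stream1", b"hello")
    wrapper.loop(0.5)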
| [
"[email protected]"
] | |
466373d76ca24cb2a34c4824c5097fc46feafc28 | 88cc55bc6e6feee2fa5f186d57ccb5e1d420217c | /django_websocket/__init__.py | 6e4e96ace8d71052cc4fc96b0c8e0a06953e792e | [
"BSD-3-Clause"
] | permissive | gregmuellegger/django-websocket | 7c16d3ba27970284b8fbeab5cecd1358d47373a2 | cb4804e98f397f242e74c6f9e6f4fabab41a7ab7 | refs/heads/master | 2021-07-05T00:13:21.719973 | 2016-05-10T06:27:40 | 2016-05-10T06:27:40 | 773,121 | 68 | 19 | BSD-3-Clause | 2021-06-10T17:32:54 | 2010-07-13T19:40:14 | Python | UTF-8 | Python | false | false | 42 | py | from django_websocket.decorators import *
| [
"[email protected]"
] | |
09da46de08db8efd21ef86e80c0bd1b0bfa4641f | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/fbs_0140+360/sdB_FBS_0140+360_lc.py | 757218cb78912bdc16ba760e680f6cef9c974b74 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | from gPhoton.gAperture import gAperture
def main():
gAperture(band="NUV", skypos=[25.926708,36.25925], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_FBS_0140+360 /sdB_FBS_0140+360_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
0f923332d74ab03086681ff9097adf5ed4fd7603 | 70ec704fdd3e30c5df97323cd4c9532ebfd544ea | /ml_wiki/ch2/selenium-login.py | d53a91bf593dc64f47bd9b445885c8954f53b454 | [] | no_license | smart1004/learn_src | e02c13c82bae65b7de2a572e4a1ae58e2ea11588 | 353f92f7657a6f676a271d8d7f00d7c20e39d234 | refs/heads/master | 2021-01-25T11:49:49.906805 | 2018-04-08T13:26:18 | 2018-04-08T13:26:18 | 123,435,997 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,073 | py | from selenium import webdriver
USER = "<아이디>"
PASS = "<비밀번호>"
# PhantomJS 드라이버 추출하기 --- (※1)
browser = webdriver.PhantomJS()
browser.implicitly_wait(3)
# 로그인 페이지에 접근하기 --- (※2)
url_login = "https://nid.naver.com/nidlogin.login"
browser.get(url_login)
print("로그인 페이지에 접근합니다.")
# 텍스트 박스에 아이디와 비밀번호 입력하기 --- (※3)
e = browser.find_element_by_id("id")
e.clear()
e.send_keys(USER)
e = browser.find_element_by_id("pw")
e.clear()
e.send_keys(PASS)
# 입력 양식 전송해서 로그인하기 --- (※4)
form = browser.find_element_by_css_selector("input.btn_global[type=submit]")
form.submit()
print("로그인 버튼을 클릭합니다.")
# 쇼핑 페이지의 데이터 가져오기 --- (※5)
browser.get("https://order.pay.naver.com/home?tabMenu=SHOPPING")
# 쇼핑 목록 출력하기 --- (※6)
products = browser.find_elements_by_css_selector(".p_info span")
print(products)
for product in products:
print("-", product.text) | [
"[email protected]"
] | |
38459d585c7f1861e8774c7571859a85236be08b | 6515a47190986c4f3b6beececfabab42e3d34e34 | /Models/GPT2_Model/Model/GPT2LMHead.py | 7f0304b3b13ce6e44e8c037aec7823bb34427b7e | [] | no_license | jk96491/Advanced_Models | f4140936f5004ed9a9464ad745b33e52d63157fa | cde49356fec3c53296446a54f4be497a89dd08cd | refs/heads/master | 2023-06-14T02:26:43.869417 | 2021-06-30T13:07:31 | 2021-06-30T13:07:31 | 143,489,382 | 60 | 16 | null | null | null | null | UTF-8 | Python | false | false | 782 | py | import torch.nn as nn
class GPT2LMHead(nn.Module):
def __init__(self, model_embeddings_weights, config):
super(GPT2LMHead, self).__init__()
self.n_embd = config.n_embd
self.set_embeddings_weights(model_embeddings_weights)
def set_embeddings_weights(self, model_embeddings_weights):
embed_shape = model_embeddings_weights.shape
self.decoder = nn.Linear(embed_shape[1], embed_shape[0], bias=False)
self.decoder.weight = model_embeddings_weights # Tied weights
def forward(self, hidden_state):
# Truncated Language modeling logits (we remove the last token)
# h_trunc = h[:, :-1].contiguous().view(-1, self.n_embd)
lm_logits = self.decoder(hidden_state)
return lm_logits | [
"[email protected]"
] | |
1e236e30d75c559339f1261b732a9a70d9df7122 | 35053a371d85c2d45a4f52239d8a70b38194ef48 | /Can Place Flowers.py | 86bf8aeeb427181d1fe805cbf5b1d1bcb364a643 | [] | no_license | Kuehar/LeetCode | 51d169c81a2e572ea854399fc78e1130220388f9 | 4555c20455f181f9dd7b3aba2a8779dea795edfb | refs/heads/master | 2023-04-16T10:13:03.584541 | 2023-04-06T11:47:21 | 2023-04-06T11:47:21 | 243,361,421 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 930 | py | class Solution:
def canPlaceFlowers(self, flowerbed: List[int], n: int) -> bool:
if n == 0: return True
if len(flowerbed) == 0: return False
if len(flowerbed) == 1: return flowerbed[0] == 0
pre,cur = flowerbed[0],flowerbed[1]
if pre + cur == 0:
flowerbed[0] = 1
n -= 1
cur,nex = flowerbed[-1],flowerbed[-2]
if cur + nex == 0:
flowerbed[-1] = 1
n -= 1
for i in range(2,len(flowerbed)-2):
pre = flowerbed[i-1]
cur = flowerbed[i]
nex = flowerbed[i+1]
if (pre + cur + nex) == 0:
flowerbed[i] = 1
n -= 1
return n <= 0
# Runtime: 164 ms, faster than 58.48% of Python3 online submissions for Can Place Flowers.
# Memory Usage: 14.5 MB, less than 89.00% of Python3 online submissions for Can Place Flowers.
| [
"[email protected]"
] | |
2e79679ebdc6ebb91b85f95ac5bccc7866b865ab | ace30d0a4b1452171123c46eb0f917e106a70225 | /filesystems/vnx_rootfs_lxc_ubuntu64-16.04-v025-openstack-compute/rootfs/usr/lib/python2.7/dist-packages/openstackclient/tests/functional/image/v2/test_image.py | 6faff94a3295cd738875bfb8fda2baf1ac162efb | [
"Python-2.0"
] | permissive | juancarlosdiaztorres/Ansible-OpenStack | e98aa8c1c59b0c0040c05df292964520dd796f71 | c01951b33e278de9e769c2d0609c0be61d2cb26b | refs/heads/master | 2022-11-21T18:08:21.948330 | 2018-10-15T11:39:20 | 2018-10-15T11:39:20 | 152,568,204 | 0 | 3 | null | 2022-11-19T17:38:49 | 2018-10-11T09:45:48 | Python | UTF-8 | Python | false | false | 3,865 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
from openstackclient.tests.functional import base
class ImageTests(base.TestCase):
"""Functional tests for image. """
NAME = uuid.uuid4().hex
OTHER_NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['name']
@classmethod
def setUpClass(cls):
os.environ['OS_IMAGE_API_VERSION'] = '2'
opts = cls.get_opts(cls.FIELDS)
raw_output = cls.openstack('image create ' + cls.NAME + opts)
expected = cls.NAME + '\n'
cls.assertOutput(expected, raw_output)
@classmethod
def tearDownClass(cls):
# Rename test
raw_output = cls.openstack('image set --name ' + cls.OTHER_NAME + ' '
+ cls.NAME)
cls.assertOutput('', raw_output)
# Delete test
raw_output = cls.openstack('image delete ' + cls.OTHER_NAME)
cls.assertOutput('', raw_output)
def test_image_list(self):
opts = self.get_opts(self.HEADERS)
raw_output = self.openstack('image list' + opts)
self.assertIn(self.NAME, raw_output)
def test_image_show(self):
opts = self.get_opts(self.FIELDS)
raw_output = self.openstack('image show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\n", raw_output)
def test_image_set(self):
opts = self.get_opts([
"disk_format", "visibility", "min_disk", "min_ram", "name"])
self.openstack('image set --min-disk 4 --min-ram 5 ' +
'--public ' + self.NAME)
raw_output = self.openstack('image show ' + self.NAME + opts)
self.assertEqual("raw\n4\n5\n" + self.NAME + '\npublic\n', raw_output)
def test_image_metadata(self):
opts = self.get_opts(["name", "properties"])
self.openstack('image set --property a=b --property c=d ' + self.NAME)
raw_output = self.openstack('image show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
def test_image_unset(self):
opts = self.get_opts(["name", "tags", "properties"])
self.openstack('image set --tag 01 ' + self.NAME)
self.openstack('image unset --tag 01 ' + self.NAME)
# test_image_metadata has set image properties "a" and "c"
self.openstack('image unset --property a --property c ' + self.NAME)
raw_output = self.openstack('image show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\n\n", raw_output)
def test_image_members(self):
opts = self.get_opts(['project_id'])
my_project_id = self.openstack('token issue' + opts).strip()
self.openstack(
'image add project {} {}'.format(self.NAME, my_project_id))
self.openstack(
'image set --accept ' + self.NAME)
shared_img_list = self.parse_listing(
self.openstack('image list --shared', self.get_opts(['name']))
)
self.assertIn(self.NAME, [img['Name'] for img in shared_img_list])
self.openstack(
'image set --reject ' + self.NAME)
shared_img_list = self.parse_listing(
self.openstack('image list --shared', self.get_opts(['name']))
)
self.openstack(
'image remove project {} {}'.format(self.NAME, my_project_id))
| [
"[email protected]"
] | |
4fe46a3e69863bca6e98c1cb6ab5c17fd36f8261 | 5c531de5e4759c904e608b4fc653b2b041f79a0e | /779. K-th Symbol in Grammar.py | 06e46cf683b3f090a9b595db7b7a9fd6675029aa | [] | no_license | jianhui-ben/leetcode_python | 133c7e6e5c7316d00607ba2e327239e002de28b2 | fcc16124cc24a5993e27f5d97e78d8f290e68230 | refs/heads/master | 2022-06-05T22:32:18.034581 | 2022-05-17T02:27:11 | 2022-05-17T02:27:11 | 250,683,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | #779. K-th Symbol in Grammar
#On the first row, we write a 0. Now in every subsequent row, we look at the previous row and replace each occurrence of 0 with 01, and each occurrence of 1 with 10.
#Given row N and index K, return the K-th indexed symbol in row N. (The values of K are 1-indexed.) (1 indexed).
#Examples:
#Input: N = 1, K = 1
#Output: 0
#Input: N = 2, K = 1
#Output: 0
#Input: N = 2, K = 2
#Output: 1
#Input: N = 4, K = 5
#Output: 1
class Solution:
def kthGrammar(self, N: int, K: int) -> int:
## recursion
if N==1: return 0
if K%2==1:
return self.kthGrammar(N-1,(K+1)//2)
else:
return 1-self.kthGrammar(N-1,(K+1)//2)
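# --- Illustrative trace (not part of the original solution) --------------------
# N=4, K=5: K is odd, so it equals its parent (N=3, K=3); that is odd again, so it
# equals (N=2, K=2); K=2 is even, so it is the flip of (N=1, K=1)=0, i.e. 1.
if __name__ == "__main__":
    print(Solution().kthGrammar(4, 5))  # expected output: 1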
| [
"[email protected]"
] | |
5adf7e2ecf8e0908041fd9b472f7833397d98c44 | e990606b3db8429e87f9a0a5a10e3c10b17ae13b | /data_proc/wordpiece/mlperf_tags.py | 88313a6ba9df4bb9205a4755d963ed60a5c9aa46 | [] | no_license | michael-wzhu/redesign_vocab_for_zh_bert | 4c9ae36295815ea58cfb34d4f46ac2ace097861f | 4e7e72e8f4554feebba6ae2f553a5240e5bedcd4 | refs/heads/master | 2022-12-16T17:41:09.704451 | 2020-09-26T12:06:35 | 2020-09-26T12:06:35 | 274,569,222 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,882 | py | # coding=utf-8
# Copyright 2020 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2018 MLBenchmark Group. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Master list of MLPerf tags to be logged for benchmark submissions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# ==============================================================================
# == Benchmarks ================================================================
# ==============================================================================
# translation/
TRANSFORMER = "transformer"
INPUT_MAX_LENGTH = "input_max_length"
OPT_LR_WARMUP_STEPS = "opt_learning_rate_warmup_steps"
MODEL_HP_INITIALIZER_GAIN = "model_hp_initializer_gain"
MODEL_HP_VOCAB_SIZE = "model_hp_vocab_size"
MODEL_HP_NUM_HIDDEN_LAYERS = "model_hp_hidden_layers"
MODEL_HP_EMBEDDING_SHARED_WEIGHTS = "model_hp_embedding_shared_weights"
MODEL_HP_ATTENTION_DENSE = "model_hp_attention_dense"
MODEL_HP_ATTENTION_DROPOUT = "model_hp_attention_dropout"
MODEL_HP_FFN_OUTPUT_DENSE = "model_hp_ffn_output_dense"
MODEL_HP_FFN_FILTER_DENSE = "model_hp_ffn_filter_dense"
MODEL_HP_RELU_DROPOUT = "model_hp_relu_dropout"
MODEL_HP_LAYER_POSTPROCESS_DROPOUT = "model_hp_layer_postprocess_dropout"
MODEL_HP_NORM = "model_hp_norm"
MODEL_HP_SEQ_BEAM_SEARCH = "model_hp_sequence_beam_search"
# ==============================================================================
# == Tags ======================================================================
# ==============================================================================
"""
Tags may be used by all models, a subset of models, or only one model. A
specification for which models require which tags can be found below the tag
definitions.
"""
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# All models: Tags which should appear in absolutely every MLPerf model.
# //////////////////////////////////////////////////////////////////////////////
# This tag signals to start the timer. Emission of this tag need not be (and
# generally will not be) the first part of a submission script. Rather, this
# tag must be emitted prior to performing any work which the MLPerf rules
# state must be timed. This tag is generally emitted directly before the first
# step which invokes random number generation or the first step which must be
# performed on the system under test. (Whichever comes first.) If clarification
# is needed, please file an issue under:
# https://github.com/mlperf/policies
RUN_START = "run_start"
# This tag signals that a submission has reached the relevant stopping criteria,
# and has completed all tasks which are performed in the reference. The wall
# time for a submission will be computed as the difference between the time
# when this tag is emitted and the time when the RUN_START is emitted.
RUN_STOP = "run_stop"
# This tag should be emitted immediately before ending a run, and should be the
# last tag emitted. This tag should indicate the completion of untimed post
# processing work such as system specific cleanup.
RUN_FINAL = "run_final"
# Emit this tag in the place(s) where random seeds are set.
RUN_SET_RANDOM_SEED = "run_set_random_seed"
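# --- Illustrative sketch (not part of the original tag list) -------------------
# Expected emission order for the lifecycle tags above. The `emit` argument is a
# hypothetical stand-in for whatever logging call a submission uses; only the tag
# constants come from this module.
def _example_lifecycle(emit):
  emit(RUN_START)
  emit(RUN_SET_RANDOM_SEED)
  # ... timed training and evaluation work happens here ...
  emit(RUN_STOP)
  emit(RUN_FINAL)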
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Common Values: Constants which are expected to be reported across many models.
# These values are included for convenience.
# //////////////////////////////////////////////////////////////////////////////
BCE = "binary_cross_entropy"
CCE = "categorical_cross_entropy"
SGD = "stochastic_gradient_descent"
# Some conventions distinguish between "vanilla" SGD and SGD with momentum
# (where vanilla SGD would be the specific case of momentum=0)
SGD_WITH_MOMENTUM = "stochastic_gradient_descent_with_momentum"
ADAM = "adam"
LAZY_ADAM = "lazy_adam"
TRUNCATED_NORMAL = "truncated_normal"
RELU = "relu"
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Preprocessing: Tags for generic preprocessing steps
# //////////////////////////////////////////////////////////////////////////////
# The number of training examples in a single epoch
PREPROC_NUM_TRAIN_EXAMPLES = "preproc_num_train_examples"
# The number of evaluation examples in a single epoch
PREPROC_NUM_EVAL_EXAMPLES = "preproc_num_eval_examples"
# This tag is used to declare what part of code tokenizes the training data.
PREPROC_TOKENIZE_TRAINING = "preproc_tokenize_training"
# This tag is used to declare what part of code tokenizes the evaluation data.
PREPROC_TOKENIZE_EVAL = "preproc_tokenize_eval"
# The vocabulary size used for tokenization.
PREPROC_VOCAB_SIZE = "preproc_vocab_size"
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Input: Tags for the timed portion of the data input pipeline
# //////////////////////////////////////////////////////////////////////////////
# The number of examples in the training portion of the data pipeline. Generally
# this should match PREPROC_NUM_TRAIN_EXAMPLES. If it does not (for instance
# if certain examples are dropped in compliance with MLPerf rules), the
# call which declares this tag is a good place for a comment stating why the
# disparity is expected.
INPUT_SIZE = "input_size"
# The size of a training minibatch size. If this value is variable, please emit
# "-1" and then log an implementation specific characterization of the batch
# size which is a reasonable analog to the reference. (For instance log that
# all but the last batch has size 64, and the last batch is a partial batch)
INPUT_BATCH_SIZE = "input_batch_size"
# This tag indicates where the location of the code which defines the order in
# which training examples are traversed. It is not necessary to describe the
# method in the tag emission (though comments are always welcome). Rather, this
# should simply provide a good starting point to an interested party.
INPUT_ORDER = "input_order"
# --------------------------------------
# -- Data Augmentation and Alteration --
# --------------------------------------
# ResNet random cropping
INPUT_CENTRAL_CROP = "input_central_crop"
INPUT_DISTORTED_CROP_MIN_OBJ_COV = "input_distorted_crop_min_object_covered"
INPUT_DISTORTED_CROP_RATIO_RANGE = "input_distorted_crop_aspect_ratio_range"
INPUT_DISTORTED_CROP_AREA_RANGE = "input_distorted_crop_area_range"
INPUT_DISTORTED_CROP_MAX_ATTEMPTS = "input_distorted_crop_max_attempts"
INPUT_MEAN_SUBTRACTION = "input_mean_subtraction"
# Random flip of an image for data augmentation
INPUT_RANDOM_FLIP = "input_random_flip"
INPUT_RESIZE = "input_resize"
INPUT_RESIZE_ASPECT_PRESERVING = "input_resize_aspect_preserving"
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Opt: Tags for declaring optimizer specific information. Submissions should
# declare and log explicit values rather than relying on defaults.
# //////////////////////////////////////////////////////////////////////////////
# The name of the optimizer used. (SGD, Adam, etc.)
OPT_NAME = "opt_name"
OPT_LR = "opt_learning_rate"
OPT_MOMENTUM = "opt_momentum"
OPT_WEIGHT_DECAY = "opt_weight_decay"
# beta1, beta2, and epsilon are optimizer hyperparameters associated with the
# Adam optimizer and its variants (e.g. LazyAdam).
OPT_HP_ADAM_BETA1 = "opt_hp_Adam_beta1"
OPT_HP_ADAM_BETA2 = "opt_hp_Adam_beta2"
OPT_HP_ADAM_EPSILON = "opt_hp_Adam_epsilon"
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Train: Tags for control flow during model training.
# //////////////////////////////////////////////////////////////////////////////
# This tag is emitted when a model first enters its training loop. This is not
# necessarily when it begins to apply gradients; rather, it should be placed at
# a location which logically partitions the submission code.
TRAIN_LOOP = "train_loop"
# The current epoch as said epoch begins training.
TRAIN_EPOCH = "train_epoch"
# This tag is used to indicate approximately where checkpoints are written. Some
# frameworks abstract away checkpoint saving; in such cases simply choose a
# logical place in the code which signals that the framework has been instructed
# to save checkpoints, along with an explanatory comment.
TRAIN_CHECKPOINT = "train_checkpoint"
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Eval: Tags for control flow during model evaluation.
# //////////////////////////////////////////////////////////////////////////////
# This tag should be emitted whenever the submission begins an evaluation pass
# for a given set of weights.
EVAL_START = "eval_start"
# The number of examples on which evaluation is performed.
EVAL_SIZE = "eval_size"
# The target quality at which the model may stop training.
EVAL_TARGET = "eval_target"
# The observed accuracy of the model at a given epoch.
EVAL_ACCURACY = "eval_accuracy"
# This tag should be emitted when the model has determined that it has met the
# target quality set by the reference.
EVAL_STOP = "eval_stop"
# \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
# Model: Tags for logging topology specific information.
# //////////////////////////////////////////////////////////////////////////////
# The loss function (cross entropy, squared error, etc.) used by the model. For
# more exotic loss functions such as those encountered in object detection
# models, additional benchmark specific subcomponents should also be logged.
MODEL_HP_LOSS_FN = "model_hp_loss_fn"
MODEL_HP_INITIAL_SHAPE = "model_hp_initial_shape"
MODEL_HP_FINAL_SHAPE = "model_hp_final_shape"
MODEL_L2_REGULARIZATION = "model_l2_regularization"
MODEL_EXCLUDE_BN_FROM_L2 = "model_exclude_bn_from_l2"
MODEL_HP_RELU = "model_hp_relu"
MODEL_HP_CONV2D_FIXED_PADDING = "model_hp_conv2d_fixed_padding"
MODEL_HP_BATCH_NORM = "model_hp_batch_norm"
MODEL_HP_DENSE = "model_hp_dense"
# ==============================================================================
# == Stdout tags ===============================================================
# ==============================================================================
# These tags are always logged to stdout. The rest will be logged to a file if
# one is available.
STDOUT_TAG_SET = {
RUN_START,
RUN_STOP,
RUN_FINAL,
TRAIN_LOOP,
TRAIN_EPOCH,
EVAL_START,
EVAL_SIZE,
EVAL_TARGET,
EVAL_ACCURACY,
EVAL_STOP,
}
# ==============================================================================
# == Benchmark tag sets ========================================================
# ==============================================================================
ALL_USED_TAGS = set()
TRANSFORMER_TAGS = (
RUN_START,
RUN_STOP,
RUN_FINAL,
RUN_SET_RANDOM_SEED,
PREPROC_NUM_TRAIN_EXAMPLES,
PREPROC_NUM_EVAL_EXAMPLES,
PREPROC_TOKENIZE_TRAINING,
PREPROC_TOKENIZE_EVAL,
PREPROC_VOCAB_SIZE,
INPUT_BATCH_SIZE,
INPUT_MAX_LENGTH,
INPUT_ORDER,
OPT_NAME,
OPT_LR,
OPT_LR_WARMUP_STEPS,
OPT_HP_ADAM_BETA1,
OPT_HP_ADAM_BETA2,
OPT_HP_ADAM_EPSILON,
TRAIN_LOOP,
TRAIN_EPOCH,
EVAL_START,
EVAL_SIZE,
EVAL_TARGET,
EVAL_ACCURACY,
EVAL_STOP,
MODEL_HP_INITIALIZER_GAIN,
MODEL_HP_VOCAB_SIZE,
MODEL_HP_NUM_HIDDEN_LAYERS,
MODEL_HP_EMBEDDING_SHARED_WEIGHTS,
MODEL_HP_ATTENTION_DENSE,
MODEL_HP_ATTENTION_DROPOUT,
MODEL_HP_FFN_OUTPUT_DENSE,
MODEL_HP_FFN_FILTER_DENSE,
MODEL_HP_RELU_DROPOUT,
MODEL_HP_LAYER_POSTPROCESS_DROPOUT,
MODEL_HP_NORM,
MODEL_HP_SEQ_BEAM_SEARCH,
)
ALL_USED_TAGS.update(TRANSFORMER_TAGS) | [
"[email protected]"
] | |
e806e3218cd8bc55131bce1e7d166c76ad1ec718 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03288/s779086883.py | 309d8d3ea1e3ed5219effec5418e8a99e6125de3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | r=int(input())
print("ABC"if r<1200else"ARC"if r<2800else"AGC") | [
"[email protected]"
] | |
600d648aef968fa6d9aaf3ddd8d410059382df4b | 65f856bb3c782fe2fec794192260d5b7aa997ef3 | /wsc_django/wsc_django/apps/shop/services.py | 0a53f3c8e183bdcaeeefad41252f7a5440069671 | [
"MIT"
] | permissive | hzh595395786/wsc_django | 0c8faf0cac1d8db8d9e3fa22f6914b6b64bf788b | c0a4de1a4479fe83f36108c1fdd4d68d18348b8d | refs/heads/main | 2023-06-06T07:26:17.979944 | 2021-06-24T13:14:53 | 2021-06-24T13:14:53 | 336,303,377 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,909 | py | from uuid import uuid4
from django.db.models import Count
from product.constant import ProductStatus
from shop.models import Shop, HistoryRealName, ShopRejectReason, PayChannel
from shop.utils import get_shop_mini_program_qcode, put_qcode_file_to_tencent_cos
from user.models import User
from shop.constant import (
ShopStatus,
)
def create_shop(shop_info: dict, user: User):
"""
    Create a shop
:param shop_info:{
"shop_name": "name",
"shop_img": "http://xxx",
"shop_province": 420000,
"shop_city": 420100,
"shop_county": 420101,
"shop_address": "光谷智慧谷一栋505",
"description": "xxxx",
"suggest_phone": "153xxxxxxxx",
"shop_phone": "152xxxxxxxx",
"super_admin_id": 1
}
    :param user: the user object creating the shop
:return:
"""
    # Create the shop
    # Generate a random shop code; check it and regenerate if it already exists
while True:
shop_code = str(uuid4())[-9:]
shop = Shop.objects.filter(shop_code=shop_code)
if not shop:
break
shop_info["shop_code"] = shop_code
shop_info["shop_phone"] = user.phone
shop_info["super_admin_id"] = user.id
shop = Shop(**shop_info)
shop.save()
return shop
def create_pay_channel(pay_channel_info: dict, shop_id: int):
"""
    Create a pay_channel for a shop
:param pay_channel_info:
:param shop_id:
:return:
"""
shop_pay_channel = PayChannel(shop_id=shop_id, **pay_channel_info)
shop_pay_channel.save()
return shop_pay_channel
def create_shop_reject_reason_by_shop_id(shop_id: int, reject_reason: str):
"""
    Create a rejection reason for a rejected shop
:param shop_id:
:return:
"""
reject_reason = ShopRejectReason(id=shop_id, reject_reason=reject_reason)
reject_reason.save()
return reject_reason
def create_shop_creator_history_realname(shop_id: int, history_realname: str):
"""
    Store the shop creator's historical real name, bound to the shop
:param shop_id:
:param history_realname:
:return:
"""
history_realname = HistoryRealName(id=shop_id, realname=history_realname)
history_realname.save()
return history_realname
def create_shop_mini_program_qcode(shop_code: str):
"""
    Create a mini-program QR code for the shop
:param shop_code:
:return:
"""
qcode_file = get_shop_mini_program_qcode(shop_code)
success, url = put_qcode_file_to_tencent_cos(qcode_file, shop_code)
return success, url
def update_shop_data(shop: Shop, args: dict):
"""
    Update shop information
:param shop:
:param args:
:return:
"""
for k, v in args.items():
setattr(shop, k, v)
shop.save()
return shop
def get_shop_by_shop_code(shop_code: str, only_normal: bool = True):
"""
    Get a Shop object by shop_code
    :param shop_code: shop code
    :param only_normal: only return shops in NORMAL status
:return:
"""
shop = Shop.objects.filter(shop_code=shop_code)
if shop and only_normal:
shop = shop.filter(status=ShopStatus.NORMAL)
shop = shop.first()
return shop
def get_shop_by_shop_id(shop_id: int, filter_close: bool = True):
"""
    Get a shop by shop id
    :param shop_id: shop id
    :param filter_close: exclude closed shops
:return:
"""
shop = Shop.objects.filter(id=shop_id)
if shop and filter_close:
shop = shop.exclude(status=ShopStatus.CLOSED)
shop = shop.first()
return shop
def list_shop_by_shop_ids(shop_ids: list, filter_close: bool = True, role: int = 1):
"""
    Query a list of shops by a list of shop ids
    :param shop_ids:
    :param filter_close: filter out closed shops
    :param role: access role, 1: regular user, 2: admin user; regular users can only see approved (NORMAL) shops
:return:
"""
shop_list_query = Shop.objects.filter(id__in=shop_ids)
if shop_list_query and filter_close:
shop_list_query = shop_list_query.exclude(status=ShopStatus.CLOSED)
if role == 1:
shop_list_query = shop_list_query.filter(status=ShopStatus.NORMAL)
shop_list = shop_list_query.all()
return shop_list
def list_shop_by_shop_status(shop_status: int):
"""
    Query all shops with a given status
:param shop_status:
:return:
"""
shop_list = Shop.objects.filter(status=shop_status).order_by('update_at').all()
return shop_list
def list_shop_creator_history_realname(shop_ids: list):
"""
    List the historical real names of shop creators
:param shop_ids:
:return:
"""
history_realname_list = (
HistoryRealName.objects.filter(id__in=shop_ids).all()
)
return history_realname_list
def list_shop_reject_reason(shop_ids: list):
"""查询出所有的商铺拒绝信息"""
reject_reason_list = ShopRejectReason.objects.filter(id__in=shop_ids).all()
return reject_reason_list | [
"[email protected]"
] | |
a4988105b8f44db42f20393940d9d3a3ae4e6178 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/pybites/intermediate/191/bmi.py | 0ee130d805eb92fd958498062113b022207001d6 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,056 | py | # data """Luke Skywalker,172,77
# C-3PO,167,75
# R2-D2,96,32
# Darth Vader,202,136
# Leia Organa,150,49
# Owen Lars,178,120
# Beru Whitesun lars,165,75
# R5-D4,97,32
# Biggs Darklighter,183,84
# Obi-Wan Kenobi,182,77
# Anakin Skywalker,188,84
# Chewbacca,228,112
# Han Solo,180,80
# Greedo,173,74
# Jek Tono Porkins,180,110
# Yoda,66,17
# Palpatine,170,75
# Boba Fett,183,78.2
# IG-88,200,140
# Bossk,190,113
# """
#
#
# ___ person_max_bmi data_?
# """Return (name, BMI float) of the character in data that
# has the highest BMI (rounded on 2 decimals)"""
# bmi # dict
# data_list ?.s.. "\n"
#
# ___ row __ ?
# current ?.s...s.. ","
# __ l.. ? > 1
# ? ? 0 f__ c.. 2 / i.. ? 1 / 100) ** 2
#
# name_max_bmi m.. b.. key b__.g..
# r.. ? r.. b.. ? 2
#
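# A possible completed version of the skeleton above (illustrative only; the
# original exercise template is intentionally left blank, so this is just one
# way the pieces could fit together):
#
# def person_max_bmi(data=data):
#     """Return (name, BMI float) of the character in data that
#     has the highest BMI (rounded on 2 decimals)"""
#     bmi = {}
#     for row in data.strip().split("\n"):
#         name, height, weight = row.split(",")
#         bmi[name] = float(weight) / (int(height) / 100) ** 2
#     name_max_bmi = max(bmi, key=bmi.get)
#     return name_max_bmi, round(bmi[name_max_bmi], 2)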
# # if __name__ == "__main__":
# # print(person_max_bmi()) | [
"[email protected]"
] | |
6e0eaa8412f98de3cb193a1d8b771c2ac490c3db | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/image_classification/SimplePose_ID1038_for_PyTorch/parallel_encoding/paralle.py | 22a4742535a0677b59076b72a684e6b7562466f4 | [
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 13,118 | py | #
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
# #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# # Created by: Hang Zhang, Rutgers University, Email: [email protected]
# # Modified by Thomas Wolf, HuggingFace Inc., Email: [email protected]
# # Copyright (c) 2017-2018
##
# # This source code is licensed under the MIT-style license found in the
# # LICENSE file in the root directory of this source tree
# #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
"""Encoding Data Parallel"""
import threading
import functools
import torch
from torch.autograd import Variable, Function
import torch.npu.comm as comm
from torch.nn.parallel.data_parallel import DataParallel
from torch.nn.parallel.distributed import DistributedDataParallel
from torch.nn.parallel.parallel_apply import get_a_var
from torch.nn.parallel.scatter_gather import gather
from torch.nn.parallel._functions import ReduceAddCoalesced, Broadcast
torch_ver = torch.__version__[:3]
__all__ = ['allreduce', 'DataParallelModel', 'DataParallelCriterion',
'patch_replication_callback']
def allreduce(*inputs):
"""Cross GPU all reduce autograd operation for calculate mean and
variance in SyncBN.
"""
return AllReduce.apply(*inputs)
class AllReduce(Function):
@staticmethod
def forward(ctx, num_inputs, *inputs):
ctx.num_inputs = num_inputs
ctx.target_gpus = [inputs[i].get_device() for i in range(0, len(inputs), num_inputs)]
inputs = [inputs[i:i + num_inputs]
for i in range(0, len(inputs), num_inputs)]
# sort before reduce sum
inputs = sorted(inputs, key=lambda i: i[0].get_device())
results = comm.reduce_add_coalesced(inputs, ctx.target_gpus[0])
outputs = comm.broadcast_coalesced(results, ctx.target_gpus)
return tuple([t for tensors in outputs for t in tensors])
@staticmethod
def backward(ctx, *inputs):
inputs = [i.data for i in inputs]
inputs = [inputs[i:i + ctx.num_inputs]
for i in range(0, len(inputs), ctx.num_inputs)]
results = comm.reduce_add_coalesced(inputs, ctx.target_gpus[0])
outputs = comm.broadcast_coalesced(results, ctx.target_gpus)
return (None,) + tuple([Variable(t) for tensors in outputs for t in tensors])
class Reduce(Function):
@staticmethod
def forward(ctx, *inputs):
ctx.target_gpus = [inputs[i].get_device() for i in range(len(inputs))]
inputs = sorted(inputs, key=lambda i: i.get_device())
return comm.reduce_add(inputs)
@staticmethod
def backward(ctx, gradOutput):
return Broadcast.apply(ctx.target_gpus, gradOutput)
class DistributedDataParallelModel(DistributedDataParallel):
"""Implements data parallelism at the module level for the DistributedDataParallel module.
This container parallelizes the application of the given module by
splitting the input across the specified devices by chunking in the
batch dimension.
In the forward pass, the module is replicated on each device,
and each replica handles a portion of the input. During the backwards pass,
gradients from each replica are summed into the original module.
Note that the outputs are not gathered, please use compatible
:class:`encoding.parallel.DataParallelCriterion`.
The batch size should be larger than the number of GPUs used. It should
also be an integer multiple of the number of GPUs so that each chunk is
the same size (so that each GPU processes the same number of samples).
Args:
module: module to be parallelized
device_ids: npu devices (default: all devices)
Reference:
Hang Zhang, Kristin Dana, Jianping Shi, Zhongyue Zhang, Xiaogang Wang, Ambrish Tyagi,
    Amit Agrawal. "Context Encoding for Semantic Segmentation."
*The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2018*
Example::
>>> net = encoding.nn.DistributedDataParallelModel(model, device_ids=[0, 1, 2])
>>> y = net(x)
"""
def gather(self, outputs, output_device):
return outputs
class DataParallelModel(DataParallel):
"""Implements data parallelism at the module level.
This container parallelizes the application of the given module by
splitting the input across the specified devices by chunking in the
batch dimension.
In the forward pass, the module is replicated on each device,
and each replica handles a portion of the input. During the backwards pass,
gradients from each replica are summed into the original module.
Note that the outputs are not gathered, please use compatible
:class:`encoding.parallel.DataParallelCriterion`.
The batch size should be larger than the number of GPUs used. It should
also be an integer multiple of the number of GPUs so that each chunk is
the same size (so that each GPU processes the same number of samples).
Args:
module: module to be parallelized
device_ids: npu devices (default: all devices)
Reference:
Hang Zhang, Kristin Dana, Jianping Shi, Zhongyue Zhang, Xiaogang Wang, Ambrish Tyagi,
    Amit Agrawal. "Context Encoding for Semantic Segmentation."
*The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2018*
Example::
>>> net = encoding.nn.DataParallelModel(model, device_ids=[0, 1, 2])
>>> y = net(x)
"""
def gather(self, outputs, output_device):
return outputs
def replicate(self, module, device_ids):
modules = super(DataParallelModel, self).replicate(module, device_ids)
execute_replication_callbacks(modules)
return modules
class DataParallelCriterion(DataParallel):
"""
    Calculate loss on multiple GPUs, which balances the memory usage.
    The targets are split across the specified devices by chunking in
the batch dimension. Please use together with :class:`encoding.parallel.DataParallelModel`.
Reference:
Hang Zhang, Kristin Dana, Jianping Shi, Zhongyue Zhang, Xiaogang Wang, Ambrish Tyagi,
    Amit Agrawal. "Context Encoding for Semantic Segmentation."
*The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2018*
Example::
>>> net = encoding.nn.DataParallelModel(model, device_ids=[0, 1, 2])
>>> criterion = encoding.nn.DataParallelCriterion(criterion, device_ids=[0, 1, 2])
>>> y = net(x)
>>> loss = criterion(y, target)
"""
def forward(self, inputs, *targets, **kwargs):
# input should be already scattered
# scattering the targets instead
if not self.device_ids:
return self.module(inputs, *targets, **kwargs)
targets, kwargs = self.scatter(targets, kwargs, self.device_ids)
if len(self.device_ids) == 1:
return self.module(inputs, *targets[0], **kwargs[0])
replicas = self.replicate(self.module, self.device_ids[:len(inputs)])
outputs = _criterion_parallel_apply(replicas, inputs, targets, kwargs)
# return Reduce.apply(*outputs) / len(outputs)
# return self.gather(outputs, self.output_device).mean()
return self.gather(outputs, self.output_device)
def _criterion_parallel_apply(modules, inputs, targets, kwargs_tup=None, devices=None):
assert len(modules) == len(inputs)
assert len(targets) == len(inputs)
if kwargs_tup:
assert len(modules) == len(kwargs_tup)
else:
kwargs_tup = ({},) * len(modules)
if devices is not None:
assert len(modules) == len(devices)
else:
devices = [None] * len(modules)
lock = threading.Lock()
results = {}
if torch_ver != "0.3":
grad_enabled = torch.is_grad_enabled()
def _worker(i, module, input, target, kwargs, device=None):
if torch_ver != "0.3":
torch.set_grad_enabled(grad_enabled)
if device is None:
device = get_a_var(input).get_device()
try:
with torch.npu.device(device):
# this also avoids accidental slicing of `input` if it is a Tensor
if not isinstance(input, (list, tuple)):
input = (input,)
if not isinstance(target, (list, tuple)):
target = (target,)
output = module(*(input + target), **kwargs)
with lock:
results[i] = output
except Exception as e:
with lock:
results[i] = e
if len(modules) > 1:
threads = [threading.Thread(target=_worker,
args=(i, module, input, target,
kwargs, device), )
for i, (module, input, target, kwargs, device) in
enumerate(zip(modules, inputs, targets, kwargs_tup, devices))]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
else:
        _worker(0, modules[0], inputs[0], targets[0], kwargs_tup[0], devices[0])
outputs = []
for i in range(len(inputs)):
output = results[i]
if isinstance(output, Exception):
raise output
outputs.append(output)
return outputs
###########################################################################
# Adapted from Synchronized-BatchNorm-PyTorch.
# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch
#
class CallbackContext(object):
pass
def execute_replication_callbacks(modules):
"""
    Execute a replication callback `__data_parallel_replicate__` on each module created
by original replication.
The callback will be invoked with arguments `__data_parallel_replicate__(ctx, copy_id)`
    Note that, as all modules are isomorphic, we assign each sub-module a context
(shared among multiple copies of this module on different devices).
Through this context, different copies can share some information.
We guarantee that the callback on the master copy (the first copy) will be called ahead
of calling the callback of any slave copies.
"""
master_copy = modules[0]
nr_modules = len(list(master_copy.modules()))
ctxs = [CallbackContext() for _ in range(nr_modules)]
for i, module in enumerate(modules):
for j, m in enumerate(module.modules()):
if hasattr(m, '__data_parallel_replicate__'):
m.__data_parallel_replicate__(ctxs[j], i)
def patch_replication_callback(data_parallel):
"""
Monkey-patch an existing `DataParallel` object. Add the replication callback.
Useful when you have customized `DataParallel` implementation.
Examples:
> sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False)
> sync_bn = DataParallel(sync_bn, device_ids=[0, 1])
> patch_replication_callback(sync_bn)
# this is equivalent to
> sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False)
> sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1])
"""
assert isinstance(data_parallel, DataParallel)
old_replicate = data_parallel.replicate
@functools.wraps(old_replicate)
def new_replicate(module, device_ids):
modules = old_replicate(module, device_ids)
execute_replication_callbacks(modules)
return modules
data_parallel.replicate = new_replicate
| [
"[email protected]"
] | |
94171e19440d59601861aee4f580b056a82ba31e | 104085f6878411a137521b17c06612e5f648ef33 | /service_pro/service_pro/doctype/agent_payment_request/agent_payment_request_dashboard.py | bf5bad55f86a6116faab28263273e7b0828fce28 | [
"MIT"
] | permissive | ksbbalean/service-pro | d39f0d12977dd66627b9f7c0336c605d7be4c388 | c89b39a8e9967dada50dc0db4b08460ed45843bf | refs/heads/master | 2023-04-13T05:35:19.842021 | 2021-04-22T11:05:18 | 2021-04-22T11:05:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | from frappe import _
def get_data():
return {
'fieldname': 'agent_payment_request',
'transactions': [
{
'label': _('Linked Forms'),
'items': ["Journal Entry"]
}
]
} | [
"[email protected]"
] | |
9efe79a16c6f27bddfc4536d573389398935b830 | 3b5d1a53af8d2f4094005f342403eabc7af9c980 | /moderation_module/storage/logging_data.py | 5f1eb3107e2c0a2e75844b5cbdd60700cde60414 | [
"MIT"
] | permissive | alentoghostflame/StupidAlentoBot | daa828be3d47b24d3e13d500155a6a0d2019f724 | c024bfb79a9ecb0d9fda5ddc4e361a0cb878baba | refs/heads/master | 2021-06-30T17:50:14.997416 | 2021-06-08T03:54:24 | 2021-06-08T03:54:24 | 237,541,303 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | from alento_bot import guild_data_transformer
import logging
import typing
logger = logging.getLogger("main_bot")
@guild_data_transformer(name="guild_logging_config")
class GuildLoggingConfig:
def __init__(self):
self.toggled_on: bool = False
self.log_channel_id: int = 0
self.exempt_channels: typing.Set[int] = set()
self.log_bots: bool = False
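# Minimal illustrative sketch (not part of the original module): however the
# surrounding bot code obtains a guild's GuildLoggingConfig through alento_bot's
# storage, the fields above are meant to be read and updated as plain attributes.
# The ids below are made-up placeholders.
#
# config = GuildLoggingConfig()
# config.toggled_on = True
# config.log_channel_id = 123456789012345678
# config.exempt_channels.add(987654321098765432)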
| [
"[email protected]"
] | |
b176a24c12a2f81960d594b227e0eb66dc7ca889 | c192132c7c2b815d480b243b591c2c9dac8d969b | /result.py | f369eb20ad333d584dbf29c1573ee366b42523f5 | [] | no_license | Annihilater/blast.ncbi.nlm.nih.gov | b967dd3abf1ca1b075566262ee11906d7f5170ce | e62dabb4f9fc7c0e359051e3cdbc97c45f1fbdee | refs/heads/master | 2020-08-24T14:32:54.451809 | 2019-10-23T09:41:37 | 2019-10-23T09:41:37 | 216,845,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,854 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2019/10/23 11:16
# @Author: yanmiexingkong
# @email : [email protected]
# @File : result.py
import requests
def get_result(rid):
"""
    Fetch the result for the given rid (BLAST request ID)
:param rid:
:return:
"""
url = "https://blast.ncbi.nlm.nih.gov/Blast.cgi"
headers = {
'authority': "blast.ncbi.nlm.nih.gov",
'pragma': "no-cache",
'cache-control': "no-cache,no-cache",
'upgrade-insecure-requests': "1",
'user-agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36",
'sec-fetch-mode': "navigate",
'sec-fetch-user': "?1",
'origin': "https://blast.ncbi.nlm.nih.gov",
'content-type': "application/x-www-form-urlencoded",
'accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
'sec-fetch-site': "same-origin",
'referer': "https://blast.ncbi.nlm.nih.gov/Blast.cgi",
'accept-encoding': "gzip, deflate, br",
'accept-language': "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,zh-TW;q=0.6",
'cookie': "MyBlastUser=1-K62_H2PRYnAJWAW8C499055; ncbi_sid=5AAB49C2DAD5CBD1_0000SID; _ga=GA1.2.1760716258.1571642619; _gid=GA1.2.1120047674.1571642619; _ga=GA1.3.1760716258.1571642619; _gid=GA1.3.1120047674.1571642619; QSI_HistorySession=https%3A%2F%2Fwww.nlm.nih.gov%2F%23~1571655071695; ___rl__test__cookies=1571655469341; OUTFOX_SEARCH_USER_ID_NCOO=1690574555.305844; books.article.report=; MyNcbiSigninPreferences=O25jYmlscyY%3D; ncbi_prevPHID=CE8CB87BDAD9B9910000000000110007.m_8.09; WebCubbyUser=YFK36EUALZSLFML717L8VTJ8L7VZ587M%3Blogged-in%3Dtrue%3Bmy-name%3Dyanmiexingkong%3Bpersistent%3Dfalse%405AAB49C2DAD5CBD1_0000SID; BlastCubbyImported=active; ncbi_pinger=N4IgDgTgpgbg+mAFgSwCYgFwgKwEFcBCALAJwDCATACK5XZkFUkCM2A7Mx6UcwGzbYAygEkqIADQgArgDsANgHsAhqhlQAHgBdMoCphAAjOUoDO2yQGZ9MiBJBFrUAO5HTm6CalzNJu9n12nPoAZkpyJlB2FAAM+tjRZCQWFKw0AKK8ABy4LNF5+QWFzFHFWK5mGDaVzuXuUJ7eJhgAcgDyzWlRemXGZgB0MgDGBsgDcgC2A8iIfQDmCjBRJPrMJDF2FrFYzDGxlqUgq+uW3YecbBtWWKHhkZYOWO5SdyAWmQGWy1i8JD8kbFZJEQtiAfmxeGwKBcgVcQNE+hReH1ikCHtJ5MpVBpzPZ/Ns/LDMmwHJJsGjmNFeLw/NTttgKHpSRcsHtQQcAQ4AL6coA,MyBlastUser=1-K62_H2PRYnAJWAW8C499055; ncbi_sid=5AAB49C2DAD5CBD1_0000SID; _ga=GA1.2.1760716258.1571642619; _gid=GA1.2.1120047674.1571642619; _ga=GA1.3.1760716258.1571642619; _gid=GA1.3.1120047674.1571642619; QSI_HistorySession=https%3A%2F%2Fwww.nlm.nih.gov%2F%23~1571655071695; ___rl__test__cookies=1571655469341; OUTFOX_SEARCH_USER_ID_NCOO=1690574555.305844; books.article.report=; MyNcbiSigninPreferences=O25jYmlscyY%3D; ncbi_prevPHID=CE8CB87BDAD9B9910000000000110007.m_8.09; WebCubbyUser=YFK36EUALZSLFML717L8VTJ8L7VZ587M%3Blogged-in%3Dtrue%3Bmy-name%3Dyanmiexingkong%3Bpersistent%3Dfalse%405AAB49C2DAD5CBD1_0000SID; BlastCubbyImported=active; ncbi_pinger=N4IgDgTgpgbg+mAFgSwCYgFwgKwEFcBCALAJwDCATACK5XZkFUkCM2A7Mx6UcwGzbYAygEkqIADQgArgDsANgHsAhqhlQAHgBdMoCphAAjOUoDO2yQGZ9MiBJBFrUAO5HTm6CalzNJu9n12nPoAZkpyJlB2FAAM+tjRZCQWFKw0AKK8ABy4LNF5+QWFzFHFWK5mGDaVzuXuUJ7eJhgAcgDyzWlRemXGZgB0MgDGBsgDcgC2A8iIfQDmCjBRJPrMJDF2FrFYzDGxlqUgq+uW3YecbBtWWKHhkZYOWO5SdyAWmQGWy1i8JD8kbFZJEQtiAfmxeGwKBcgVcQNE+hReH1ikCHtJ5MpVBpzPZ/Ns/LDMmwHJJsGjmNFeLw/NTttgKHpSRcsHtQQcAQ4AL6coA; ncbi_sid=5AAB49C2DAD5CBD1_0000SID; BlastCubbyImported=passive",
'Postman-Token': "effbbf9e-09ff-4958-8a0e-a8c3d2719ae1,ffe9004f-7563-4ea5-ad02-943a343657a8",
'Host': "blast.ncbi.nlm.nih.gov",
'Content-Length': "1354",
'Connection': "keep-alive"
}
data = {'ADV_VIEW': 'true', 'ALIGNMENTS': '100', 'ALIGNMENT_VIEW': 'Pairwise', 'BLAST_PROGRAMS': 'blastp',
'CDD_RID': 'UWU3DJDS015', 'CDD_SEARCH_STATE': '2', 'CLIENT': 'web', 'COMPOSITION_BASED_STATISTICS': '2',
'CONFIG_DESCR': '2%2C3%2C4%2C5%2C6%2C7%2C8', 'DATABASE': 'nr_v5', 'DB_DISPLAY_NAME': 'nr',
'DESCRIPTIONS': '100', 'EQ_OP': 'AND', 'EXPECT': '10', 'FILTER': 'F', 'FORMAT_NUM_ORG': '1',
'FORMAT_OBJECT': 'Alignment', 'FORMAT_TYPE': 'HTML', 'FULL_DBNAME': 'nr_v5', 'GAPCOSTS': '11%2B1',
'GET_SEQUENCE': 'true', 'HSP_RANGE_MAX': '0', 'JOB_TITLE': '%2B5ubb%2B', 'LAYOUT': 'OneWindow',
'LINE_LENGTH': '60', 'MASK_CHAR': '2', 'MASK_COLOR': '1', 'MATRIX_NAME': 'BLOSUM62', 'MAX_NUM_SEQ': '100',
'NCBI_GI': 'false', 'NEW_VIEW': 'true', 'NUM_DIFFS': '0', 'NUM_OPTS_DIFFS': '0', 'NUM_ORG': '1',
'NUM_OVERVIEW': '100', 'ORG_DBS': 'giless_dbvers5', 'PAGE': 'Proteins', 'PAGE_TYPE': 'BlastSearch',
'PROGRAM': 'blastp',
'QUERYFILE': '%3E5ubb%0D%0ASQVINGEMQFYARAKLFYQEVPATEEGMMGNFIELSSPDIQASQKFLRKFVGGPGRAGTDCALDCGSGIGRVSKHVLLPVFNSVELVDMMESFLLEAQNYLQVKGDESYHCYSLQEFTPPFRRYDVIWIQWVSGHLTDKDLLAFLSRCRDGLKENGIIILKDNVAREGCILDLSDSSVTRDMDILRSLIRKSGLVVLGQEKQDGFPEQCIPVWMFALH%0D%0A',
'QUERY_INFO': '%2B5ubb%2B', 'QUERY_LENGTH': '218', 'REPEATS': '45518', 'RTOE': '27',
'SAVED_SEARCH': 'true', 'SEARCH_DB_STATUS': '31', 'SELECTED_PROG_TYPE': 'blastp', 'SERVICE': 'plain',
'SHORT_QUERY_ADJUST': 'on', 'SHOW_CDS_FEATURE': 'false', 'SHOW_LINKOUT': 'true', 'SHOW_OVERVIEW': 'true',
'UNIQ_DEFAULTS_NAME': 'A_SearchDefaults_1iMhfz_1v71_duIAy0mW1FA_GTXQl_2J8uR3', 'USER_DEFAULT_MATRIX': '4',
'USER_DEFAULT_PROG_TYPE': 'blastp', 'USER_TYPE': '1', 'WORD_SIZE': '6', '_PGR': '6', 'db': 'protein',
'stype': 'protein', 'CMD': 'Get'}
data.update({'RID': rid})
response = requests.post(url=url, data=data, headers=headers)
html = response.text
with open('data/html/result.html', 'w') as f:
f.write(html)
if __name__ == '__main__':
rid = 'UX0VYAFM015'
rid2 = 'UXTZXYRW015'
rid3 = 'V019SAM701R'
get_result(rid3)
| [
"[email protected]"
] | |
6f361c7d8b2af01f6ee96c8df06630eaf5cef7f8 | 1929a989d1e2a5c5caabad32aa8baf4444250574 | /h2o-py/tests/testdir_munging/pyunit_upload_large.py | 3d4d69107d8603c202a6d6e94a6ae18df88df391 | [
"Apache-2.0"
] | permissive | codelibs/h2o-3 | 9c417c0c6ee4ae9a6eaffe5a0373c0d78c37527e | cf96fb28da4732870a0d65c24f0d99f422d140d1 | refs/heads/master | 2023-05-27T10:04:14.408620 | 2023-04-28T18:16:48 | 2023-04-28T18:16:48 | 253,197,280 | 0 | 0 | Apache-2.0 | 2020-04-05T09:22:41 | 2020-04-05T09:22:40 | null | UTF-8 | Python | false | false | 772 | py | from __future__ import print_function
import sys
sys.path.insert(1,"../../")
import h2o
import os
import tempfile
from tests import pyunit_utils
def generate_large_file(path, size):
with open(path, "wb") as f:
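        # Seek to the last byte and write it: the file then reports the full
        # `size` without the preceding bytes ever being written (they are left
        # as a hole on filesystems that support sparse files).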
f.seek(size-1)
f.write(b"\0")
assert size == os.stat(path).st_size
def upload_large_file():
path = os.path.join(tempfile.mkdtemp(), "large.bin")
byte_size = 2 * 1024 * 1024 * 1024 + 1 # 2GB + 1 byte
generate_large_file(path, byte_size)
raw_data = h2o.api("POST /3/PostFile", filename=path)
print(raw_data)
assert raw_data["total_bytes"] == byte_size
h2o.remove(raw_data["destination_frame"])
if __name__ == "__main__":
pyunit_utils.standalone_test(upload_large_file)
else:
upload_large_file()
| [
"[email protected]"
] | |
6f927f95ffc8e9ede4b6ba26df040a784d1f5146 | 8f5cb19e9c6a0670100b4a4fbdbb892d94ccd4a8 | /deployment/georegistry.py | 5c1e240d6a6b964f6e177dd39a8f7f9b1dc6a607 | [] | no_license | invisibleroads/georegistry | 84438e680e56ac716f60d23784f05469c4888841 | df56cc17b01a794bfbd53f354bb5fa9abeb420cc | refs/heads/master | 2023-08-24T03:34:50.554375 | 2011-05-05T16:36:19 | 2011-05-05T16:36:19 | 966,680 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,359 | py | 'GeoRegistry API Python wrapper'
# Import system modules
import urllib
import urllib2
import simplejson
# Core
baseURL = 'http://georegistry.invisibleroads.com'
def updateFeatures(key, srid, featureCollection, tags, public=False):
'Update features using the GeoRegistry web service'
# Initialize
url = baseURL + '/features'
# Call
responseData = call(url, {
'key': key,
'srid': srid,
'featureCollection': featureCollection,
'tags': '\n'.join(tags),
'public': 1 if public else 0,
}, 'POST')
# Return
return [int(x) for x in responseData.splitlines()]
def deleteFeatures(key, featureIDs):
'Delete features using the GeoRegistry web service'
# Initialize
url = baseURL + '/features'
# Call
call(url, {
'key': key,
'featureIDs': '\n'.join(str(x) for x in featureIDs),
}, 'DELETE')
def getTags(key):
'Get tags with visible features using the GeoRegistry web service'
# Initialize
url = baseURL + '/tags'
# Call
responseData = call(url + '.json', {
'key': key,
}, 'GET')
# Return
return responseData.splitlines()
def viewMaps(key, srid, tags, simplified=True, bboxFormat='yxyx', bbox=None):
'Assemble a map using the GeoRegistry web service'
# Initialize
url = baseURL + '/maps'
# Call
responseData = call(url + '.json', {
'key': key,
'srid': srid,
'tags': '\n'.join(tags),
'bboxFormat': bboxFormat,
'bbox': bbox if bbox else '',
'simplified': 1 if simplified else 0,
}, 'GET')
# Return
return responseData
# Helpers
def call(url, valueByName, method):
'Call a method in the GeoRegistry web service'
requestData = urllib.urlencode(valueByName.items())
request = Request(method, url, requestData) if method.upper() == 'POST' else Request(method, url + '?' + requestData)
try:
response = urllib2.urlopen(request)
except urllib2.HTTPError, error:
raise GeoRegistryError(error.read())
return response.read()
class Request(urllib2.Request):
def __init__(self, method, *args, **kwargs):
self._method = method
urllib2.Request.__init__(self, *args, **kwargs)
def get_method(self):
return self._method
# Error
class GeoRegistryError(Exception):
pass
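# A minimal usage sketch (illustrative only): the API key, SRID, tags and the
# GeoJSON string below are made-up placeholders, not values from this project.
#
# featureCollection = '{"type": "FeatureCollection", "features": []}'
# featureIDs = updateFeatures('your-api-key', 4326, featureCollection, ['demo'])
# print viewMaps('your-api-key', 4326, ['demo'])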
| [
"[email protected]"
] | |
ba3339eeda813a3c7d315fcb1cb1c530a8080125 | d094ba0c8a9b1217fbf014aa79a283a49aabe88c | /env/lib/python3.6/site-packages/sklearn/preprocessing/_discretization.py | 7b26ce916e1893d6d13fc02304699fa07bd412b3 | [
"Apache-2.0"
] | permissive | Raniac/NEURO-LEARN | d9274e0baadd97bb02da54bdfcf6ca091fc1c703 | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | refs/heads/master | 2022-12-25T23:46:54.922237 | 2020-09-06T03:15:14 | 2020-09-06T03:15:14 | 182,013,100 | 9 | 2 | Apache-2.0 | 2022-12-09T21:01:00 | 2019-04-18T03:57:00 | CSS | UTF-8 | Python | false | false | 12,083 | py | # -*- coding: utf-8 -*-
# Author: Henry Lin <[email protected]>
# Tom Dupré la Tour
# License: BSD
from __future__ import division, absolute_import
import numbers
import numpy as np
import warnings
from . import OneHotEncoder
from ..base import BaseEstimator, TransformerMixin
from ..utils.validation import check_array
from ..utils.validation import check_is_fitted
from ..utils.validation import FLOAT_DTYPES
from ..utils.fixes import np_version
class KBinsDiscretizer(BaseEstimator, TransformerMixin):
"""Bin continuous data into intervals.
Read more in the :ref:`User Guide <preprocessing_discretization>`.
Parameters
----------
n_bins : int or array-like, shape (n_features,) (default=5)
The number of bins to produce. Raises ValueError if ``n_bins < 2``.
encode : {'onehot', 'onehot-dense', 'ordinal'}, (default='onehot')
Method used to encode the transformed result.
onehot
Encode the transformed result with one-hot encoding
and return a sparse matrix. Ignored features are always
stacked to the right.
onehot-dense
Encode the transformed result with one-hot encoding
and return a dense array. Ignored features are always
stacked to the right.
ordinal
Return the bin identifier encoded as an integer value.
strategy : {'uniform', 'quantile', 'kmeans'}, (default='quantile')
Strategy used to define the widths of the bins.
uniform
All bins in each feature have identical widths.
quantile
All bins in each feature have the same number of points.
kmeans
Values in each bin have the same nearest center of a 1D k-means
cluster.
Attributes
----------
n_bins_ : int array, shape (n_features,)
Number of bins per feature. Bins whose width are too small
(i.e., <= 1e-8) are removed with a warning.
bin_edges_ : array of arrays, shape (n_features, )
The edges of each bin. Contain arrays of varying shapes ``(n_bins_, )``
Ignored features will have empty arrays.
Examples
--------
>>> X = [[-2, 1, -4, -1],
... [-1, 2, -3, -0.5],
... [ 0, 3, -2, 0.5],
... [ 1, 4, -1, 2]]
>>> est = KBinsDiscretizer(n_bins=3, encode='ordinal', strategy='uniform')
>>> est.fit(X) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
KBinsDiscretizer(...)
>>> Xt = est.transform(X)
>>> Xt # doctest: +SKIP
array([[ 0., 0., 0., 0.],
[ 1., 1., 1., 0.],
[ 2., 2., 2., 1.],
[ 2., 2., 2., 2.]])
Sometimes it may be useful to convert the data back into the original
feature space. The ``inverse_transform`` function converts the binned
data into the original feature space. Each value will be equal to the mean
of the two bin edges.
>>> est.bin_edges_[0]
array([-2., -1., 0., 1.])
>>> est.inverse_transform(Xt)
array([[-1.5, 1.5, -3.5, -0.5],
[-0.5, 2.5, -2.5, -0.5],
[ 0.5, 3.5, -1.5, 0.5],
[ 0.5, 3.5, -1.5, 1.5]])
Notes
-----
In bin edges for feature ``i``, the first and last values are used only for
``inverse_transform``. During transform, bin edges are extended to::
np.concatenate([-np.inf, bin_edges_[i][1:-1], np.inf])
You can combine ``KBinsDiscretizer`` with
:class:`sklearn.compose.ColumnTransformer` if you only want to preprocess
part of the features.
``KBinsDiscretizer`` might produce constant features (e.g., when
``encode = 'onehot'`` and certain bins do not contain any data).
These features can be removed with feature selection algorithms
(e.g., :class:`sklearn.feature_selection.VarianceThreshold`).
See also
--------
sklearn.preprocessing.Binarizer : class used to bin values as ``0`` or
``1`` based on a parameter ``threshold``.
"""
def __init__(self, n_bins=5, encode='onehot', strategy='quantile'):
self.n_bins = n_bins
self.encode = encode
self.strategy = strategy
def fit(self, X, y=None):
"""Fits the estimator.
Parameters
----------
X : numeric array-like, shape (n_samples, n_features)
Data to be discretized.
y : ignored
Returns
-------
self
"""
X = check_array(X, dtype='numeric')
valid_encode = ('onehot', 'onehot-dense', 'ordinal')
if self.encode not in valid_encode:
raise ValueError("Valid options for 'encode' are {}. "
"Got encode={!r} instead."
.format(valid_encode, self.encode))
valid_strategy = ('uniform', 'quantile', 'kmeans')
if self.strategy not in valid_strategy:
raise ValueError("Valid options for 'strategy' are {}. "
"Got strategy={!r} instead."
.format(valid_strategy, self.strategy))
n_features = X.shape[1]
n_bins = self._validate_n_bins(n_features)
bin_edges = np.zeros(n_features, dtype=object)
for jj in range(n_features):
column = X[:, jj]
col_min, col_max = column.min(), column.max()
if col_min == col_max:
warnings.warn("Feature %d is constant and will be "
"replaced with 0." % jj)
n_bins[jj] = 1
bin_edges[jj] = np.array([-np.inf, np.inf])
continue
if self.strategy == 'uniform':
bin_edges[jj] = np.linspace(col_min, col_max, n_bins[jj] + 1)
elif self.strategy == 'quantile':
quantiles = np.linspace(0, 100, n_bins[jj] + 1)
if np_version < (1, 9):
quantiles = list(quantiles)
bin_edges[jj] = np.asarray(np.percentile(column, quantiles))
elif self.strategy == 'kmeans':
from ..cluster import KMeans # fixes import loops
# Deterministic initialization with uniform spacing
uniform_edges = np.linspace(col_min, col_max, n_bins[jj] + 1)
init = (uniform_edges[1:] + uniform_edges[:-1])[:, None] * 0.5
# 1D k-means procedure
km = KMeans(n_clusters=n_bins[jj], init=init, n_init=1)
centers = km.fit(column[:, None]).cluster_centers_[:, 0]
# Must sort, centers may be unsorted even with sorted init
centers.sort()
bin_edges[jj] = (centers[1:] + centers[:-1]) * 0.5
bin_edges[jj] = np.r_[col_min, bin_edges[jj], col_max]
# Remove bins whose width are too small (i.e., <= 1e-8)
if self.strategy in ('quantile', 'kmeans'):
mask = np.ediff1d(bin_edges[jj], to_begin=np.inf) > 1e-8
bin_edges[jj] = bin_edges[jj][mask]
if len(bin_edges[jj]) - 1 != n_bins[jj]:
warnings.warn('Bins whose width are too small (i.e., <= '
'1e-8) in feature %d are removed. Consider '
'decreasing the number of bins.' % jj)
n_bins[jj] = len(bin_edges[jj]) - 1
self.bin_edges_ = bin_edges
self.n_bins_ = n_bins
if 'onehot' in self.encode:
self._encoder = OneHotEncoder(
categories=[np.arange(i) for i in self.n_bins_],
sparse=self.encode == 'onehot')
# Fit the OneHotEncoder with toy datasets
# so that it's ready for use after the KBinsDiscretizer is fitted
self._encoder.fit(np.zeros((1, len(self.n_bins_)), dtype=int))
return self
def _validate_n_bins(self, n_features):
"""Returns n_bins_, the number of bins per feature.
"""
orig_bins = self.n_bins
if isinstance(orig_bins, numbers.Number):
if not isinstance(orig_bins, (numbers.Integral, np.integer)):
raise ValueError("{} received an invalid n_bins type. "
"Received {}, expected int."
.format(KBinsDiscretizer.__name__,
type(orig_bins).__name__))
if orig_bins < 2:
raise ValueError("{} received an invalid number "
"of bins. Received {}, expected at least 2."
.format(KBinsDiscretizer.__name__, orig_bins))
return np.full(n_features, orig_bins, dtype=np.int)
n_bins = check_array(orig_bins, dtype=np.int, copy=True,
ensure_2d=False)
if n_bins.ndim > 1 or n_bins.shape[0] != n_features:
raise ValueError("n_bins must be a scalar or array "
"of shape (n_features,).")
bad_nbins_value = (n_bins < 2) | (n_bins != orig_bins)
violating_indices = np.where(bad_nbins_value)[0]
if violating_indices.shape[0] > 0:
indices = ", ".join(str(i) for i in violating_indices)
raise ValueError("{} received an invalid number "
"of bins at indices {}. Number of bins "
"must be at least 2, and must be an int."
.format(KBinsDiscretizer.__name__, indices))
return n_bins
def transform(self, X):
"""Discretizes the data.
Parameters
----------
X : numeric array-like, shape (n_samples, n_features)
Data to be discretized.
Returns
-------
Xt : numeric array-like or sparse matrix
Data in the binned space.
"""
check_is_fitted(self, ["bin_edges_"])
Xt = check_array(X, copy=True, dtype=FLOAT_DTYPES)
n_features = self.n_bins_.shape[0]
if Xt.shape[1] != n_features:
raise ValueError("Incorrect number of features. Expecting {}, "
"received {}.".format(n_features, Xt.shape[1]))
bin_edges = self.bin_edges_
for jj in range(Xt.shape[1]):
# Values which are close to a bin edge are susceptible to numeric
# instability. Add eps to X so these values are binned correctly
# with respect to their decimal truncation. See documentation of
# numpy.isclose for an explanation of ``rtol`` and ``atol``.
rtol = 1.e-5
atol = 1.e-8
eps = atol + rtol * np.abs(Xt[:, jj])
Xt[:, jj] = np.digitize(Xt[:, jj] + eps, bin_edges[jj][1:])
np.clip(Xt, 0, self.n_bins_ - 1, out=Xt)
if self.encode == 'ordinal':
return Xt
return self._encoder.transform(Xt)
def inverse_transform(self, Xt):
"""Transforms discretized data back to original feature space.
Note that this function does not regenerate the original data
due to discretization rounding.
Parameters
----------
Xt : numeric array-like, shape (n_sample, n_features)
Transformed data in the binned space.
Returns
-------
Xinv : numeric array-like
Data in the original feature space.
"""
check_is_fitted(self, ["bin_edges_"])
if 'onehot' in self.encode:
Xt = self._encoder.inverse_transform(Xt)
Xinv = check_array(Xt, copy=True, dtype=FLOAT_DTYPES)
n_features = self.n_bins_.shape[0]
if Xinv.shape[1] != n_features:
raise ValueError("Incorrect number of features. Expecting {}, "
"received {}.".format(n_features, Xinv.shape[1]))
for jj in range(n_features):
bin_edges = self.bin_edges_[jj]
bin_centers = (bin_edges[1:] + bin_edges[:-1]) * 0.5
Xinv[:, jj] = bin_centers[np.int_(Xinv[:, jj])]
return Xinv
| [
"[email protected]"
] | |
8071db56a1faa459eccd4c3bfbd0c735f51f2c1e | 6ace7e15e3191d1b8228ad7922a8552ca84f84e7 | /.history/image_detector_20200614203341.py | 2465a36001cd934f7bd739e37f170e75e719b85c | [] | no_license | mehmetaliarican/Similar-Image-Finder | f72e95be50c51aa03fc64954a03124b199ca64b1 | a9e0015c443b4a73394099cccf60329cfc4c7cef | refs/heads/master | 2022-10-27T00:57:43.173993 | 2020-06-14T18:02:16 | 2020-06-14T18:02:16 | 272,256,295 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,485 | py | from skimage.metrics import structural_similarity as ssim
from imutils import paths
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import os
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("-t", "--threshold", type=float, default=0.9,
help="threshold")
ap.add_argument("-d", "--dataset", required=True,
help="path to input dataset")
args = vars(ap.parse_args())
class Utility:
totalFound = 0
totalSearch = 0
searching = False
def mse(self, imageA, imageB):
# the 'Mean Squared Error' between the two images is the
# sum of the squared difference between the two images;
# NOTE: the two images must have the same dimension
err = np.sum((imageA.astype("float") - imageB.astype("float")) ** 2)
err /= float(imageA.shape[0] * imageA.shape[1])
# return the MSE, the lower the error, the more "similar"
# the two images are
return err
def compare_images(self, im1, im2, imageA, imageB):
# compute the mean squared error and structural similarity
# index for the images
m = self.mse(imageA, imageB)
s = ssim(imageA, imageB)
tres = args['threshold']
        self.totalSearch += 1  # '++' is not valid Python; count comparisons on the instance counter
if s >= tres:
print("Image[{c1}] '{p1}' compared to Image[{c2}] '{p2}' Simility:{sim}".format(c1=im1['comp'], c2=im2['comp'],p1=im1['path'], p2=im2['path'], sim=str(s)))
twin = np.hstack([imageA, imageB])
cv2.imshow('', twin)
cv2.waitKey(0)
self.searching = False
elif self.searching is False:
print('Searching...')
self.searching = True
imagePaths = list(paths.list_images(args['dataset']))
companies = ['dhl', 'paypal', 'wellsfargo']
all_data = []
for path in imagePaths:
company = ''
for c in companies:
if c in path:
company = c
all_data.append({'comp': c, 'path': path})
print(all_data)
u = Utility()
for image in all_data:
try:
p1 = cv2.imread(image['path'])
p1 = cv2.resize(p1, (300, 300))
p1 = cv2.cvtColor(p1, cv2.COLOR_BGR2GRAY)
for i in all_data:
if i['path'] != image['path']:
p2 = cv2.imread(i['path'])
p2 = cv2.resize(p2, (300, 300))
p2 = cv2.cvtColor(p2, cv2.COLOR_BGR2GRAY)
u.compare_images(image, i, p1, p2)
except Exception as e:
print(str(e))
| [
"[email protected]"
] | |
5268cff948f9c48f0fd6138032a6afd729243dd6 | 2a6412a9359a1df5f8f12e319e73b9e4e46fd64c | /specializedSubjects/AlgorithmII/dijkstra_ON2.py | f7130f780bcfbbcb7e2864c816bf76de51c44942 | [] | no_license | danganhvu1998/myINIAD | 504d1147a02f12e593f30e369daf82f85aa01bfd | 01547673dd3065efb6c7cc8db77ec93a5a4f5d98 | refs/heads/master | 2022-03-17T12:58:34.647229 | 2022-02-08T06:34:19 | 2022-02-08T06:34:19 | 143,675,719 | 1 | 4 | null | null | null | null | UTF-8 | Python | false | false | 770 | py | import networkx as nx
import matplotlib.pyplot as plt
def get_min(D, X):
arg_min= -1
min_value= float('inf')
for i in range(len(D)):
if D[i] < min_value:
if i in X:
arg_min= i
min_value= D[i]
return arg_min
def dijkstra(src, G):
D= [float('inf')] * nx.number_of_nodes(G)
D[src]= 0.0
X= set(G.nodes)
while X:
u= get_min(D, X)
X.remove(u)
neighbors= G.neighbors(u)
for v in neighbors:
if v in X:
if (D[u] + G.edges[u, v]['weight'] < D[v]):
D[v]= D[u] + G.edges[u, v]['weight']
return D
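# A minimal sketch of the expected input (assumed, not shipped with this script):
# 'dij.edgelist' holds one weighted edge per line in read_weighted_edgelist
# format, with node ids parsed as ints because of nodetype=int, e.g.
#   0 1 4.0
#   0 2 1.5
#   1 2 2.0
#   2 3 3.5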
G= nx.read_weighted_edgelist('dij.edgelist', nodetype=int)
print(dijkstra(0, G))
nx.draw_networkx(G)
plt.show() | [
"[email protected]"
] | |
62f8f6e45e2c8fa0b96b0ee822ef9e2ee1a0d83b | 44a7b4879c1da661cc2e8aa51c7fcc24cfb0fd3b | /src/scs_core/osio/manager/user_manager.py | f14e3e70118f2019ef5dd083551e6ca93ec113de | [
"MIT"
] | permissive | seoss/scs_core | 21cd235c9630c68f651b9a8c88120ab98fe5f513 | a813f85f86b6973fa77722a7d61cc93762ceba09 | refs/heads/master | 2021-08-08T08:09:56.905078 | 2020-04-16T19:46:52 | 2020-04-16T19:46:52 | 156,239,538 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,315 | py | """
Created on 21 Mar 2017
@author: Bruno Beloff ([email protected])
"""
from scs_core.osio.client.rest_client import RESTClient
from scs_core.osio.data.user import User
from scs_core.osio.data.user_metadata import UserMetadata
# --------------------------------------------------------------------------------------------------------------------
class UserManager(object):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, http_client, api_key):
"""
Constructor
"""
self.__rest_client = RESTClient(http_client, api_key)
# ----------------------------------------------------------------------------------------------------------------
def find(self, user_id):
request_path = '/v1/users/' + user_id
# request...
self.__rest_client.connect()
try:
response_jdict = self.__rest_client.get(request_path)
except RuntimeError:
response_jdict = None
self.__rest_client.close()
user = User.construct_from_jdict(response_jdict)
return user
def find_public(self, user_id):
request_path = '/v1/public/users/' + user_id
# request...
self.__rest_client.connect()
try:
response_jdict = self.__rest_client.get(request_path)
except RuntimeError:
response_jdict = None
self.__rest_client.close()
user = UserMetadata.construct_from_jdict(response_jdict)
return user
def find_members_of_org(self, org_id):
pass
# ----------------------------------------------------------------------------------------------------------------
def update(self, user_id, user):
request_path = '/v1/users/' + user_id
# request...
self.__rest_client.connect()
try:
self.__rest_client.put(request_path, user.as_json())
finally:
self.__rest_client.close()
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "UserManager:{rest_client:%s}" % self.__rest_client
| [
"[email protected]"
] | |
2db16db5c0570084ec0dbb9abc064697f824fa90 | f51aff57f826aeea1be21e2d0c03cce0adaadefc | /exp/utils/rand.py | 70cd3809130978b9f18a56c77772c3f8afb2594d | [
"MIT"
] | permissive | zv5dmjq5/vivit | 0a26f8b61e6f00da75fce7a9bbc75b0185ffea76 | a05f448d1badb2db42e724c80676ce7e309194d2 | refs/heads/master | 2023-07-12T06:36:10.627912 | 2021-08-26T12:02:59 | 2021-08-26T12:02:59 | 370,409,161 | 1 | 0 | MIT | 2021-08-10T12:58:27 | 2021-05-24T16:00:04 | Python | UTF-8 | Python | false | false | 1,198 | py | """Utility functions to control random seeds."""
import torch
class temporary_seed:
"""Temporarily set PyTorch seed to a different value, then restore current value.
This has the effect that code inside this context does not influence the outer
loop's random generator state.
"""
def __init__(self, temp_seed):
self._temp_seed = temp_seed
def __enter__(self):
"""Store the current seed."""
self._old_state = torch.get_rng_state()
torch.manual_seed(self._temp_seed)
def __exit__(self, exc_type, exc_value, traceback):
"""Restore the old random generator state."""
torch.set_rng_state(self._old_state)
def test_temporary_seed():
"""Test if temporary_seed works as expected."""
torch.manual_seed(3)
num1 = torch.rand(1)
with temporary_seed(2):
num2 = torch.rand(1)
num3 = torch.rand(1)
torch.manual_seed(3)
num4 = torch.rand(1)
num5 = torch.rand(1)
torch.manual_seed(2)
num6 = torch.rand(1)
assert torch.allclose(num1, num4)
assert torch.allclose(num3, num5)
assert torch.allclose(num2, num6)
if __name__ == "__main__":
test_temporary_seed()
| [
"Anonymous"
] | Anonymous |
a645386e0c34474857014299151a5d45a2ce0535 | b8115bc7503581bf3bb44bfa87b270793aff4381 | /ddsp/training/data_preparation/ddsp_prepare_tfrecord.py | f7a4c0f3576f5ff085e50c33af5f4a887ed246f2 | [
"Apache-2.0"
] | permissive | pollinations/ddsp | 7a5cfd18efcd8a77729d26231d294a4c03c2d286 | 4bbb3b1b0aa9e9a4c1f77e8758f409cbd1ec03f7 | refs/heads/main | 2023-06-21T18:21:37.230721 | 2021-07-22T09:45:54 | 2021-07-22T09:45:54 | 388,399,770 | 0 | 0 | Apache-2.0 | 2021-07-22T09:14:11 | 2021-07-22T09:14:10 | null | UTF-8 | Python | false | false | 3,340 | py | # Copyright 2021 The DDSP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Create a TFRecord dataset from audio files.
Usage:
====================
ddsp_prepare_tfrecord \
--input_audio_filepatterns=/path/to/wavs/*wav,/path/to/mp3s/*mp3 \
--output_tfrecord_path=/path/to/output.tfrecord \
--num_shards=10 \
--alsologtostderr
"""
from absl import app
from absl import flags
from ddsp.training.data_preparation.prepare_tfrecord_lib import prepare_tfrecord
import tensorflow.compat.v2 as tf
FLAGS = flags.FLAGS
flags.DEFINE_list('input_audio_filepatterns', [],
'List of filepatterns to glob for input audio files.')
flags.DEFINE_string(
'output_tfrecord_path', None,
'The prefix path to the output TFRecord. Shard numbers will be added to '
'actual path(s).')
flags.DEFINE_integer(
'num_shards', None,
'The number of shards to use for the TFRecord. If None, this number will '
'be determined automatically.')
flags.DEFINE_integer('sample_rate', 16000,
'The sample rate to use for the audio.')
flags.DEFINE_integer(
'frame_rate', 250,
'The frame rate to use for f0 and loudness features. If set to 0, '
'these features will not be computed.')
flags.DEFINE_float(
'example_secs', 4,
'The length of each example in seconds. Input audio will be split to this '
'length using a sliding window. If 0, each full piece of audio will be '
'used as an example.')
flags.DEFINE_float(
'sliding_window_hop_secs', 1,
'The hop size in seconds to use when splitting audio into constant-length '
'examples.')
flags.DEFINE_float(
'eval_split_fraction', 0.0,
'Fraction of the dataset to reserve for eval split. If set to 0, no eval '
'split is created.'
)
flags.DEFINE_float(
'coarse_chunk_secs', 20.0,
'Chunk size in seconds used to split the input audio files.')
flags.DEFINE_list(
'pipeline_options', '--runner=DirectRunner',
'A comma-separated list of command line arguments to be used as options '
'for the Beam Pipeline.')
def run():
input_audio_paths = []
for filepattern in FLAGS.input_audio_filepatterns:
input_audio_paths.extend(tf.io.gfile.glob(filepattern))
prepare_tfrecord(
input_audio_paths,
FLAGS.output_tfrecord_path,
num_shards=FLAGS.num_shards,
sample_rate=FLAGS.sample_rate,
frame_rate=FLAGS.frame_rate,
window_secs=FLAGS.example_secs,
hop_secs=FLAGS.sliding_window_hop_secs,
eval_split_fraction=FLAGS.eval_split_fraction,
coarse_chunk_secs=FLAGS.coarse_chunk_secs,
pipeline_options=FLAGS.pipeline_options)
def main(unused_argv):
"""From command line."""
run()
def console_entry_point():
"""From pip installed script."""
app.run(main)
if __name__ == '__main__':
console_entry_point()
| [
"[email protected]"
] | |
8e1fb53a12cc8169be23e1cdcdc37884cdf551ec | a6cbc03780b5c390f4f8ce9063bd8a7f6d75e8aa | /mail2.py | 1c0dfcd906c6beffc8d5efcafb1c822ea969e238 | [] | no_license | KimaruThagna/Email_and_Regex | 5f825554bd17e56ff091a79187c5ab7a758960d9 | c250e37d6e09f1a9c35fb6af873ff1c77707a8fd | refs/heads/master | 2021-09-06T11:59:04.784389 | 2018-02-06T12:16:58 | 2018-02-06T12:16:58 | 110,789,925 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,992 | py | #This example still uses gmail but this time includes an attachment
import os,smtplib
from email.mime.text import MIMEText
from email.encoders import encode_base64
from email.mime.multipart import MIMEMultipart
from tkinter.filedialog import askopenfilename
from email.mime.base import MIMEBase
# function that sends the email. Feed it with relevant parameters
def sendMail(sender,pwd,subject,body,receiver,q):
    message=MIMEMultipart() # define the whole message as a MIMEMultipart and add relevant
    #metadata
message['Subject']=subject
message['From']=sender
message['To']=receiver
text=MIMEText(body)
message.attach(text)# attach the body or actual message to the message object
if q=='y':
file=askopenfilename()# create window which allows you to browse file system\
#and select file
data=open(file,'rb').read() # read file in binary mode
part=MIMEBase('application','octet-stream')
part.set_payload(data) # set the payload as the file read in binary mode
encode_base64(part) #encode the attachment to base64
part.add_header('Content-disposition','attachment; filename='+os.path.basename(file))
message.attach(part)
print('Connecting ...')
server=smtplib.SMTP('smtp.gmail.com',587) # setup email server
server.ehlo() # identify yourself to gmail client
server.starttls() # start transport layer security
server.ehlo() #re-identify yourself after encryption
server.login(sender,pwd) # login to sender account
print('Connected')
server.sendmail(sender,receiver,message.as_string()) # perform actual sending of mail
print('Mail Sent.')
server.quit()
#prompts
sender=input('Input Your email ')
receiver=input('Provide Recipient ')
pwd=input('Provide password ' )
subject=input('Mail Subject ')
body=input('Type your message ')
con=input('Do you want to send an attachment? Enter y for YES ')
#call method
sendMail(sender,pwd,subject,body,receiver,con)
| [
"[email protected]"
] | |
d5ab6e2c2301fa7c2de21056b961275cd20e463d | 840b98f14f181f7dbd693f2ee4b3c46e5be59305 | /demos/demo_pycloudmessenger/POM1/NeuralNetworks/pom1_NN_worker_pycloudmessenger.py | 3bb1b70be16f54c7404843da2a380711222b695e | [
"Apache-2.0"
] | permissive | Musketeer-H2020/MMLL-Robust | 4ef6b2ff5dff18d4d2b2a403a89d9455ba861e2b | ccc0a7674a04ae0d00bedc38893b33184c5f68c6 | refs/heads/main | 2023-09-01T18:47:46.065297 | 2021-09-28T15:34:12 | 2021-09-28T15:34:12 | 386,264,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,786 | py | # -*- coding: utf-8 -*-
'''
@author: Marcos Fernandez Diaz
November 2020
Example of use: python pom1_NN_worker_pycloudmessenger.py --user <user> --password <password> --task_name <task_name> --id <id>
Parameters:
- user: String with the name of the user. If the user does not exist in the pycloudmessenger platform a new one will be created
- password: String with the password
- task_name: String with the name of the task. If the task already exists, an error will be displayed
- id: Integer representing the partition of data to be used by the worker. Each worker should use a different partition, possible values are 0 to 4.
'''
# Import general modules
import argparse
import logging
import json
import numpy as np
import sys, os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # Disables tensorflow warnings
import tensorflow as tf
import onnxruntime
from sklearn.metrics import accuracy_score
# Add higher directory to python modules path.
sys.path.append("../../../../")
# To be imported from MMLL (pip installed)
from MMLL.nodes.WorkerNode import WorkerNode
from MMLL.comms.comms_pycloudmessenger import Comms_worker as Comms
# To be imported from demo_tools
from demo_tools.task_manager_pycloudmessenger import Task_Manager
from demo_tools.data_connectors.Load_from_file import Load_From_File as DC
from demo_tools.mylogging.logger_v1 import Logger
from demo_tools.evaluation_tools import display, plot_cm_seaborn, create_folders
# Set up logger
logging.basicConfig(
level=logging.ERROR,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
LOGGER = logging.getLogger()
LOGGER.setLevel(logging.DEBUG)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--user', type=str, default=None, help='User')
parser.add_argument('--password', type=str, default=None, help='Password')
parser.add_argument('--task_name', type=str, default=None, help='Name of the task')
parser.add_argument('--id', type=int, default=None, choices=[0, 1, 2, 3, 4], help='The address of the worker')
FLAGS, unparsed = parser.parse_known_args()
user_name = FLAGS.user
user_password = FLAGS.password
task_name = FLAGS.task_name
data_partition_id = FLAGS.id # This integer identifies the data partition used for the worker
# Set basic configuration
dataset_name = 'mnist'
verbose = False
pom = 1
model_type = 'NN'
# Create the directories for storing relevant outputs if they do not exist
create_folders("./results/")
# Setting up the logger
logger = Logger('./results/logs/Worker_' + str(user_name) + '.log')
# Load the credentials for pycloudmessenger
display('===========================================', logger, verbose)
display('Creating Worker...', logger, verbose)
# Note: this part creates the worker (participant) and it joins the task. This code is
# intended to be used only at the demos, in Musketeer this part must be done in the client.
credentials_filename = '../../musketeer.json'
try:
with open(credentials_filename, 'r') as f:
credentials = json.load(f)
except:
display('Error - The file musketeer.json is not available, please put it under the following path: "' + os.path.abspath(os.path.join("","../../")) + '"', logger, verbose)
sys.exit()
# Create user and join task
tm = Task_Manager(credentials_filename)
participant = tm.create_worker_and_join_task(user_name, user_password, task_name, display, logger)
display("Worker %s has joined task %s" %(user_name, task_name), logger, verbose)
# Creating the comms object
display('Creating WorkerNode under POM %d, communicating through pycloudmessenger' %pom, logger, verbose)
comms = Comms(participant, user_name)
# Creating Workernode
wn = WorkerNode(pom, comms, logger, verbose)
display('-------------------- Loading dataset %s --------------------------' % dataset_name, logger, verbose)
# Load data
# Warning: this data connector is only designed for the demos. In Musketeer, appropriate data
# connectors must be provided
data_file = '../../../../input_data/' + dataset_name + '_demonstrator_data.pkl'
try:
dc = DC(data_file)
except:
display('Error - The file ' + dataset_name + '_demonstrator_data.pkl does not exist. Please download it from Box and put it under the following path: "' + os.path.abspath(os.path.join("","../../../../input_data/")) + '"', logger, verbose)
sys.exit()
# Get train/test data and set training data
[Xtr, ytr, _, _, Xtst, ytst] = dc.get_all_data_Worker(int(data_partition_id))
wn.set_training_data(dataset_name, Xtr, ytr)
display('WorkerNode loaded %d patterns for training' % wn.NPtr, logger, verbose)
# Creating a ML model and start training procedure
wn.create_model_worker(model_type)
display('MMLL model %s is ready for training!' %model_type, logger, verbose)
display('Worker_' + model_type + ' %s is running...' %user_name, logger, verbose)
wn.run()
display('Worker_' + model_type + ' %s: EXIT' %user_name, logger, verbose)
# Retrieving and saving the trained model
display('Retrieving the trained model from WorkerNode', logger, verbose)
model = wn.get_model()
# Warning: this save_model utility is only for demo purposes
output_filename_model = './results/models/Worker_' + str(user_name) + '_' + dataset_name + '_model'
model.save(output_filename_model)
# Making predictions on test data
display('------------- Obtaining predictions------------------------------------\n', logger, verbose)
preprocessors = wn.get_preprocessors()
if preprocessors is not None:
for prep_model in preprocessors: # Apply stored preprocessor sequentially (in the same order received)
Xtst = prep_model.transform(Xtst)
display('Test data transformed using %s' %prep_model.name, logger, verbose)
preds_tst = model.predict(Xtst)
preds_tst = np.argmax(preds_tst, axis=-1) # Convert to labels
y = np.argmax(ytst, axis=-1) # Convert to labels
classes = np.arange(ytst.shape[1]) # 0 to 9
# Evaluating the results
display('------------- Evaluating --------------------------------------------\n', logger, verbose)
# Warning, these evaluation methods are not part of the MMLL library, they are only intended
# to be used for the demos. Use them at your own risk.
output_filename = 'Worker_' + str(user_name) + '_NN_confusion_matrix_' + dataset_name + '.png'
title = 'NN confusion matrix in test set worker'
plot_cm_seaborn(preds_tst, y, classes, title, output_filename, logger, verbose, normalize=True)
# Load Tf SavedModel and check results
model_loaded = tf.keras.models.load_model(output_filename_model)
preds_tst = model_loaded.predict(Xtst)
preds_tst = np.argmax(preds_tst, axis=-1) # Convert to labels
    # Model export to ONNX
output_filename_model = './results/models/Worker_' + str(user_name) + '_' + dataset_name + '_model.onnx'
model.save(output_filename_model)
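    # The MMLL model object's save() is assumed to pick the export format from the file
    # extension, so the .onnx path above should produce an ONNX file for the runtime check below.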
# Compute the prediction with ONNX Runtime
onnx_session = onnxruntime.InferenceSession(output_filename_model)
onnx_inputs = {onnx_session.get_inputs()[0].name: Xtst}
onnx_output = onnx_session.run(None, onnx_inputs)[0]
onnx_output = np.argmax(onnx_output, axis=-1) # Convert to labels
    acc_onnx = accuracy_score(y, onnx_output)
    display('Test accuracy in ONNX model is %f' %acc_onnx, logger, verbose)
| [
"[email protected]"
] | |
95986ae73d179770f7292b38dbaaf00b540d68bb | 67ecf1aca10c6b3504027edc131d3f295a66ae08 | /00-deleteintreeview.py | d3e2d07a7e5d08d4338fd394ee4a32966af7637f | [
"MIT"
] | permissive | UncleEngineer/TkinterTrick | 5efa58dee8612d48d18040debe7868c6b5815e3c | 471a5f4906ddad195731410e9df1a2b35f466fcb | refs/heads/master | 2020-03-16T09:57:28.696335 | 2018-06-22T10:23:39 | 2018-06-22T10:23:39 | 132,626,504 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,029 | py | from tkinter import *
from tkinter import ttk
root = Tk()
tree = ttk.Treeview(root)
tree["columns"]=("one","two")
tree.column("one", width=100 )
tree.column("two", width=100)
tree.heading("one", text="coulmn A")
tree.heading("two", text="column B")
tree.insert("" , 0, text="Line 1", values=("1A","1b"))
id2 = tree.insert("", 1, "dir2", text="Dir 2")
tree.insert(id2, "end", "dir 2", text="sub dir 2", values=("2A","2B"))
##alternatively:
tree.insert("", 3, "dir3", text="Dir 3")
tree.insert("dir3", 3, text=" sub dir 3",values=("3A"," 3B"))
def edit():
x = tree.get_children()
for item in x: ## Changing all children from root item
tree.item(item, text="blub", values=("foo", "bar"))
def delete():
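    # Assumes a row is currently selected: tree.selection() returns an empty tuple otherwise,
    # so the [0] index below would raise IndexError.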
selected_item = tree.selection()[0] ## get selected item
tree.delete(selected_item)
tree.pack()
button_del = Button(root, text="del", command=delete)
button_del.pack()
button_del = Button(root, text="edit", command=edit)
button_del.pack()
root.mainloop() | [
"[email protected]"
] | |
d2af03e3a4906a1fa23e9f3a1ce18e723be2b7dd | a5b4d77e760c6131ba1c5f040265a3b08d3c0478 | /enemy_bot/enemy_bot_level5/burger_detect/scripts/image_save.py | e695863b746ba773cd8cf5ea415ec5f9c57f2dab | [
"BSD-3-Clause"
] | permissive | kenjirotorii/burger_war_kit | 700b511739299a9d90d23c70262ecf4856d234b7 | d9b1b443f220980a4118c13cdf22174696c3db9c | refs/heads/main | 2023-03-21T23:32:24.415502 | 2021-03-11T15:59:12 | 2021-03-11T15:59:12 | 337,704,943 | 0 | 1 | BSD-3-Clause | 2021-03-11T15:59:13 | 2021-02-10T11:36:22 | Python | UTF-8 | Python | false | false | 1,046 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
import random
#
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Image
import sys
from cv_bridge import CvBridge, CvBridgeError
import cv2
import time
import os
dir="Image/test/"
num=10*1000
class ImageGet():
def __init__(self):
rospy.Subscriber('/image_raw', Image, self.Image_save)
self.bridge = CvBridge()
self.count=0
def Image_save(self,data):
cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
cv2.imshow("sample.jpg",cv_image)
cv2.waitKey(5)
#cv2.imwrite(dir+"sample"+repr(self.count)+".jpg",cv_image)
print("save done.")
#self.count+=1
def get_image(self):
r = rospy.Rate(1) # change speed 1fps
while not rospy.is_shutdown():
r.sleep()
if self.count>num:
break
if __name__ == '__main__':
if not os.path.exists(dir):
os.mkdir(dir)
rospy.init_node('get_image')
bot = ImageGet()
bot.get_image()
| [
"[email protected]"
] | |
327f5bed18063bc5103443d55e4856bea69453da | 009c5522fe7fd1b6ffad167097535e592818c9d7 | /app/inheritance/abstract/migrations/0003_auto_20191223_0545.py | 02758dcc786385764c2036954dc49dd6a0eb3c57 | [] | no_license | moorekwon/django-document | d891d3d329bc697598517c0918e912da89cf5f6a | 983de2babdabd106e17467af27bac4efced170b8 | refs/heads/master | 2021-09-29T00:37:04.647977 | 2019-12-26T09:07:55 | 2019-12-26T09:07:55 | 228,784,209 | 0 | 0 | null | 2021-09-22T18:10:05 | 2019-12-18T07:35:12 | Python | UTF-8 | Python | false | false | 740 | py | # Generated by Django 3.0 on 2019-12-23 05:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('abstract', '0002_childa_childb'),
]
operations = [
migrations.AlterField(
model_name='childa',
name='m2m',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='abstract_childa_set', to='abstract.Student'),
),
migrations.AlterField(
model_name='childb',
name='m2m',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='abstract_childb_set', to='abstract.Student'),
),
]
| [
"[email protected]"
] | |
37cda467832d9959605d1668f2ef07cc8c293df9 | ece6f45409ee2bcbff1be64fa1ac98e7805e0e18 | /API:Data_Visualization/population_json.py | 78c7c29a84dd29b055d74f7b1bbb767d4b2871b3 | [] | no_license | PickertJoe/python_exercises | 5b9ac3334eec32e35a477d126c911d4ca07a4343 | 77955427db9c3342c9a51618a0cd9cf6f884fbee | refs/heads/master | 2022-12-12T11:57:08.267814 | 2019-12-08T21:52:31 | 2019-12-08T21:52:31 | 184,834,676 | 1 | 0 | null | 2022-12-08T05:14:43 | 2019-05-04T00:16:12 | Python | UTF-8 | Python | false | false | 1,293 | py | # A program to read and analyze the data in population_data.json
import json
from comma import comma
from country_code import code_search
from pygal.maps.world import World
from pygal.style import RotateStyle
# Importing the data in the json file into a list
filename = "chapter_16/population_data.json"
with open(filename) as f:
population_data = json.load(f)
# Building a dictionary of the population data
cc_populations = {}
for country in population_data:
if country['Year'] == '2010':
country_name = country["Country Name"]
population = int(float(country["Value"]))
code = code_search(country_name)
if code:
cc_populations[code] = population
# Creating three separate categories for different population ranges
cc_pop1, cc_pop2, cc_pop3 = {}, {}, {}
for code, population in cc_populations.items():
if population > 1000000000:
cc_pop1[code] = population
elif population > 10000000:
cc_pop2[code] = population
else:
cc_pop3[code] = population
wm_style = RotateStyle('#336699')
wm = World(style=wm_style)
wm.title = "World Population in 2010, by Select Countries"
wm.add('1bn+', cc_pop1)
wm.add('10m - 1bn', cc_pop2)
wm.add('0-10m', cc_pop3)
wm.render_to_file('country_populations_category.svg')
| [
"[email protected]"
] | |
65c4da75fb004f1520cb29a69802bcce620518d9 | 40c4b0c31a5870a9201d3d42a63c5547092e5912 | /frappe/recorder.py | 8cbcaa01bb980c8cbdc9a77613591f7700643486 | [
"MIT"
] | permissive | ektai/frappe3 | fab138cdbe15bab8214cf623d9eb461e9b9fb1cd | 44aa948b4d5a0d729eacfb3dabdc9c8894ae1799 | refs/heads/master | 2022-12-25T15:48:36.926197 | 2020-10-07T09:19:20 | 2020-10-07T09:19:20 | 301,951,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,212 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from collections import Counter
import datetime
import inspect
import json
import re
import time
import traceback
import frappe
import sqlparse
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
from frappe import _
RECORDER_INTERCEPT_FLAG = "recorder-intercept"
RECORDER_REQUEST_SPARSE_HASH = "recorder-requests-sparse"
RECORDER_REQUEST_HASH = "recorder-requests"
def sql(*args, **kwargs):
start_time = time.time()
result = frappe.db._sql(*args, **kwargs)
end_time = time.time()
stack = list(get_current_stack_frames())
if frappe.conf.db_type == 'postgres':
query = frappe.db._cursor.query
else:
query = frappe.db._cursor._executed
query = sqlparse.format(query.strip(), keyword_case="upper", reindent=True)
# Collect EXPLAIN for executed query
if query.lower().strip().split()[0] in ("select", "update", "delete"):
# Only SELECT/UPDATE/DELETE queries can be "EXPLAIN"ed
explain_result = frappe.db._sql("EXPLAIN {}".format(query), as_dict=True)
else:
explain_result = []
data = {
"query": query,
"stack": stack,
"explain_result": explain_result,
"time": start_time,
"duration": float("{:.3f}".format((end_time - start_time) * 1000)),
}
frappe.local._recorder.register(data)
return result
def get_current_stack_frames():
current = inspect.currentframe()
frames = inspect.getouterframes(current, context=10)
for frame, filename, lineno, function, context, index in list(reversed(frames))[:-2]:
if "/apps/" in filename:
yield {
"filename": re.sub(".*/apps/", "", filename),
"lineno": lineno,
"function": function,
"context": "".join(context),
"index": index,
"locals": json.dumps(frame.f_locals, skipkeys=True, default=str)
}
def record():
if __debug__:
if frappe.cache().get_value(RECORDER_INTERCEPT_FLAG):
frappe.local._recorder = Recorder()
def dump():
if __debug__:
if hasattr(frappe.local, "_recorder"):
frappe.local._recorder.dump()
class Recorder():
def __init__(self):
self.uuid = frappe.generate_hash(length=10)
self.time = datetime.datetime.now()
self.calls = []
self.path = frappe.request.path
self.cmd = frappe.local.form_dict.cmd or ""
self.method = frappe.request.method
self.headers = dict(frappe.local.request.headers)
self.form_dict = frappe.local.form_dict
_patch()
def register(self, data):
self.calls.append(data)
def dump(self):
request_data = {
"uuid": self.uuid,
"path": self.path,
"cmd": self.cmd,
"time": self.time,
"queries": len(self.calls),
"time_queries": float("{:0.3f}".format(sum(call["duration"] for call in self.calls))),
"duration": float("{:0.3f}".format((datetime.datetime.now() - self.time).total_seconds() * 1000)),
"method": self.method,
}
frappe.cache().hset(RECORDER_REQUEST_SPARSE_HASH, self.uuid, request_data)
frappe.publish_realtime(event="recorder-dump-event", message=json.dumps(request_data, default=str))
self.mark_duplicates()
request_data["calls"] = self.calls
request_data["headers"] = self.headers
request_data["form_dict"] = self.form_dict
frappe.cache().hset(RECORDER_REQUEST_HASH, self.uuid, request_data)
def mark_duplicates(self):
counts = Counter([call["query"] for call in self.calls])
for index, call in enumerate(self.calls):
call["index"] = index
call["exact_copies"] = counts[call["query"]]
def _patch():
frappe.db._sql = frappe.db.sql
frappe.db.sql = sql
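	# Monkey-patching: the original frappe.db.sql is kept as frappe.db._sql and every query is
	# routed through the recording sql() wrapper defined above.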
def do_not_record(function):
def wrapper(*args, **kwargs):
if hasattr(frappe.local, "_recorder"):
del frappe.local._recorder
frappe.db.sql = frappe.db._sql
return function(*args, **kwargs)
return wrapper
def administrator_only(function):
def wrapper(*args, **kwargs):
if frappe.session.user != "Administrator":
frappe.throw(_("Only Administrator is allowed to use Recorder"))
return function(*args, **kwargs)
return wrapper
@frappe.whitelist()
@do_not_record
@administrator_only
def status(*args, **kwargs):
return bool(frappe.cache().get_value(RECORDER_INTERCEPT_FLAG))
@frappe.whitelist()
@do_not_record
@administrator_only
def start(*args, **kwargs):
frappe.cache().set_value(RECORDER_INTERCEPT_FLAG, 1)
@frappe.whitelist()
@do_not_record
@administrator_only
def stop(*args, **kwargs):
frappe.cache().delete_value(RECORDER_INTERCEPT_FLAG)
@frappe.whitelist()
@do_not_record
@administrator_only
def get(uuid=None, *args, **kwargs):
if uuid:
result = frappe.cache().hget(RECORDER_REQUEST_HASH, uuid)
lexer = PythonLexer(tabsize=4)
for call in result["calls"]:
for stack in call["stack"]:
formatter = HtmlFormatter(noclasses=True, hl_lines=[stack["index"] + 1])
stack["context"] = highlight(stack["context"], lexer, formatter)
else:
result = list(frappe.cache().hgetall(RECORDER_REQUEST_SPARSE_HASH).values())
return result
@frappe.whitelist()
@do_not_record
@administrator_only
def delete(*args, **kwargs):
frappe.cache().delete_value(RECORDER_REQUEST_SPARSE_HASH)
frappe.cache().delete_value(RECORDER_REQUEST_HASH) | [
"[email protected]"
] | |
871e74c940da56c3387dffad57b313ca22cdc089 | 9d961bd6a590cc96db0c1f9c72d84e3a66636edf | /심심풀이땅콩/[백준]4673.py | 76fb463f5fde7bf3313b00ca4769b70034e63f75 | [] | no_license | 0equal2/Python_Programming | bae65338929e8e1a88247b8d23de805caa026702 | 2ac1d0262320220f49cbdb45e787e55e994d0b0f | refs/heads/master | 2023-05-14T22:13:41.583214 | 2021-06-09T03:04:51 | 2021-06-09T03:04:51 | 304,628,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | ###[백준]4673
memo=[0]*10001
for i in range(1,10001):
newnum=i+sum(list(map(int,list(str(i)))))
if newnum<=10000:
memo[newnum]=1
if memo[i]==0:
print(i)
| [
"[email protected]"
] | |
4b99c672e34294a5f110b6531518b6d7056de15a | 1be2cbc9fd62cf77cc05a64807acf7d857b84eee | /blackopt/config.py | a2114330cc418e849f46670ef206413bcc1d54b6 | [] | no_license | ikamensh/blackopt | 4fdce2c0147b1a5a85024c9b59925d3d1a35b13f | a6ab24ce1be21a5ca9e26d0bb1f59bb50fd007a2 | refs/heads/master | 2023-01-23T12:55:42.087216 | 2020-12-05T19:18:30 | 2020-12-05T19:18:30 | 178,232,685 | 0 | 0 | null | 2020-10-18T20:57:29 | 2019-03-28T15:33:53 | Python | UTF-8 | Python | false | false | 382 | py | import os
default_workspace = "_blackopt_workspace"
_rootdir = default_workspace
def set_rootdir(path):
path = os.path.expanduser(path)
global _rootdir
_rootdir = path
def prepend_rootdir(prefix):
prefix = os.path.expanduser(prefix)
path = os.path.join(prefix, default_workspace)
global _rootdir
_rootdir = path
def get_rootdir():
return _rootdir | [
"[email protected]"
] | |
3cc86621a38c55c60af190e6064d74da255a9e2b | 14d8adc86adc14c1d64a5550b1bbd5663e984545 | /combination_sum_ii.py | 97ed76da8e3052e379884a175fa1c814d6591641 | [] | no_license | milllu/leetcode | e1b68ef7774cc0c1b49325ec1b87280d27570d94 | 458b3e72cd82a203b10bdca747c4c3ba85708f75 | refs/heads/master | 2020-03-30T23:41:46.180308 | 2018-10-11T01:08:31 | 2018-10-11T01:08:31 | 151,709,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | """
给定一个数组 candidates 和一个目标数 target ,找出 candidates 中所有可以使数字和为 target 的组合。
candidates 中的每个数字在每个组合中只能使用一次。
说明:
所有数字(包括目标数)都是正整数。
解集不能包含重复的组合。
示例 1:
输入: candidates = [10,1,2,7,6,1,5], target = 8,
所求解集为:
[
[1, 7],
[1, 2, 5],
[2, 6],
[1, 1, 6]
]
示例 2:
输入: candidates = [2,5,2,1,2], target = 5,
所求解集为:
[
[1,2,2],
[5]
]
"""
class Solution(object):
def combinationSum2(self, nums, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
def dfs(dic, target, lst, suml):
if suml == target:
lst.sort()
if lst not in result:
result.append(lst)
return
if suml > target:
return
for key in dic:
if dic[key] > 0:
dic[key] -= 1
dfs(dic, target, lst+[key], suml+key)
dic[key] += 1
dic = {}
for num in nums:
dic[num] = dic.get(num, 0) + 1
result = []
dfs(dic, target, [], 0)
return result | [
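
# Minimal usage sketch (assumed driver code, not part of the original solution):
if __name__ == "__main__":
    solver = Solution()
    # Example 1 -- expected combinations (order may vary): [1, 7], [1, 2, 5], [2, 6], [1, 1, 6]
    print(solver.combinationSum2([10, 1, 2, 7, 6, 1, 5], 8))
    # Example 2 -- expected combinations (order may vary): [1, 2, 2], [5]
    print(solver.combinationSum2([2, 5, 2, 1, 2], 5))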
"[email protected]"
] | |
aace7c3ac8ae4dfbaf9a425ce523bb342eaafc68 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/pg_2204+034/sdB_pg_2204+034_coadd.py | 1d84ed11150bebd234261af2a75e6b07e15f708b | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[331.818708,3.705497], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_pg_2204+034/sdB_pg_2204+034_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_pg_2204+034/sdB_pg_2204+034_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
c0bdac944aed5cb00d3ab2541709a23fecbc22e3 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/application_gateway_rewrite_rule_set.py | bf4a4ba184e7533ad006b1884070f8b6fb8071ac | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 2,111 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class ApplicationGatewayRewriteRuleSet(SubResource):
"""Rewrite rule set of an application gateway.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:param rewrite_rules: Rewrite rules in the rewrite rule set.
:type rewrite_rules:
list[~azure.mgmt.network.v2018_11_01.models.ApplicationGatewayRewriteRule]
:ivar provisioning_state: Provisioning state of the rewrite rule set
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param name: Name of the rewrite rule set that is unique within an
Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource
is updated.
:vartype etag: str
"""
_validation = {
'provisioning_state': {'readonly': True},
'etag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'rewrite_rules': {'key': 'properties.rewriteRules', 'type': '[ApplicationGatewayRewriteRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(ApplicationGatewayRewriteRuleSet, self).__init__(**kwargs)
self.rewrite_rules = kwargs.get('rewrite_rules', None)
self.provisioning_state = None
self.name = kwargs.get('name', None)
self.etag = None
| [
"[email protected]"
] | |
c61d44bcc8be1346b3b1a8bb2742d5847838cc8a | 2b115f9c5929fedd06d4dd6969100ab2df484adf | /messenger/urls.py | 9903187e19c8320adcd0f81ea319f70a102f44a3 | [] | no_license | sorXCode/Avina-API | 0b790bfd8ac8b9c84d1db45db0819e0585d954b9 | 7687ba7434b77d6c33944c65fff0409459a9d5ce | refs/heads/master | 2023-04-11T01:17:35.570752 | 2021-04-14T15:01:45 | 2021-04-14T15:01:45 | 342,429,666 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns
from .views import StartMessaging, Conversation
urlpatterns = [
path('', StartMessaging.as_view()),
path('<str:product_uid>', Conversation.as_view()),
]
urlpatterns = format_suffix_patterns(urlpatterns) | [
"[email protected]"
] | |
a21618f0ce0aa6432175d36b0042e7df8e21bb69 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/prime-big-291.py | dac9afb7dc0a8d107113bc4bc06e8af627553a69 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,479 | py | # Get the n-th prime starting from 2
def get_prime(n:int) -> int:
candidate:int = 2
found:int = 0
while True:
if is_prime(candidate):
found = found + 1
if found == n:
return candidate
candidate = candidate + 1
return 0 # Never happens
def is_prime(x:int) -> bool:
div:int = 2
div2:int = 2
div3:int = 2
div4:int = 2
div5:int = 2
while div < x:
if x % div == 0:
return False
div = div + 1
return True
def is_prime2(x:int, x2:int) -> bool:
div:int = 2
div2:int = 2
div3:int = 2
div4:int = 2
div5:int = 2
while div < x:
if x % div == 0:
return False
div = div + 1
return True
$Definition
def is_prime4(x:int, x2:int, x3:int, x4:int) -> bool:
div:int = 2
div2:int = 2
div3:int = 2
div4:int = 2
div5:int = 2
while div < x:
if x % div == 0:
return False
div = div + 1
return True
def is_prime5(x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
div:int = 2
div2:int = 2
div3:int = 2
div4:int = 2
div5:int = 2
while div < x:
if x % div == 0:
return False
div = div + 1
return True
# Input parameter
n:int = 15
n2:int = 15
n3:int = 15
n4:int = 15
n5:int = 15
# Run [1, n]
i:int = 1
i2:int = 1
i3:int = 1
i4:int = 1
i5:int = 1
# Crunch
while i <= n:
print(get_prime(i))
i = i + 1
| [
"[email protected]"
] | |
6186282c3446f83d419a1ac0c4b554890f932503 | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/python/framework/dtypes.py | f1c6251c483d69a34f5710f9b346fb9ccd2b713e | [
"Apache-2.0"
] | permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 24,187 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library of dtypes (Tensor element types)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import builtins
from tensorflow.core.framework import types_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.util.tf_export import tf_export
_np_bfloat16 = pywrap_tensorflow.TF_bfloat16_type()
@tf_export("dtypes.DType", "DType")
class DType(object):
"""Represents the type of the elements in a `Tensor`.
The following `DType` objects are defined:
* `tf.float16`: 16-bit half-precision floating-point.
* `tf.float32`: 32-bit single-precision floating-point.
* `tf.float64`: 64-bit double-precision floating-point.
* `tf.bfloat16`: 16-bit truncated floating-point.
* `tf.complex64`: 64-bit single-precision complex.
* `tf.complex128`: 128-bit double-precision complex.
* `tf.int8`: 8-bit signed integer.
* `tf.uint8`: 8-bit unsigned integer.
* `tf.uint16`: 16-bit unsigned integer.
* `tf.uint32`: 32-bit unsigned integer.
* `tf.uint64`: 64-bit unsigned integer.
* `tf.int16`: 16-bit signed integer.
* `tf.int32`: 32-bit signed integer.
* `tf.int64`: 64-bit signed integer.
* `tf.bool`: Boolean.
* `tf.string`: String.
* `tf.qint8`: Quantized 8-bit signed integer.
* `tf.quint8`: Quantized 8-bit unsigned integer.
* `tf.qint16`: Quantized 16-bit signed integer.
* `tf.quint16`: Quantized 16-bit unsigned integer.
* `tf.qint32`: Quantized 32-bit signed integer.
* `tf.resource`: Handle to a mutable resource.
* `tf.variant`: Values of arbitrary types.
The `tf.as_dtype()` function converts numpy types and string type
names to a `DType` object.
"""
def __init__(self, type_enum):
"""Creates a new `DataType`.
NOTE(mrry): In normal circumstances, you should not need to
construct a `DataType` object directly. Instead, use the
`tf.as_dtype()` function.
Args:
type_enum: A `types_pb2.DataType` enum value.
Raises:
TypeError: If `type_enum` is not a value `types_pb2.DataType`.
"""
# TODO(mrry): Make the necessary changes (using __new__) to ensure
# that calling this returns one of the interned values.
type_enum = int(type_enum)
if (type_enum not in types_pb2.DataType.values() or
type_enum == types_pb2.DT_INVALID):
raise TypeError("type_enum is not a valid types_pb2.DataType: %s" %
type_enum)
self._type_enum = type_enum
@property
def _is_ref_dtype(self):
"""Returns `True` if this `DType` represents a reference type."""
return self._type_enum > 100
@property
def _as_ref(self):
"""Returns a reference `DType` based on this `DType`."""
if self._is_ref_dtype:
return self
else:
return _INTERN_TABLE[self._type_enum + 100]
@property
def base_dtype(self):
"""Returns a non-reference `DType` based on this `DType`."""
if self._is_ref_dtype:
return _INTERN_TABLE[self._type_enum - 100]
else:
return self
@property
def real_dtype(self):
"""Returns the dtype correspond to this dtype's real part."""
base = self.base_dtype
if base == complex64:
return float32
elif base == complex128:
return float64
else:
return self
@property
def is_numpy_compatible(self):
return self._type_enum not in _NUMPY_INCOMPATIBLE
@property
def as_numpy_dtype(self):
"""Returns a `numpy.dtype` based on this `DType`."""
return _TF_TO_NP[self._type_enum]
@property
def as_datatype_enum(self):
"""Returns a `types_pb2.DataType` enum value based on this `DType`."""
return self._type_enum
@property
def is_bool(self):
"""Returns whether this is a boolean data type"""
return self.base_dtype == bool
@property
def is_integer(self):
"""Returns whether this is a (non-quantized) integer type."""
return (self.is_numpy_compatible and not self.is_quantized and
np.issubdtype(self.as_numpy_dtype, np.integer))
@property
def is_floating(self):
"""Returns whether this is a (non-quantized, real) floating point type."""
return ((self.is_numpy_compatible and
np.issubdtype(self.as_numpy_dtype, np.floating)) or
self.base_dtype == bfloat16)
@property
def is_complex(self):
"""Returns whether this is a complex floating point type."""
return self.base_dtype in (complex64, complex128)
@property
def is_quantized(self):
"""Returns whether this is a quantized data type."""
return self.base_dtype in _QUANTIZED_DTYPES_NO_REF
@property
def is_unsigned(self):
"""Returns whether this type is unsigned.
Non-numeric, unordered, and quantized types are not considered unsigned, and
this function returns `False`.
Returns:
Whether a `DType` is unsigned.
"""
try:
return self.min == 0
except TypeError:
return False
@property
def min(self):
"""Returns the minimum representable value in this data type.
Raises:
TypeError: if this is a non-numeric, unordered, or quantized type.
"""
if (self.is_quantized or
self.base_dtype in (bool, string, complex64, complex128)):
raise TypeError("Cannot find minimum value of %s." % self)
# there is no simple way to get the min value of a dtype, we have to check
# float and int types separately
try:
return np.finfo(self.as_numpy_dtype()).min
except: # bare except as possible raises by finfo not documented
try:
return np.iinfo(self.as_numpy_dtype()).min
except:
if self.base_dtype == bfloat16:
return _np_bfloat16(float.fromhex("-0x1.FEp127"))
raise TypeError("Cannot find minimum value of %s." % self)
@property
def max(self):
"""Returns the maximum representable value in this data type.
Raises:
TypeError: if this is a non-numeric, unordered, or quantized type.
"""
if (self.is_quantized or
self.base_dtype in (bool, string, complex64, complex128)):
raise TypeError("Cannot find maximum value of %s." % self)
# there is no simple way to get the max value of a dtype, we have to check
# float and int types separately
try:
return np.finfo(self.as_numpy_dtype()).max
except: # bare except as possible raises by finfo not documented
try:
return np.iinfo(self.as_numpy_dtype()).max
except:
if self.base_dtype == bfloat16:
return _np_bfloat16(float.fromhex("0x1.FEp127"))
raise TypeError("Cannot find maximum value of %s." % self)
@property
def limits(self, clip_negative=True):
"""Return intensity limits, i.e.
(min, max) tuple, of the dtype.
Args:
clip_negative : bool, optional If True, clip the negative range (i.e.
return 0 for min intensity) even if the image dtype allows negative
values. Returns
min, max : tuple Lower and upper intensity limits.
"""
min, max = dtype_range[self.as_numpy_dtype] # pylint: disable=redefined-builtin
if clip_negative:
min = 0 # pylint: disable=redefined-builtin
return min, max
def is_compatible_with(self, other):
"""Returns True if the `other` DType will be converted to this DType.
The conversion rules are as follows:
```python
DType(T) .is_compatible_with(DType(T)) == True
```
Args:
other: A `DType` (or object that may be converted to a `DType`).
Returns:
True if a Tensor of the `other` `DType` will be implicitly converted to
this `DType`.
"""
other = as_dtype(other)
return self._type_enum in (other.as_datatype_enum,
other.base_dtype.as_datatype_enum)
def __eq__(self, other):
"""Returns True iff this DType refers to the same type as `other`."""
if other is None:
return False
try:
dtype = as_dtype(other).as_datatype_enum
return self._type_enum == dtype # pylint: disable=protected-access
except TypeError:
return False
def __ne__(self, other):
"""Returns True iff self != other."""
return not self.__eq__(other)
@property
def name(self):
"""Returns the string name for this `DType`."""
return _TYPE_TO_STRING[self._type_enum]
def __str__(self):
return "<dtype: %r>" % self.name
def __repr__(self):
return "tf." + self.name
def __hash__(self):
return self._type_enum
def __reduce__(self):
return as_dtype, (self.name,)
@property
def size(self):
if (self._type_enum == types_pb2.DT_VARIANT or
self._type_enum == types_pb2.DT_RESOURCE):
return 1
return np.dtype(self.as_numpy_dtype).itemsize
# Define data type range of numpy dtype
dtype_range = {
np.bool_: (False, True),
np.bool8: (False, True),
np.uint8: (0, 255),
np.uint16: (0, 65535),
np.int8: (-128, 127),
np.int16: (-32768, 32767),
np.int64: (-2**63, 2**63 - 1),
np.uint64: (0, 2**64 - 1),
np.int32: (-2**31, 2**31 - 1),
np.uint32: (0, 2**32 - 1),
np.float32: (-1, 1),
np.float64: (-1, 1)
}
# Define standard wrappers for the types_pb2.DataType enum.
resource = DType(types_pb2.DT_RESOURCE)
tf_export("dtypes.resource", "resource").export_constant(__name__, "resource")
variant = DType(types_pb2.DT_VARIANT)
tf_export("dtypes.variant", "variant").export_constant(__name__, "variant")
float16 = DType(types_pb2.DT_HALF)
tf_export("dtypes.float16", "float16").export_constant(__name__, "float16")
half = float16
tf_export("dtypes.half", "half").export_constant(__name__, "half")
float32 = DType(types_pb2.DT_FLOAT)
tf_export("dtypes.float32", "float32").export_constant(__name__, "float32")
float64 = DType(types_pb2.DT_DOUBLE)
tf_export("dtypes.float64", "float64").export_constant(__name__, "float64")
double = float64
tf_export("dtypes.double", "double").export_constant(__name__, "double")
int32 = DType(types_pb2.DT_INT32)
tf_export("dtypes.int32", "int32").export_constant(__name__, "int32")
uint8 = DType(types_pb2.DT_UINT8)
tf_export("dtypes.uint8", "uint8").export_constant(__name__, "uint8")
uint16 = DType(types_pb2.DT_UINT16)
tf_export("dtypes.uint16", "uint16").export_constant(__name__, "uint16")
uint32 = DType(types_pb2.DT_UINT32)
tf_export("dtypes.uint32", "uint32").export_constant(__name__, "uint32")
uint64 = DType(types_pb2.DT_UINT64)
tf_export("dtypes.uint64", "uint64").export_constant(__name__, "uint64")
int16 = DType(types_pb2.DT_INT16)
tf_export("dtypes.int16", "int16").export_constant(__name__, "int16")
int8 = DType(types_pb2.DT_INT8)
tf_export("dtypes.int8", "int8").export_constant(__name__, "int8")
string = DType(types_pb2.DT_STRING)
tf_export("dtypes.string", "string").export_constant(__name__, "string")
complex64 = DType(types_pb2.DT_COMPLEX64)
tf_export("dtypes.complex64",
"complex64").export_constant(__name__, "complex64")
complex128 = DType(types_pb2.DT_COMPLEX128)
tf_export("dtypes.complex128",
"complex128").export_constant(__name__, "complex128")
int64 = DType(types_pb2.DT_INT64)
tf_export("dtypes.int64", "int64").export_constant(__name__, "int64")
bool = DType(types_pb2.DT_BOOL) # pylint: disable=redefined-builtin
tf_export("dtypes.bool", "bool").export_constant(__name__, "bool")
qint8 = DType(types_pb2.DT_QINT8)
tf_export("dtypes.qint8", "qint8").export_constant(__name__, "qint8")
quint8 = DType(types_pb2.DT_QUINT8)
tf_export("dtypes.quint8", "quint8").export_constant(__name__, "quint8")
qint16 = DType(types_pb2.DT_QINT16)
tf_export("dtypes.qint16", "qint16").export_constant(__name__, "qint16")
quint16 = DType(types_pb2.DT_QUINT16)
tf_export("dtypes.quint16", "quint16").export_constant(__name__, "quint16")
qint32 = DType(types_pb2.DT_QINT32)
tf_export("dtypes.qint32", "qint32").export_constant(__name__, "qint32")
resource_ref = DType(types_pb2.DT_RESOURCE_REF)
variant_ref = DType(types_pb2.DT_VARIANT_REF)
bfloat16 = DType(types_pb2.DT_BFLOAT16)
tf_export("dtypes.bfloat16", "bfloat16").export_constant(__name__, "bfloat16")
float16_ref = DType(types_pb2.DT_HALF_REF)
half_ref = float16_ref
float32_ref = DType(types_pb2.DT_FLOAT_REF)
float64_ref = DType(types_pb2.DT_DOUBLE_REF)
double_ref = float64_ref
int32_ref = DType(types_pb2.DT_INT32_REF)
uint32_ref = DType(types_pb2.DT_UINT32_REF)
uint8_ref = DType(types_pb2.DT_UINT8_REF)
uint16_ref = DType(types_pb2.DT_UINT16_REF)
int16_ref = DType(types_pb2.DT_INT16_REF)
int8_ref = DType(types_pb2.DT_INT8_REF)
string_ref = DType(types_pb2.DT_STRING_REF)
complex64_ref = DType(types_pb2.DT_COMPLEX64_REF)
complex128_ref = DType(types_pb2.DT_COMPLEX128_REF)
int64_ref = DType(types_pb2.DT_INT64_REF)
uint64_ref = DType(types_pb2.DT_UINT64_REF)
bool_ref = DType(types_pb2.DT_BOOL_REF)
qint8_ref = DType(types_pb2.DT_QINT8_REF)
quint8_ref = DType(types_pb2.DT_QUINT8_REF)
qint16_ref = DType(types_pb2.DT_QINT16_REF)
quint16_ref = DType(types_pb2.DT_QUINT16_REF)
qint32_ref = DType(types_pb2.DT_QINT32_REF)
bfloat16_ref = DType(types_pb2.DT_BFLOAT16_REF)
_NUMPY_INCOMPATIBLE = frozenset([
types_pb2.DT_VARIANT, types_pb2.DT_VARIANT_REF, types_pb2.DT_RESOURCE,
types_pb2.DT_RESOURCE_REF
])
# Maintain an intern table so that we don't have to create a large
# number of small objects.
_INTERN_TABLE = {
types_pb2.DT_HALF: float16,
types_pb2.DT_FLOAT: float32,
types_pb2.DT_DOUBLE: float64,
types_pb2.DT_INT32: int32,
types_pb2.DT_UINT8: uint8,
types_pb2.DT_UINT16: uint16,
types_pb2.DT_UINT32: uint32,
types_pb2.DT_UINT64: uint64,
types_pb2.DT_INT16: int16,
types_pb2.DT_INT8: int8,
types_pb2.DT_STRING: string,
types_pb2.DT_COMPLEX64: complex64,
types_pb2.DT_COMPLEX128: complex128,
types_pb2.DT_INT64: int64,
types_pb2.DT_BOOL: bool,
types_pb2.DT_QINT8: qint8,
types_pb2.DT_QUINT8: quint8,
types_pb2.DT_QINT16: qint16,
types_pb2.DT_QUINT16: quint16,
types_pb2.DT_QINT32: qint32,
types_pb2.DT_BFLOAT16: bfloat16,
types_pb2.DT_RESOURCE: resource,
types_pb2.DT_VARIANT: variant,
types_pb2.DT_HALF_REF: float16_ref,
types_pb2.DT_FLOAT_REF: float32_ref,
types_pb2.DT_DOUBLE_REF: float64_ref,
types_pb2.DT_INT32_REF: int32_ref,
types_pb2.DT_UINT32_REF: uint32_ref,
types_pb2.DT_UINT8_REF: uint8_ref,
types_pb2.DT_UINT16_REF: uint16_ref,
types_pb2.DT_INT16_REF: int16_ref,
types_pb2.DT_INT8_REF: int8_ref,
types_pb2.DT_STRING_REF: string_ref,
types_pb2.DT_COMPLEX64_REF: complex64_ref,
types_pb2.DT_COMPLEX128_REF: complex128_ref,
types_pb2.DT_INT64_REF: int64_ref,
types_pb2.DT_UINT64_REF: uint64_ref,
types_pb2.DT_BOOL_REF: bool_ref,
types_pb2.DT_QINT8_REF: qint8_ref,
types_pb2.DT_QUINT8_REF: quint8_ref,
types_pb2.DT_QINT16_REF: qint16_ref,
types_pb2.DT_QUINT16_REF: quint16_ref,
types_pb2.DT_QINT32_REF: qint32_ref,
types_pb2.DT_BFLOAT16_REF: bfloat16_ref,
types_pb2.DT_RESOURCE_REF: resource_ref,
types_pb2.DT_VARIANT_REF: variant_ref,
}
# Standard mappings between types_pb2.DataType values and string names.
_TYPE_TO_STRING = {
types_pb2.DT_HALF: "float16",
types_pb2.DT_FLOAT: "float32",
types_pb2.DT_DOUBLE: "float64",
types_pb2.DT_INT32: "int32",
types_pb2.DT_UINT8: "uint8",
types_pb2.DT_UINT16: "uint16",
types_pb2.DT_UINT32: "uint32",
types_pb2.DT_UINT64: "uint64",
types_pb2.DT_INT16: "int16",
types_pb2.DT_INT8: "int8",
types_pb2.DT_STRING: "string",
types_pb2.DT_COMPLEX64: "complex64",
types_pb2.DT_COMPLEX128: "complex128",
types_pb2.DT_INT64: "int64",
types_pb2.DT_BOOL: "bool",
types_pb2.DT_QINT8: "qint8",
types_pb2.DT_QUINT8: "quint8",
types_pb2.DT_QINT16: "qint16",
types_pb2.DT_QUINT16: "quint16",
types_pb2.DT_QINT32: "qint32",
types_pb2.DT_BFLOAT16: "bfloat16",
types_pb2.DT_RESOURCE: "resource",
types_pb2.DT_VARIANT: "variant",
types_pb2.DT_HALF_REF: "float16_ref",
types_pb2.DT_FLOAT_REF: "float32_ref",
types_pb2.DT_DOUBLE_REF: "float64_ref",
types_pb2.DT_INT32_REF: "int32_ref",
types_pb2.DT_UINT32_REF: "uint32_ref",
types_pb2.DT_UINT8_REF: "uint8_ref",
types_pb2.DT_UINT16_REF: "uint16_ref",
types_pb2.DT_INT16_REF: "int16_ref",
types_pb2.DT_INT8_REF: "int8_ref",
types_pb2.DT_STRING_REF: "string_ref",
types_pb2.DT_COMPLEX64_REF: "complex64_ref",
types_pb2.DT_COMPLEX128_REF: "complex128_ref",
types_pb2.DT_INT64_REF: "int64_ref",
types_pb2.DT_UINT64_REF: "uint64_ref",
types_pb2.DT_BOOL_REF: "bool_ref",
types_pb2.DT_QINT8_REF: "qint8_ref",
types_pb2.DT_QUINT8_REF: "quint8_ref",
types_pb2.DT_QINT16_REF: "qint16_ref",
types_pb2.DT_QUINT16_REF: "quint16_ref",
types_pb2.DT_QINT32_REF: "qint32_ref",
types_pb2.DT_BFLOAT16_REF: "bfloat16_ref",
types_pb2.DT_RESOURCE_REF: "resource_ref",
types_pb2.DT_VARIANT_REF: "variant_ref",
}
_STRING_TO_TF = {
value: _INTERN_TABLE[key] for key, value in _TYPE_TO_STRING.items()
}
# Add non-canonical aliases.
_STRING_TO_TF["half"] = float16
_STRING_TO_TF["half_ref"] = float16_ref
_STRING_TO_TF["float"] = float32
_STRING_TO_TF["float_ref"] = float32_ref
_STRING_TO_TF["double"] = float64
_STRING_TO_TF["double_ref"] = float64_ref
# Numpy representation for quantized dtypes.
#
# These are magic strings that are used in the swig wrapper to identify
# quantized types.
# TODO(mrry,keveman): Investigate Numpy type registration to replace this
# hard-coding of names.
_np_qint8 = np.dtype([("qint8", np.int8)])
_np_quint8 = np.dtype([("quint8", np.uint8)])
_np_qint16 = np.dtype([("qint16", np.int16)])
_np_quint16 = np.dtype([("quint16", np.uint16)])
_np_qint32 = np.dtype([("qint32", np.int32)])
# _np_bfloat16 is defined by a module import.
# Custom struct dtype for directly-fed ResourceHandles of supported type(s).
np_resource = np.dtype([("resource", np.ubyte)])
# Standard mappings between types_pb2.DataType values and numpy.dtypes.
_NP_TO_TF = {
np.float16: float16,
np.float32: float32,
np.float64: float64,
np.int32: int32,
np.int64: int64,
np.uint8: uint8,
np.uint16: uint16,
np.uint32: uint32,
np.uint64: uint64,
np.int16: int16,
np.int8: int8,
np.complex64: complex64,
np.complex128: complex128,
np.object_: string,
np.string_: string,
np.unicode_: string,
np.bool_: bool,
_np_qint8: qint8,
_np_quint8: quint8,
_np_qint16: qint16,
_np_quint16: quint16,
_np_qint32: qint32,
_np_bfloat16: bfloat16,
}
# Map (some) NumPy platform dtypes to TF ones using their fixed-width
# synonyms. Note that platform dtypes are not always simples aliases,
# i.e. reference equality is not guaranteed. See e.g. numpy/numpy#9799.
for pdt in [
np.intc,
np.uintc,
np.int_,
np.uint,
np.longlong,
np.ulonglong,
]:
if pdt not in _NP_TO_TF:
_NP_TO_TF[pdt] = next(
_NP_TO_TF[dt] for dt in _NP_TO_TF if dt == pdt().dtype)
TF_VALUE_DTYPES = set(_NP_TO_TF.values())
_TF_TO_NP = {
types_pb2.DT_HALF:
np.float16,
types_pb2.DT_FLOAT:
np.float32,
types_pb2.DT_DOUBLE:
np.float64,
types_pb2.DT_INT32:
np.int32,
types_pb2.DT_UINT8:
np.uint8,
types_pb2.DT_UINT16:
np.uint16,
types_pb2.DT_UINT32:
np.uint32,
types_pb2.DT_UINT64:
np.uint64,
types_pb2.DT_INT16:
np.int16,
types_pb2.DT_INT8:
np.int8,
# NOTE(touts): For strings we use np.object as it supports variable length
# strings.
types_pb2.DT_STRING:
np.object,
types_pb2.DT_COMPLEX64:
np.complex64,
types_pb2.DT_COMPLEX128:
np.complex128,
types_pb2.DT_INT64:
np.int64,
types_pb2.DT_BOOL:
np.bool,
types_pb2.DT_QINT8:
_np_qint8,
types_pb2.DT_QUINT8:
_np_quint8,
types_pb2.DT_QINT16:
_np_qint16,
types_pb2.DT_QUINT16:
_np_quint16,
types_pb2.DT_QINT32:
_np_qint32,
types_pb2.DT_BFLOAT16:
_np_bfloat16,
# Ref types
types_pb2.DT_HALF_REF:
np.float16,
types_pb2.DT_FLOAT_REF:
np.float32,
types_pb2.DT_DOUBLE_REF:
np.float64,
types_pb2.DT_INT32_REF:
np.int32,
types_pb2.DT_UINT32_REF:
np.uint32,
types_pb2.DT_UINT8_REF:
np.uint8,
types_pb2.DT_UINT16_REF:
np.uint16,
types_pb2.DT_INT16_REF:
np.int16,
types_pb2.DT_INT8_REF:
np.int8,
types_pb2.DT_STRING_REF:
np.object,
types_pb2.DT_COMPLEX64_REF:
np.complex64,
types_pb2.DT_COMPLEX128_REF:
np.complex128,
types_pb2.DT_INT64_REF:
np.int64,
types_pb2.DT_UINT64_REF:
np.uint64,
types_pb2.DT_BOOL_REF:
np.bool,
types_pb2.DT_QINT8_REF:
_np_qint8,
types_pb2.DT_QUINT8_REF:
_np_quint8,
types_pb2.DT_QINT16_REF:
_np_qint16,
types_pb2.DT_QUINT16_REF:
_np_quint16,
types_pb2.DT_QINT32_REF:
_np_qint32,
types_pb2.DT_BFLOAT16_REF:
_np_bfloat16,
}
_QUANTIZED_DTYPES_NO_REF = frozenset([qint8, quint8, qint16, quint16, qint32])
_QUANTIZED_DTYPES_REF = frozenset(
[qint8_ref, quint8_ref, qint16_ref, quint16_ref, qint32_ref])
QUANTIZED_DTYPES = _QUANTIZED_DTYPES_REF.union(_QUANTIZED_DTYPES_NO_REF)
tf_export(
"dtypes.QUANTIZED_DTYPES",
v1=["dtypes.QUANTIZED_DTYPES",
"QUANTIZED_DTYPES"]).export_constant(__name__, "QUANTIZED_DTYPES")
_PYTHON_TO_TF = {
builtins.float: float32,
builtins.bool: bool,
builtins.object: string
}
_ANY_TO_TF = {}
_ANY_TO_TF.update(_INTERN_TABLE)
_ANY_TO_TF.update(_STRING_TO_TF)
_ANY_TO_TF.update(_PYTHON_TO_TF)
_ANY_TO_TF.update(_NP_TO_TF)
# Ensure no collisions.
assert len(_ANY_TO_TF) == sum(
len(d) for d in [_INTERN_TABLE, _STRING_TO_TF, _PYTHON_TO_TF, _NP_TO_TF])
@tf_export("dtypes.as_dtype", "as_dtype")
def as_dtype(type_value):
"""Converts the given `type_value` to a `DType`.
Args:
type_value: A value that can be converted to a `tf.DType` object. This may
currently be a `tf.DType` object, a [`DataType`
enum](https://www.tensorflow.org/code/tensorflow/core/framework/types.proto),
a string type name, or a `numpy.dtype`.
Returns:
A `DType` corresponding to `type_value`.
Raises:
TypeError: If `type_value` cannot be converted to a `DType`.
"""
if isinstance(type_value, DType):
return type_value
if isinstance(type_value, np.dtype):
try:
return _NP_TO_TF[type_value.type]
except KeyError:
pass
try:
return _ANY_TO_TF[type_value]
except KeyError:
pass
raise TypeError("Cannot convert value %r to a TensorFlow DType." %
(type_value,))
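
# Minimal usage sketch (illustrative comment only, not part of the TensorFlow source):
#   as_dtype("float32") is float32       # True: string names resolve to the interned DType
#   as_dtype(np.int64) is int64          # True: numpy types map through _NP_TO_TF
#   as_dtype(float32).as_numpy_dtype     # <class 'numpy.float32'>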
| [
"[email protected]"
] | |
6b27894d6781b7a1a591111f28f575c8857b9a50 | fb3061b2e8752e59e59d0f54f8a9637c25bcc8f4 | /Tristan/randomnumber.py | 0dd78cf9ab6872d179441ebf8ea0bba5d6a197ba | [] | no_license | idcrypt3/camp_2019_07_14 | 25383c67dbd2d44ad9e5966b2666d545688a4d36 | 708cccb313181fc15dc7aa45699024b9b44ba275 | refs/heads/master | 2020-06-20T06:21:37.907263 | 2019-07-19T18:53:17 | 2019-07-19T18:53:17 | 197,023,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42 | py | import random
print(random.randint(1, 20)) | [
"[email protected]"
] |