Dataset schema (one record per repository):
- repo_name: string (6-130 chars)
- hexsha: sequence of commit SHAs, one per sampled file
- file_path: sequence of repository-relative paths
- code: sequence of file contents
- apis: sequence of per-file API call lists
- possible_versions: list of per-file library-to-version maps
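Each record pairs files sampled from one repository with the third-party APIs those files call and the library versions consistent with those calls. A minimal sketch of the record layout, with values abridged from the first row below; the dict literal is illustrative only, not the dataset's actual storage format:

```python
# Illustrative shape of one record (values abridged from the DPM row below).
record = {
    "repo_name": "nextBillyonair/DPM",
    "hexsha": ["840ffaafe15c208b200b74094ffa8fe493b4c975"] * 3,
    "file_path": ["tests/test_moments.py",
                  "dpm/transforms/power.py",
                  "dpm/distributions/arcsine.py"],
    "code": ["import pytest\n...", "import torch\n...", "import torch\n..."],
    "apis": [["torch.tensor"],
             ["torch.nn.Parameter", "torch.tensor"],
             ["torch.nn.Parameter", "torch.max", "torch.min",
              "torch.tensor", "torch.rand", "torch.cos"]],
    # One map per file; the lists are empty because torch is not one of the
    # five tracked libraries (matplotlib, numpy, pandas, scipy, tensorflow).
    "possible_versions": [{"matplotlib": [], "numpy": [], "pandas": [],
                           "scipy": [], "tensorflow": []}] * 3,
}
```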
nextBillyonair/DPM
[ "840ffaafe15c208b200b74094ffa8fe493b4c975", "840ffaafe15c208b200b74094ffa8fe493b4c975", "840ffaafe15c208b200b74094ffa8fe493b4c975" ]
[ "tests/test_moments.py", "dpm/transforms/power.py", "dpm/distributions/arcsine.py" ]
[ "import pytest\nfrom dpm.distributions import *\nimport dpm.utils as utils\nimport torch\n\n\ndef test_arcsine():\n model = Arcsine()\n assert model.expectation == 0.5\n assert model.median == 0.5\n assert model.variance == 0.125\n assert model.skewness == 0.\n assert model.kurtosis == -1.5\n\n model = Arcsine(-1, 1)\n assert model.expectation == 0.\n assert model.median == 0.\n assert model.variance == 0.5\n assert model.skewness == 0.\n assert model.kurtosis == -1.5\n\ndef test_bernoulli():\n model = Bernoulli(probs=[0.3])\n assert model.logits.item() + 0.8473 < 1e-2\n assert model.expectation.item() - 0.3 < 1e-2\n assert model.variance.item() - 0.21 < 1e-2\n assert model.skewness.item() - 1.9047619048 < 1e-2\n assert model.kurtosis.item() + -1.2380952381 < 1e-2\n\ndef test_beta():\n model = Beta()\n assert model.expectation == 0.5\n assert model.variance == 0.125\n m = Beta(0.5, 0.5).mode.item()\n assert m == 0. or 1.\n assert Beta(4.5, 3.5).mode.item() - 0.5833333333 < 1e-2\n assert Beta(1.5, 0.5).mode.item() == 1.\n assert Beta(0.5, 1.5).mode.item() == 0.\n # assert Beta(1.00000, 1.00000).mode.item() > 0. and Beta(1.00000, 1.00000).mode.item() < 1.\n\ndef test_cauchy():\n model = Cauchy(loc=1.)\n assert model.median == 1.\n assert model.mode == 1.\n\ndef test_exponential():\n model = Exponential()\n assert model.expectation - 1. < 1e-2\n assert model.mode - 0. < 1e-2\n assert model.variance - 1. < 1e-2\n assert model.median - 0.6931471806 < 1e-2\n assert model.skewness - 2. < 1e-2\n assert model.kurtosis - 6. < 1e-2\n\n model = Exponential(0.5)\n assert model.expectation - 2. < 1e-2\n assert model.mode - 0. < 1e-2\n assert model.variance - 4. < 1e-2\n assert model.median - 1.3862943611 < 1e-2\n assert model.skewness - 2. < 1e-2\n assert model.kurtosis - 6. < 1e-2\n\ndef test_gamma():\n model = Gamma()\n assert model.expectation - 1. < 1e-2\n assert model.variance - 1. < 1e-2\n\n model = Gamma(0.5, 0.75)\n assert model.expectation - 0.6666666667 < 1e-2\n assert model.variance - 0.8888888889 < 1e-2\n\ndef test_gumbel():\n model = Gumbel(loc=1., scale=2.)\n assert model.expectation - (1 + 2 * utils.euler_mascheroni) < 1e-2\n assert model.mode == 1.\n assert model.median - 1.7330258412 < 1e-2\n assert model.variance - 6.5797362674 < 1e-2\n assert model.skewness - 1.14 < 1e-2\n assert model.kurtosis - 2.4 < 1e-2\n\ndef test_hyperbolicsecant():\n model = HyperbolicSecant()\n assert model.expectation == 0.\n assert model.variance == 1.\n assert model.median == 0.\n\ndef test_laplace():\n model = Laplace(loc=1., scale=2.)\n assert model.expectation - 1. < 1e-2\n assert model.variance - 8. < 1e-2\n assert model.stddev - 2.8284271247 < 1e-2\n assert model.median - 1. < 1e-2\n assert model.mode - 1. < 1e-2\n assert model.skewness < 1e-2\n assert model.kurtosis - 3. 
< 1e-2\n assert model.entropy() - 2.3862943611 < 1e-2\n\ndef test_log_cauchy():\n model = LogCauchy(loc=2.)\n assert model.median - 7.3890560989 < 1e-2\n\ndef test_log_normal():\n model = LogNormal()\n assert model.expectation - 1.6487212707 < 1e-2\n assert model.variance - 4.6707742705 < 1e-2\n assert model.mode - utils.e < 1e-2\n assert model.median - utils.e < 1e-2\n\ndef test_logistic():\n model = Logistic(loc=1., scale=2.)\n assert model.expectation == 1.\n assert model.mode == 1.\n assert model.variance - 13.1594725348 < 1e-2\n assert model.median == 1.\n assert model.skewness == 0.\n assert model.kurtosis == 1.2\n\ndef test_normal():\n model = Normal(0., 3.)\n assert model.variance.item() == 3.\n assert model.expectation.item() == 0.\n model = Normal([0., 0.], [3., 1., 1., 3.])\n assert (model.variance - torch.tensor([[3., 1.], [1., 3.]]) < 1e-2).all()\n assert (model.expectation == torch.tensor([0., 0.])).all()\n\n\ndef test_rayleigh():\n model = Rayleigh(3.)\n assert model.expectation - 3.7599424119 < 1e-2\n assert model.mode - 3. < 1e-2\n assert model.median - 3.5322300675 < 1e-2\n assert model.variance - 3.8628330588 < 1e-2\n assert model.skewness - 1.1186145158 < 1e-2\n assert model.kurtosis - 0.2450893007 < 1e-2\n\n\ndef test_studentt():\n model = StudentT()\n model.expectation\n model.variance\n model.mode\n\ndef test_uniform():\n model = Uniform()\n assert model.expectation - 0.5 < 1e-2\n assert model.variance - 1/12. < 1e-2\n assert model.median - 0.5 < 1e-2\n assert model.skewness == 0.\n assert model.kurtosis + 1.2 < 1e-2\n\ndef test_logitnormal():\n model = LogitNormal()\n assert model.median - torch.sigmoid(torch.tensor(0.)) < 1e-2\n model = LogitNormal(1.)\n assert model.median - torch.sigmoid(torch.tensor(1.)) < 1e-2\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n# EOF\n", "import torch\nfrom torch.nn import Parameter\nfrom .transform import Transform\n\n\nclass Power(Transform):\n\n def __init__(self, power=1.0, learnable=True):\n super().__init__()\n if not isinstance(power, torch.Tensor):\n power = torch.tensor(power).view(1, -1)\n self.power = power\n if learnable:\n self.power = Parameter(self.power)\n\n def forward(self, x):\n if self.power == 0.:\n return x.exp()\n return (1. + x * self.power) ** (1. / self.power)\n\n def inverse(self, y):\n if self.power == 0.:\n return y.log()\n return (y**self.power - 1.) / self.power\n\n def log_abs_det_jacobian(self, x, y):\n if self.power == 0.:\n return x.sum(-1)\n return ((1. / self.power - 1.) * (x * self.power).log1p()).sum(-1)\n\n def get_parameters(self):\n return {'type':'power', 'power':self.power.item()}\n", "import torch\nfrom torch import nn\nfrom torch import distributions as dists\nfrom torch.nn import Module, Parameter, ModuleList\nfrom torch.nn.functional import softplus\nimport numpy as np\nimport math\nfrom .distribution import Distribution\n\n\nclass Arcsine(Distribution):\n\n def __init__(self, low=0., high=1., learnable=True):\n super().__init__()\n if not isinstance(low, torch.Tensor):\n low = torch.tensor(low).view(-1)\n self.n_dims = len(low)\n if not isinstance(high, torch.Tensor):\n high = torch.tensor(high).view(-1)\n self.alpha = low.float()\n self.beta = high.float()\n if learnable:\n self.alpha = Parameter(self.alpha)\n self.beta = Parameter(self.beta)\n\n def log_prob(self, value):\n return - (math.pi * ((value - self.low) * (self.high - value)).sqrt()).log().sum(-1)\n\n def sample(self, batch_size):\n u = torch.rand((batch_size, self.n_dims))\n return self.icdf(u)\n\n def cdf(self, value):\n return (2. 
/ math.pi) * torch.asin(((value - self.low) / (self.high - self.low)).sqrt())\n\n def icdf(self, value):\n u = 0.5 - 0.5 * torch.cos(value * math.pi)\n return self.low + (self.high - self.low) * u\n\n @property\n def expectation(self):\n return (self.low + self.high) / 2.\n\n @property\n def variance(self):\n return (self.high - self.low).pow(2) / 8.\n\n @property\n def median(self):\n return self.expectation\n\n @property\n def skewness(self):\n return torch.tensor(0.).float()\n\n @property\n def kurtosis(self):\n return torch.tensor(-3./2.).float()\n\n @property\n def low(self):\n return torch.min(self.alpha, self.beta)\n\n @property\n def high(self):\n return torch.max(self.alpha, self.beta)\n\n def get_parameters(self):\n if self.n_dims == 1:\n return {'low':self.low.item(), 'high':self.high.item()}\n return {'low':self.low.detach().numpy(),\n 'high':self.high.detach().numpy()}\n" ]
[ [ "torch.tensor" ], [ "torch.nn.Parameter", "torch.tensor" ], [ "torch.nn.Parameter", "torch.max", "torch.min", "torch.tensor", "torch.rand", "torch.cos" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
skyquant2/gs-quant
[ "b7e648fa7912b13ad1fd503b643389e34587aa1e", "b7e648fa7912b13ad1fd503b643389e34587aa1e" ]
[ "gs_quant/test/timeseries/test_datetime.py", "gs_quant/test/timeseries/test_statistics.py" ]
[ "\"\"\"\nCopyright 2018 Goldman Sachs.\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing,\nsoftware distributed under the License is distributed on an\n\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\nKIND, either express or implied. See the License for the\nspecific language governing permissions and limitations\nunder the License.\n\"\"\"\n\nimport pytest\nfrom pandas.testing import assert_series_equal\n\nfrom gs_quant.timeseries.datetime import *\n\n\ndef test_align():\n dates1 = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 5),\n ]\n\n dates2 = [\n date(2019, 1, 2),\n date(2019, 1, 4),\n date(2019, 1, 6),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0], index=dates1)\n y = pd.Series([20.0, 40.0, 60.0], index=dates2)\n\n expectedl = pd.Series([2.0, 4.0], index=[date(2019, 1, 2), date(2019, 1, 4)])\n expectedr = pd.Series([20.0, 40.0], index=[date(2019, 1, 2), date(2019, 1, 4)])\n\n result = align(x, y, Interpolate.INTERSECT)\n assert_series_equal(result[0], expectedl, obj=\"Align intersect left\")\n assert_series_equal(result[1], expectedr, obj=\"Align intersect left\")\n\n result = align(y, x, Interpolate.INTERSECT)\n assert_series_equal(result[0], expectedr, obj=\"Align intersect right\")\n assert_series_equal(result[1], expectedl, obj=\"Align intersect right\")\n\n union_dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 5),\n date(2019, 1, 6),\n ]\n\n expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, np.nan], index=union_dates)\n expected2 = pd.Series([np.nan, 20.0, np.nan, 40.0, np.nan, 60.0], index=union_dates)\n\n result = align(x, y, Interpolate.NAN)\n assert_series_equal(result[0], expected1, obj=\"Align NaN left\")\n assert_series_equal(result[1], expected2, obj=\"Align NaN left\")\n\n result = align(y, x, Interpolate.NAN)\n assert_series_equal(result[0], expected2, obj=\"Align NaN right\")\n assert_series_equal(result[1], expected1, obj=\"Align NaN right\")\n\n expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 0.0], index=union_dates)\n expected2 = pd.Series([0.0, 20.0, 0.0, 40.0, 0.0, 60.0], index=union_dates)\n\n result = align(x, y, Interpolate.ZERO)\n assert_series_equal(result[0], expected1, obj=\"Align zero left\")\n assert_series_equal(result[1], expected2, obj=\"Align zero left\")\n\n result = align(y, x, Interpolate.ZERO)\n assert_series_equal(result[0], expected2, obj=\"Align zero right\")\n assert_series_equal(result[1], expected1, obj=\"Align zero right\")\n\n expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 5.0], index=union_dates)\n expected2 = pd.Series([20.0, 20.0, 20.0, 40.0, 40.0, 60.0], index=union_dates)\n\n result = align(x, y, Interpolate.STEP)\n assert_series_equal(result[0], expected1, obj=\"Align step left\")\n assert_series_equal(result[1], expected2, obj=\"Align step left\")\n\n result = align(y, x, Interpolate.STEP)\n assert_series_equal(result[0], expected2, obj=\"Align step left\")\n assert_series_equal(result[1], expected1, obj=\"Align step left\")\n\n xp = x.copy()\n yp = y.copy()\n xp.index = pd.to_datetime(xp.index)\n yp.index = pd.to_datetime(yp.index)\n up = pd.to_datetime(union_dates)\n\n expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, np.nan], index=up)\n expected2 = pd.Series([np.nan, 20.0, 30.0, 
40.0, 50.0, 60.0], index=up)\n\n result = align(xp, yp, Interpolate.TIME)\n assert_series_equal(result[0], expected1, obj=\"Align time left\")\n assert_series_equal(result[1], expected2, obj=\"Align time left\")\n\n result = align(yp, xp, Interpolate.TIME)\n assert_series_equal(result[0], expected2, obj=\"Align time right\")\n assert_series_equal(result[1], expected1, obj=\"Align time right\")\n\n a = pd.Series([0, 100, 110], index=pd.DatetimeIndex(['2019-07-01', '2019-07-08', '2019-07-10']))\n b = pd.Series([20, 60, 70], index=pd.DatetimeIndex(['2019-07-02', '2019-07-10', '2019-07-11']))\n result = align(a, b, Interpolate.TIME)\n\n u_index = a.index.union(b.index)\n assert_series_equal(result[0], pd.Series([0, 100 / 7, 100, 110, np.nan], index=u_index))\n assert_series_equal(result[1], pd.Series([np.nan, 20, 50, 60, 70], index=u_index))\n\n result = align(x, 3)\n assert_series_equal(result[0], x, obj=\"Align scalar left\")\n assert_series_equal(result[1], pd.Series(3, index=dates1), obj=\"Align scalar left\")\n\n result = align(3, x)\n assert_series_equal(result[0], pd.Series(3, index=dates1), obj=\"Align scalar left\")\n assert_series_equal(result[1], x, obj=\"Align scalar right\")\n\n result = align(1, 2)\n assert result[0] == 1\n assert result[1] == 2\n\n with pytest.raises(MqValueError):\n align(x, x, \"None\")\n\n\ndef test_interpolate():\n dates = [\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 5),\n date(2019, 1, 7),\n ]\n\n x = pd.Series([2.0, 3.0, 5.0, 7.0], index=dates)\n\n result = interpolate(x, dates)\n assert_series_equal(result, x, obj=\"Interpolate series by dates\")\n\n result = interpolate(x, x)\n assert_series_equal(result, x, obj=\"Interpolate series by series dates\")\n\n result = interpolate(x)\n assert_series_equal(result, x, obj=\"Interpolate series default\")\n\n select_dates = [\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 7),\n ]\n\n result = interpolate(x, select_dates)\n expected = pd.Series([2.0, 3.0, 7.0], index=select_dates)\n assert_series_equal(result, expected, obj=\"Interpolate subset of dates\")\n\n select_dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 4),\n date(2019, 1, 5),\n date(2019, 1, 6),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n intersect_dates = [\n date(2019, 1, 2),\n date(2019, 1, 5),\n date(2019, 1, 7),\n ]\n\n result = interpolate(x, select_dates, Interpolate.INTERSECT)\n expected = pd.Series([2.0, 5.0, 7.0], index=intersect_dates)\n assert_series_equal(result, expected, obj=\"Interpolate intersect\")\n\n result = interpolate(x, select_dates, Interpolate.NAN)\n expected = pd.Series([np.nan, 2.0, np.nan, 5.0, np.nan, 7.0, np.nan], index=select_dates)\n assert_series_equal(result, expected, obj=\"Interpolate nan\")\n\n result = interpolate(x, select_dates, Interpolate.ZERO)\n expected = pd.Series([0.0, 2.0, 0.0, 5.0, 0.0, 7.0, 0.0], index=select_dates)\n assert_series_equal(result, expected, obj=\"Interpolate zero\")\n\n result = interpolate(x, select_dates, Interpolate.STEP)\n expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=select_dates)\n assert_series_equal(result, expected, obj=\"Interpolate step dates\")\n\n result = interpolate(x, pd.Series(np.nan, select_dates), Interpolate.STEP)\n expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=select_dates)\n assert_series_equal(result, expected, obj=\"Interpolate step series\")\n\n xnan = pd.Series([np.nan, 3.0, 5.0, 7.0], index=dates)\n\n result = interpolate(xnan, select_dates, Interpolate.STEP)\n expected = 
pd.Series([np.nan, np.nan, np.nan, 5.0, 5.0, 7.0, 7.0], index=select_dates)\n assert_series_equal(result, expected, obj=\"Interpolate flat nan start\")\n\n x = pd.Series([2.0, 3.0, 5.0, 7.0], index=pd.DatetimeIndex(dates))\n result = interpolate(x, select_dates, Interpolate.STEP)\n expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=pd.DatetimeIndex(select_dates))\n assert_series_equal(result, expected, obj=\"Interpolate step dates to series with timestamps\")\n\n with pytest.raises(MqValueError, match=\"Unknown intersection type: None\"):\n interpolate(x, x, \"None\")\n\n with pytest.raises(MqValueError, match=\"Cannot perform step interpolation on an empty series\"):\n interpolate(pd.Series(), select_dates, Interpolate.STEP)\n\n\ndef test_value():\n dates = [\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 5),\n date(2019, 1, 7),\n ]\n\n x = pd.Series([2.0, 3.0, 5.0, 7.0], index=dates)\n\n result = value(x, date(2019, 1, 3))\n assert result == 3.0\n\n result = value(x, date(2019, 1, 5))\n assert result == 5.0\n\n result = value(x, date(2019, 1, 4))\n assert result == 3.0\n\n result = value(x, date(2019, 1, 4), Interpolate.INTERSECT)\n assert result is None\n\n result = value(x, date(2019, 1, 4), Interpolate.STEP)\n assert result == 3.0\n\n result = value(x, date(2019, 1, 4), Interpolate.ZERO)\n assert result == 0.0\n\n result = value(x, date(2019, 1, 4), Interpolate.NAN)\n assert np.isnan(result)\n\n\ndef test_day():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0], index=dates)\n\n result = day(x)\n expected = pd.Series([1, 2, 3, 4], index=dates)\n assert_series_equal(result, expected, obj=\"Day\")\n\n\ndef test_weekday():\n dates = [\n date(2019, 1, 7),\n date(2019, 1, 8),\n date(2019, 1, 9),\n date(2019, 1, 10),\n date(2019, 1, 11),\n date(2019, 1, 12),\n date(2019, 1, 13),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0], index=dates)\n\n result = weekday(x)\n expected = pd.Series([0, 1, 2, 3, 4, 5, 6], index=dates)\n assert_series_equal(result, expected, obj=\"Weekday\")\n\n\ndef test_month():\n dates = [\n date(2019, 1, 1),\n date(2019, 2, 1),\n date(2019, 3, 1),\n date(2019, 4, 1),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0], index=dates)\n\n result = month(x)\n expected = pd.Series([1, 2, 3, 4], index=dates)\n assert_series_equal(result, expected, obj=\"Month\")\n\n\ndef test_year():\n dates = [\n date(2019, 1, 1),\n date(2020, 1, 2),\n date(2021, 1, 3),\n date(2022, 1, 4),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0], index=dates)\n\n result = year(x)\n expected = pd.Series([2019, 2020, 2021, 2022], index=dates)\n assert_series_equal(result, expected, obj=\"Year\")\n\n\ndef test_quarter():\n dates = [\n date(2019, 1, 1),\n date(2019, 4, 1),\n date(2019, 7, 1),\n date(2019, 10, 1),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0], index=dates)\n\n result = quarter(x)\n expected = pd.Series([1, 2, 3, 4], index=dates)\n assert_series_equal(result, expected, obj=\"Quarter\")\n\n\ndef test_day_count_fractions():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 5),\n date(2019, 1, 6),\n ]\n\n x = pd.Series([])\n assert_series_equal(x, day_count_fractions(x))\n\n x = pd.Series([100.0, 101, 103.02, 100.9596, 100.9596, 102.978792], index=dates)\n\n result = day_count_fractions(x, DayCountConvention.ACTUAL_360)\n result2 = day_count_fractions(x.index, DayCountConvention.ACTUAL_360)\n dcf = 1 / 360\n expected = 
pd.Series([np.NaN, dcf, dcf, dcf, dcf, dcf], index=dates)\n assert_series_equal(result, expected, obj=\"ACT/360\")\n assert_series_equal(result2, expected, obj=\"ACT/360\")\n\n result = day_count_fractions(x, DayCountConvention.ACTUAL_365F)\n dcf = 1 / 365\n expected = pd.Series([np.NaN, dcf, dcf, dcf, dcf, dcf], index=dates)\n assert_series_equal(result, expected, obj=\"ACT/365\")\n\n\ndef test_date_range():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 5),\n date(2019, 1, 6),\n ]\n\n values = [1.0, 2.0, 3.0, 4.0, 5.0, 7.0]\n s0 = pd.Series(values, index=dates)\n s1 = pd.Series(values, index=pd.date_range('2019-01-01', periods=6, freq='D'))\n\n for x in [s0, s1]:\n assert (date_range(x, 0, 0) == x).all()\n assert (date_range(x, 0, 0, True) == x.iloc[:-2]).all()\n\n assert date_range(x, 0, date(2019, 1, 3)).index[-1] == date(2019, 1, 3)\n assert (date_range(x, 0, date(2019, 1, 3)) == x.iloc[:3]).all()\n\n assert date_range(x, date(2019, 1, 3), date(2019, 1, 6)).index[0] == date(2019, 1, 3)\n assert date_range(x, date(2019, 1, 3), date(2019, 1, 6)).index[-1] == date(2019, 1, 6)\n assert (date_range(x, date(2019, 1, 3), date(2019, 1, 6)) == x.iloc[2:6]).all()\n\n y = pd.Series(values, index=pd.date_range('2020-10-23', periods=6, freq='D'))\n assert (date_range(y, 1, 1, True) == y.iloc[3:5]).all()\n\n with pytest.raises(MqValueError):\n date_range(pd.Series([1]), 0, 0)\n\n with pytest.raises(MqTypeError):\n date_range(pd.Series([1]), 0, 0, 'string')\n\n\ndef test_prepend():\n x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 7.0], index=pd.date_range('2019-01-01', \"2019-01-06\"))\n y = pd.Series([3.1, 4.1, 5.1], index=pd.date_range('2019-01-03', '2019-01-05'))\n\n assert_series_equal(prepend([]), pd.Series(dtype='float64'), obj='prepend empty')\n\n assert_series_equal(prepend([x]), x, obj='prepend one series')\n\n actual = prepend([x, y])\n expected = pd.Series([1.0, 2.0, 3.1, 4.1, 5.1], index=pd.date_range('2019-01-01', '2019-01-05'))\n assert_series_equal(actual, expected, obj='prepend two series')\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 7.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))\n y = pd.Series([3.1, 4.1, 5.1], index=pd.date_range('2019-01-01 02:00', periods=3, freq='H'))\n\n actual = prepend([x, y])\n expected = pd.Series([1.0, 2.0, 3.1, 4.1, 5.1], index=pd.date_range('2019-01-01', periods=5, freq='H'))\n assert_series_equal(actual, expected, obj='prepend two real-time series')\n\n\ndef test_union():\n x = pd.Series([3.1, 4.1, np.nan], index=pd.date_range('2019-01-03', '2019-01-05'))\n y = pd.Series([1.0, np.nan, 3.0, 4.0, 5.0, 6.0], index=pd.date_range('2019-01-01', \"2019-01-06\"))\n z = pd.Series([60.0, 70.0], index=pd.date_range('2019-01-06', \"2019-01-07\"))\n\n assert_series_equal(union([]), pd.Series(dtype='float64'), obj='union empty')\n\n x.index.freq = None\n assert_series_equal(union([x]), x, obj='union of one series')\n\n actual = union([x, y, z])\n expected = pd.Series([1.0, np.nan, 3.1, 4.1, 5.0, 6.0, 70], index=pd.date_range('2019-01-01', '2019-01-07'))\n assert_series_equal(actual, expected, obj='union of three series')\n\n x = pd.Series([3.1, 4.1, np.nan], index=pd.date_range('2019-01-01 02:00', periods=3, freq='H'))\n y = pd.Series([1.0, np.nan, 3.0, 4.0, 5.0, 6.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))\n\n actual = union([x, y])\n expected = pd.Series([1.0, np.nan, 3.1, 4.1, 5.0, 6.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))\n assert_series_equal(actual, 
expected, obj='union of two real-time series')\n\n\ndef test_bucketize():\n dates = pd.bdate_range(start='1/1/2021', end='4/23/2021')\n series = pd.Series(range(len(dates)), index=dates)\n\n actual = bucketize(series, AggregateFunction.MAX, AggregatePeriod.MONTH)\n expected_index = pd.DatetimeIndex([date(2021, 1, 31), date(2021, 2, 28), date(2021, 3, 31), date(2021, 4, 30)])\n expected = pd.Series([20, 40, 63, 80], index=expected_index)\n actual.index.freq = None # Ignore the index freq\n assert_series_equal(actual, expected, check_index_type=False)\n\n\ndef test_day_count():\n assert day_count(datetime.date(2021, 5, 7), datetime.date(2021, 5, 10)) == 1\n assert day_count(datetime.date(2021, 5, 10), datetime.date(2021, 5, 14)) == 4\n assert day_count(datetime.date(2021, 5, 10), datetime.date(2021, 5, 17)) == 5\n\n with pytest.raises(MqValueError):\n day_count(datetime.date(2021, 5, 7), '2021-05-10')\n\n\nif __name__ == \"__main__\":\n pytest.main(args=[\"test_datetime.py\"])\n", "\"\"\"\nCopyright 2018 Goldman Sachs.\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing,\nsoftware distributed under the License is distributed on an\n\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\nKIND, either express or implied. See the License for the\nspecific language governing permissions and limitations\nunder the License.\n\"\"\"\nfrom datetime import date\n\nimport pytest\nfrom pandas.testing import assert_series_equal\nfrom scipy.integrate import odeint\nfrom gs_quant.timeseries import *\nfrom gs_quant.timeseries.statistics import Direction\n\n\ndef test_generate_series():\n x = generate_series(100)\n\n assert (len(x) == 100)\n assert (x.index[0] == datetime.date.today())\n assert (x[0] == 100)\n\n x = generate_series(100, Direction.END_TODAY)\n assert (len(x) == 100)\n assert (x.index[-1] == datetime.date.today())\n assert (x[0] == 100)\n\n\ndef test_min():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = min_(x)\n expected = pd.Series([3.0, 2.0, 2.0, 1.0, 1.0, 1.0], index=dates)\n assert_series_equal(result, expected, obj=\"Minimum\")\n\n result = min_(x, Window(1, 0))\n expected = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Minimum window 1\")\n\n result = min_(x, Window(2, 0))\n expected = pd.Series([3.0, 2.0, 2.0, 1.0, 1.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Minimum window 2\")\n\n result = min_(x, Window('1w', 0))\n expected = pd.Series([3.0, 2.0, 2.0, 1.0, 1.0, 1.0], index=dates)\n assert_series_equal(result, expected, obj=\"Minimum with window 1w\")\n\n y = pd.Series([4.0, np.nan, 4.0, 2.0, 2.0, 5.0], index=dates)\n result = min_([x, y], Window(2, 0))\n expected = pd.Series([3.0, 2.0, 2.0, 1.0, 1.0, 2.0], index=dates)\n assert_series_equal(result, expected, obj=\"Minimum of multiple series\")\n\n result = min_(x, \"2d\")\n expected = pd.Series([2.0, 1.0, 3.0, 3.0], index=dates[2:])\n assert_series_equal(result, expected, obj=\"Minimum with strdate window\")\n\n result = min_(x, \"1d\")\n expected = pd.Series([2.0, 3.0, 1.0, 3.0, 6.0], index=dates[1:])\n assert_series_equal(result, expected, obj=\"Minimum with 
strdate window 2\")\n\n\ndef test_max():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = max_(x)\n expected = pd.Series([3.0, 3.0, 3.0, 3.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Maximum\")\n\n result = max_(x, Window(1, 0))\n expected = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Maximum window 1\")\n\n result = max_(x, Window(2, 0))\n expected = pd.Series([3.0, 3.0, 3.0, 3.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Maximum window 2\")\n\n result = max_(x, Window('2d', 0))\n expected = pd.Series([3.0, 3.0, 3.0, 3.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Maximum window 1w\")\n\n y = pd.Series([4.0, np.nan, 4.0, 2.0, 2.0, 5.0], index=dates)\n result = max_([x, y], Window(2, 0))\n expected = pd.Series([4.0, 4.0, 4.0, 4.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Maximum of multiple series\")\n\n s = pd.Series([-3.0, -2.0, 3.0, -1.0, -3.0, 6.0], index=dates)\n t = pd.Series([0, 0], index=dates[0:2])\n result = max_([s, t], 1)\n expected = pd.Series([0.0, 3, 0, 0, 6], index=dates[1:])\n assert_series_equal(result, expected, obj=\"Maximum with constant\")\n\n\ndef test_range():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = range_(x)\n expected = pd.Series([0.0, 1.0, 1.0, 2.0, 2.0, 5.0], index=dates)\n assert_series_equal(result, expected, obj=\"Range\")\n\n result = range_(x, Window(1, 0))\n expected = pd.Series([0.0, 0.0, 0.0, 0.0, 0.0, 0.0], index=dates)\n assert_series_equal(result, expected, obj=\"Range window 1\")\n\n result = range_(x, Window(2, 0))\n expected = pd.Series([0.0, 1.0, 1.0, 2.0, 2.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Range window 2\")\n\n result = range_(x, Window('1w', 0))\n expected = pd.Series([0.0, 1.0, 1.0, 2.0, 2.0, 5.0], index=dates)\n assert_series_equal(result, expected, obj=\"Range window 1w\")\n\n\ndef test_mean():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = mean(x)\n expected = pd.Series([3.0, 2.5, 8 / 3, 2.25, 2.4, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Mean\")\n\n result = mean(x, Window(1, 0))\n expected = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Mean window 1\")\n\n result = mean(x, Window(2, 0))\n expected = pd.Series([3.0, 2.5, 2.5, 2.0, 2.0, 4.5], index=dates)\n assert_series_equal(result, expected, obj=\"Mean window 2\")\n\n result = mean(x, Window('1w', 0))\n expected = pd.Series([3.0, 2.5, 8 / 3, 2.25, 2.4, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Mean window 1w\")\n\n y = pd.Series([4.0, np.nan, 4.0, 2.0, 2.0, 5.0], index=dates)\n result = mean([x, y], Window(2, 0))\n expected = pd.Series([3.5, 3.0, 3.0, 2.5, 2.0, 4.0], index=dates)\n assert_series_equal(result, expected, obj=\"Mean of multiple series\")\n\n\ndef test_median():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n 
date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = median(x)\n expected = pd.Series([3.0, 2.5, 3.0, 2.5, 3.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Median\")\n\n result = median(x, Window(1, 0))\n expected = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"Median window 1\")\n\n result = median(x, Window(2, 0))\n expected = pd.Series([3.0, 2.5, 2.5, 2.0, 2.0, 4.5], index=dates)\n assert_series_equal(result, expected, obj=\"Median window 2\")\n\n result = median(x, Window('1w', 0))\n expected = pd.Series([3.0, 2.5, 3.0, 2.5, 3.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Median window 1w\")\n\n\ndef test_mode():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = mode(x)\n expected = pd.Series([3.0, 2.0, 3.0, 3.0, 3.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"mode\")\n\n result = mode(x, Window(1, 0))\n expected = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n assert_series_equal(result, expected, obj=\"mode window 1\")\n\n result = mode(x, Window(2, 0))\n expected = pd.Series([3.0, 2.0, 2.0, 1.0, 1.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"mode window 2\")\n\n result = mode(x, Window('1w', 0))\n expected = pd.Series([3.0, 2.0, 3.0, 3.0, 3.0, 3.0], index=dates)\n assert_series_equal(result, expected, obj=\"Mode window 1w\")\n\n\ndef test_sum():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], index=dates)\n\n result = sum_(x)\n expected = pd.Series([1.0, 3.0, 6.0, 10, 15, 21], index=dates)\n assert_series_equal(result, expected, obj=\"Summation\")\n\n result = sum_(x, Window(2, 0))\n expected = pd.Series([1.0, 3.0, 5.0, 7.0, 9.0, 11.0], index=dates)\n assert_series_equal(result, expected, obj=\"Summation\")\n\n result = sum_(x, Window('1w', 0))\n expected = pd.Series([1.0, 3.0, 6.0, 10.0, 15.0, 20.0], index=dates)\n assert_series_equal(result, expected, obj=\"Sum window 1w\")\n\n y = pd.Series([4.0, np.nan, 4.0, 2.0, 2.0, 5.0], index=dates)\n result = sum_([x, y], Window(2, 0))\n expected = pd.Series([5.0, 7.0, 9.0, 13.0, 13.0, 18.0], index=dates)\n assert_series_equal(result, expected, obj=\"Sum of multiple series\")\n\n\ndef test_product():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], index=dates)\n\n result = product(x)\n expected = pd.Series([1.0, 2.0, 6.0, 24, 120, 720], index=dates)\n assert_series_equal(result, expected, obj=\"Product\")\n\n result = product(x, Window(2, 0))\n expected = pd.Series([1.0, 2.0, 6.0, 12.0, 20.0, 30.0], index=dates)\n assert_series_equal(result, expected, obj=\"Product\")\n\n result = product(x, Window('1w', 0))\n expected = pd.Series([1.0, 2.0, 6.0, 24.0, 120.0, 720.0], index=dates)\n assert_series_equal(result, expected, obj=\"Product window 1w\")\n\n\ndef test_std():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = std(x)\n expected = pd.Series([np.nan, 
0.707106, 0.577350, 0.957427, 0.894427, 1.673320], index=dates)\n assert_series_equal(result, expected, obj=\"std\", check_less_precise=True)\n\n result = std(x, Window(2, 0))\n expected = pd.Series([np.nan, 0.707106, 0.707106, 1.414214, 1.414214, 2.121320], index=dates)\n assert_series_equal(result, expected, obj=\"std window 2\", check_less_precise=True)\n\n result = std(x, Window('1w', 0))\n expected = pd.Series([np.nan, 0.707106, 0.577350, 0.957427, 0.894427, 1.870828], index=dates)\n assert_series_equal(result, expected, obj=\"std window 1w\", check_less_precise=True)\n\n assert std(pd.Series()).empty\n\n\ndef test_exponential_std():\n def exp_std_calc(ts, alpha=0.75):\n std = ts * 0\n for i in range(1, len(ts)):\n weights = (1 - alpha) * alpha ** np.arange(i, -1, -1)\n weights[0] /= (1 - alpha)\n x = ts.to_numpy()[:i + 1]\n ema = sum(weights * x) / sum(weights)\n debias_fact = sum(weights) ** 2 / (sum(weights) ** 2 - sum(weights ** 2))\n var = debias_fact * sum(weights * (x - ema) ** 2) / sum(weights)\n std[i] = np.sqrt(var)\n std[0] = np.NaN\n return std\n\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = exponential_std(x)\n expected = exp_std_calc(x)\n assert_series_equal(result, expected, obj=\"Exponentially weighted standard deviation\")\n\n result = exponential_std(x, 0.8)\n expected = exp_std_calc(x, 0.8)\n assert_series_equal(result, expected, obj=\"Exponentially weighted standard deviation weight 1\")\n\n\ndef test_var():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = var(x)\n expected = pd.Series([np.nan, 0.500000, 0.333333, 0.916667, 0.800000, 2.800000], index=dates)\n assert_series_equal(result, expected, obj=\"var\", check_less_precise=True)\n\n result = var(x, Window(2, 0))\n expected = pd.Series([np.nan, 0.5, 0.5, 2.0, 2.0, 4.5], index=dates)\n assert_series_equal(result, expected, obj=\"var window 2\", check_less_precise=True)\n\n result = var(x, Window('1w', 0))\n expected = pd.Series([np.nan, 0.500000, 0.333333, 0.916666, 0.800000, 3.500000], index=dates)\n assert_series_equal(result, expected, obj=\"var window 1w\", check_less_precise=True)\n\n\ndef test_cov():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n y = pd.Series([3.5, 1.8, 2.9, 1.2, 3.1, 5.9], index=dates)\n\n result = cov(x, y)\n expected = pd.Series([np.nan, 0.850000, 0.466667, 0.950000, 0.825000, 2.700000], index=dates)\n assert_series_equal(result, expected, obj=\"cov\", check_less_precise=True)\n\n result = cov(x, y, Window(2, 0))\n expected = pd.Series([np.nan, 0.850000, 0.549999, 1.7000000, 1.900000, 4.200000], index=dates)\n assert_series_equal(result, expected, obj=\"cov window 2\", check_less_precise=True)\n\n result = cov(x, y, Window('1w', 0))\n expected = pd.Series([np.nan, 0.850000, 0.466667, 0.950000, 0.825000, 3.375000], index=dates)\n assert_series_equal(result, expected, obj=\"cov window 1w\", check_less_precise=True)\n\n\ndef test_zscores():\n with pytest.raises(MqValueError):\n zscores(pd.Series(range(5)), \"2d\")\n\n assert_series_equal(zscores(pd.Series()), pd.Series())\n assert_series_equal(zscores(pd.Series(), 
1), pd.Series())\n\n assert_series_equal(zscores(pd.Series([1])), pd.Series([0.0]))\n assert_series_equal(zscores(pd.Series([1]), Window(1, 0)), pd.Series([0.0]))\n\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n\n result = zscores(x)\n expected = pd.Series([0.000000, -0.597614, 0.000000, -1.195229, 0.000000, 1.792843], index=dates)\n assert_series_equal(result, expected, obj=\"z-score\", check_less_precise=True)\n\n assert_series_equal(result, (x - x.mean()) / x.std(), obj=\"full series zscore\")\n\n result = zscores(x, Window(2, 0))\n expected = pd.Series([0.0, -0.707107, 0.707107, -0.707107, 0.707107, 0.707107], index=dates)\n assert_series_equal(result, expected, obj=\"z-score window 2\", check_less_precise=True)\n assert_series_equal(zscores(x, Window(5, 5)), zscores(x, 5))\n\n result = zscores(x, Window('1w', 0))\n expected = pd.Series([0.0, -0.707106, 0.577350, -1.305582, 0.670820, 1.603567], index=dates)\n assert_series_equal(result, expected, obj=\"z-score window 1w\", check_less_precise=True)\n\n result = zscores(x, '1w')\n expected = pd.Series([1.603567], index=dates[-1:])\n assert_series_equal(result, expected, obj='z-score window string 1w', check_less_precise=True)\n\n result = zscores(x, '1m')\n expected = pd.Series()\n assert_series_equal(result, expected, obj=\"z-score window too large\", check_less_precise=True)\n\n\ndef test_winsorize():\n assert_series_equal(winsorize(pd.Series()), pd.Series())\n\n x = generate_series(10000)\n r = returns(x)\n\n limit = 1.0\n\n mu = r.mean()\n sigma = r.std()\n\n b_upper = mu + sigma * limit * 1.001\n b_lower = mu - sigma * limit * 1.001\n\n assert (True in r.ge(b_upper).values)\n assert (True in r.le(b_lower).values)\n\n wr = winsorize(r, limit)\n\n assert (True not in wr.ge(b_upper).values)\n assert (True not in wr.le(b_lower).values)\n\n limit = 2.0\n\n mu = r.mean()\n sigma = r.std()\n\n b_upper = mu + sigma * limit * 1.001\n b_lower = mu - sigma * limit * 1.001\n\n assert (True in r.ge(b_upper).values)\n assert (True in r.le(b_lower).values)\n\n wr = winsorize(r, limit)\n\n assert (True not in wr.ge(b_upper).values)\n assert (True not in wr.le(b_lower).values)\n\n\ndef test_percentiles():\n dates = [\n date(2019, 1, 1),\n date(2019, 1, 2),\n date(2019, 1, 3),\n date(2019, 1, 4),\n date(2019, 1, 7),\n date(2019, 1, 8),\n ]\n\n x = pd.Series([3.0, 2.0, 3.0, 1.0, 3.0, 6.0], index=dates)\n y = pd.Series([3.5, 1.8, 2.9, 1.2, 3.1, 6.0], index=dates)\n\n assert_series_equal(percentiles(pd.Series([]), y), pd.Series([]))\n assert_series_equal(percentiles(x, pd.Series([])), pd.Series())\n assert_series_equal(percentiles(x, y, Window(7, 0)), pd.Series())\n\n result = percentiles(x, y, 2)\n expected = pd.Series([50.0, 50.0, 100.0, 75.0], index=dates[2:])\n assert_series_equal(result, expected, obj=\"percentiles with window length 2\")\n\n result = percentiles(x, y, Window(2, 0))\n expected = pd.Series([100.0, 0.0, 50.0, 50.0, 100.0, 75.0], index=dates)\n assert_series_equal(result, expected, obj=\"percentiles with window 2 and ramp 0\")\n\n result = percentiles(x, y, Window('1w', 0))\n expected = pd.Series([100.0, 0.0, 33.333333, 25.0, 100.0, 90.0], index=dates)\n assert_series_equal(result, expected, obj=\"percentiles with window 1w\")\n\n result = percentiles(x, y, Window('1w', '3d'))\n expected = pd.Series([25.0, 100.0, 90.0], index=dates[3:])\n assert_series_equal(result, expected, 
obj=\"percentiles with window 1w and ramp 3d\")\n\n result = percentiles(x)\n expected = pd.Series([50.0, 25.0, 66.667, 12.500, 70.0, 91.667], index=dates)\n assert_series_equal(result, expected, obj=\"percentiles over historical values\", check_less_precise=True)\n\n result = percentiles(x, y)\n expected = pd.Series([100.0, 0.0, 33.333, 25.0, 100.0, 91.667], index=dates)\n assert_series_equal(result, expected, obj=\"percentiles without window length\", check_less_precise=True)\n\n with pytest.raises(ValueError):\n percentiles(x, pd.Series(), Window(6, 1))\n\n\ndef test_percentile():\n with pytest.raises(MqError):\n percentile(pd.Series(), -1)\n with pytest.raises(MqError):\n percentile(pd.Series(), 100.1)\n with pytest.raises(MqTypeError):\n percentile(pd.Series(range(5), index=range(5)), 90, \"2d\")\n\n for n in range(0, 101, 5):\n assert percentile(pd.Series(x * 10 for x in range(0, 11)), n) == n\n\n x = percentile(pd.Series(x for x in range(0, 5)), 50, 2)\n assert_series_equal(x, pd.Series([1.5, 2.5, 3.5], index=pd.RangeIndex(2, 5)))\n\n x = percentile(pd.Series(), 90, \"1d\")\n assert_series_equal(x, pd.Series(), obj=\"Percentile with empty series\")\n\n\ndef test_percentile_str():\n today = datetime.datetime.now()\n days = pd.date_range(today, periods=12, freq='D')\n start = pd.Series([29, 56, 82, 13, 35, 53, 25, 23, 21, 12, 15, 9], index=days)\n actual = percentile(start, 2, '10d')\n expected = pd.Series([12.18, 9.54], index=pd.date_range(today + datetime.timedelta(days=10), periods=2, freq='D'))\n assert_series_equal(actual, expected)\n\n actual = percentile(start, 50, '1w')\n expected = percentile(start, 50, 7)\n assert_series_equal(actual, expected)\n\n\ndef test_regression():\n x1 = pd.Series([0.0, 1.0, 4.0, 9.0, 16.0, 25.0, np.nan], index=pd.date_range('2019-1-1', periods=7), name='x1')\n x2 = pd.Series([0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0], index=pd.date_range('2019-1-1', periods=8))\n y = pd.Series([10.0, 14.0, 20.0, 28.0, 38.0, 50.0, 60.0], index=pd.date_range('2019-1-1', periods=7))\n\n with pytest.raises(MqTypeError):\n LinearRegression([x1, x2], y, 1)\n\n regression = LinearRegression([x1, x2], y, True)\n\n np.testing.assert_almost_equal(regression.coefficient(0), 10.0)\n np.testing.assert_almost_equal(regression.coefficient(1), 1.0)\n np.testing.assert_almost_equal(regression.coefficient(2), 3.0)\n\n np.testing.assert_almost_equal(regression.r_squared(), 1.0)\n\n expected = pd.Series([10.0, 14.0, 20.0, 28.0, 38.0, 50.0], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.fitted_values(), expected)\n\n dates_predict = [date(2019, 2, 1), date(2019, 2, 2)]\n predicted = regression.predict([pd.Series([2.0, 3.0], index=dates_predict),\n pd.Series([6.0, 7.0], index=dates_predict)])\n expected = pd.Series([30.0, 34.0], index=dates_predict)\n assert_series_equal(predicted, expected)\n\n np.testing.assert_almost_equal(regression.standard_deviation_of_errors(), 0)\n\n\ndef test_rolling_linear_regression():\n x1 = pd.Series([0.0, 1.0, 4.0, 9.0, 16.0, 25.0, np.nan], index=pd.date_range('2019-1-1', periods=7), name='x1')\n x2 = pd.Series([0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0], index=pd.date_range('2019-1-1', periods=8))\n y = pd.Series([10.0, 14.0, 20.0, 28.0, 28.0, 40.0, 60.0], index=pd.date_range('2019-1-1', periods=7))\n\n with pytest.raises(MqValueError):\n RollingLinearRegression([x1, x2], y, 3, True)\n\n with pytest.raises(MqTypeError):\n RollingLinearRegression([x1, x2], y, 4, 1)\n\n regression = RollingLinearRegression([x1, x2], y, 4, True)\n\n 
expected = pd.Series([np.nan, np.nan, np.nan, 10.0, 2.5, 19.0], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.coefficient(0), expected, check_names=False)\n\n expected = pd.Series([np.nan, np.nan, np.nan, 1.0, -1.5, 1.0], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.coefficient(1), expected, check_names=False)\n\n expected = pd.Series([np.nan, np.nan, np.nan, 3.0, 12.5, -1.0], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.coefficient(2), expected, check_names=False)\n\n expected = pd.Series([np.nan, np.nan, np.nan, 1.0, 0.964029, 0.901961], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.r_squared(), expected, check_names=False)\n\n expected = pd.Series([np.nan, np.nan, np.nan, 28.0, 28.5, 39.0], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.fitted_values(), expected, check_names=False, check_less_precise=True)\n\n expected = pd.Series([np.nan, np.nan, np.nan, 0.0, 2.236068, 4.472136], index=pd.date_range('2019-1-1', periods=6))\n assert_series_equal(regression.standard_deviation_of_errors(), expected, check_names=False)\n\n\ndef test_sir_model():\n n = 1000\n d = 100\n i0 = 100\n r0 = 0\n s0 = n\n beta = 0.5\n gamma = 0.25\n\n t = np.linspace(0, d, d)\n\n def deriv(y, t_loc, n_loc, beta_loc, gamma_loc):\n s, i, r = y\n dsdt = -beta_loc * s * i / n_loc\n didt = beta_loc * s * i / n_loc - gamma_loc * i\n drdt = gamma_loc * i\n\n return dsdt, didt, drdt\n\n def get_series(beta_loc, gamma_loc):\n # Initial conditions vector\n y0 = s0, i0, r0\n # Integrate the SIR equations over the time grid, t.\n ret = odeint(deriv, y0, t, args=(n, beta_loc, gamma_loc))\n s, i, r = ret.T\n\n dr = pd.date_range(dt.date.today(), dt.date.today() + dt.timedelta(days=d - 1))\n return pd.Series(s, dr), pd.Series(i, dr), pd.Series(r, dr)\n\n (s, i, r) = get_series(beta, gamma)\n\n sir = SIRModel(beta, gamma, s, i, r, n)\n\n assert abs(sir.beta() - beta) < 0.01\n assert abs(sir.gamma() - gamma) < 0.01\n\n beta = 0.4\n gamma = 0.25\n\n (s, i, r) = get_series(0.4, 0.25)\n\n s_predict = sir.predict_s()\n i_predict = sir.predict_i()\n r_predict = sir.predict_r()\n\n assert s_predict.size == d\n assert i_predict.size == d\n assert r_predict.size == d\n\n with pytest.raises(MqTypeError):\n SIRModel(beta, gamma, s, i, r, n, fit=0)\n\n sir = SIRModel(beta, gamma, s, i, r, n, fit=False)\n\n assert sir.beta() == beta\n assert sir.gamma() == gamma\n\n sir1 = SIRModel(beta, gamma, s, i, r, n, fit=False)\n\n with DataContext(end=dt.date.today() + dt.timedelta(days=d - 1)):\n sir2 = SIRModel(beta, gamma, s[0], i, r[0], n, fit=False)\n\n assert sir1.beta() == sir1.beta()\n assert sir2.gamma() == sir2.gamma()\n assert (sir1.predict_i() == sir2.predict_i()).all()\n assert (sir1.predict_r() == sir2.predict_r()).all()\n assert (sir1.predict_s() == sir2.predict_s()).all()\n\n\ndef test_seir_model():\n n = 1000\n d = 100\n e0 = 1\n i0 = 1\n r0 = 0\n s0 = n\n beta = 0.5\n gamma = 0.2\n sigma = 1\n\n t = np.linspace(0, d, d)\n\n def deriv(y, t_loc, n_loc, beta_loc, gamma_loc, sigma_loc):\n s, e, i, r = y\n dsdt = -beta_loc * s * i / n_loc\n dedt = beta_loc * s * i / n_loc - sigma_loc * e\n didt = sigma_loc * e - gamma * i\n drdt = gamma_loc * i\n\n return dsdt, dedt, didt, drdt\n\n def get_series(beta_loc, gamma_loc, sigma_loc):\n # Initial conditions vector\n y0 = s0, e0, i0, r0\n # Integrate the SEIR equations over the time grid, t.\n ret = odeint(deriv, y0, t, args=(n, beta_loc, 
gamma_loc, sigma_loc))\n s, e, i, r = ret.T\n\n dr = pd.date_range(dt.date.today(), dt.date.today() + dt.timedelta(days=d - 1))\n return pd.Series(s, dr), pd.Series(e, dr), pd.Series(i, dr), pd.Series(r, dr)\n\n (s, e, i, r) = get_series(beta, gamma, sigma)\n\n seir = SEIRModel(beta, gamma, sigma, s, e, i, r, n)\n\n assert abs(seir.beta() - beta) < 0.01\n assert abs(seir.gamma() - gamma) < 0.01\n assert abs(seir.sigma() - sigma) < 0.01\n\n s_predict = seir.predict_s()\n e_predict = seir.predict_e()\n i_predict = seir.predict_i()\n r_predict = seir.predict_i()\n\n assert s_predict.size == d\n assert e_predict.size == d\n assert i_predict.size == d\n assert r_predict.size == d\n\n with pytest.raises(MqTypeError):\n SEIRModel(beta, gamma, sigma, s, e, i, r, n, fit=0)\n\n seir = SEIRModel(beta, gamma, sigma, s, e, i, r, n, fit=False)\n\n assert seir.beta() == beta\n assert seir.gamma() == gamma\n assert seir.sigma() == sigma\n\n seir1 = SEIRModel(beta, gamma, sigma, s, e, i, r, n, fit=False)\n\n with DataContext(end=dt.date.today() + dt.timedelta(days=d - 1)):\n seir2 = SEIRModel(beta, gamma, sigma, s[0], e[0], i, r[0], n, fit=False)\n\n assert seir1.beta() == seir1.beta()\n assert seir2.gamma() == seir2.gamma()\n assert seir2.sigma() == seir2.sigma()\n assert (seir1.predict_i() == seir2.predict_i()).all()\n assert (seir1.predict_e() == seir2.predict_e()).all()\n assert (seir1.predict_r() == seir2.predict_r()).all()\n assert (seir1.predict_s() == seir2.predict_s()).all()\n\n\nif __name__ == \"__main__\":\n pytest.main(args=[\"test_statistics.py\"])\n" ]
[ [ "pandas.testing.assert_series_equal" ], [ "pandas.testing.assert_series_equal", "scipy.integrate.odeint" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] } ]
Fa-Li/E3SM
[ "a91995093ec6fc0dd6e50114f3c70b5fb64de0f0", "a91995093ec6fc0dd6e50114f3c70b5fb64de0f0", "a91995093ec6fc0dd6e50114f3c70b5fb64de0f0" ]
[ "components/mpas-seaice/testing_and_setup/forcing/create_ocean_forcing.py", "components/mpas-seaice/testing_and_setup/testcases/square/operators_strain_stress_divergence/strain_stress_divergence_map.py", "components/mpas-seaice/testing_and_setup/testcases/spherical_operators/strain/average_variational_strains.py" ]
[ "from __future__ import print_function\nfrom netCDF4 import Dataset\nimport netCDF4\nimport numpy as np\nimport os\nimport sys\nimport ConfigParser\nimport math\nfrom scipy.interpolate import griddata\nfrom create_forcing import create_scrip_grid_file, get_mpas_grid_info, create_scrip_file_MPAS, write_scrip_in_file, create_output_times, get_remapping_data\n\n#-------------------------------------------------------------------------------\n\ndef latlon_to_xyz(lat, lon):\n\n x = math.cos(lat) * math.cos(lon)\n y = math.cos(lat) * math.sin(lon)\n z = math.sin(lat)\n\n return x, y, z\n\n#-------------------------------------------------------------------------------\n\ndef xyz_to_latlon(x, y, z):\n\n lon = 0.0\n if (x != 0.0 or y != 0.0): lon = math.atan2(y,x)\n\n lat = math.asin(z / math.sqrt(x*x + y*y + z*z))\n\n return lat, lon\n\n#-------------------------------------------------------------------------------\n\ndef create_scrip_file_gx1(filenameScrip, filenameGx1Grid):\n\n filenameGx1Grid = \"/Users/akt/Work/Forcing/gx1/grid_info/global_gx1.nc\"\n fileIn = Dataset(filenameGx1Grid,\"r\")\n\n nx = len(fileIn.dimensions[\"nx\"])\n ny = len(fileIn.dimensions[\"ny\"])\n\n ULONin = fileIn.variables[\"ULON\"][:]\n ULATin = fileIn.variables[\"ULAT\"][:]\n\n KMT = fileIn.variables[\"KMT\"][:]\n\n fileIn.close()\n\n nCells = nx * ny\n gridDims = [nx, ny]\n\n gridImask = np.ones(nCells,dtype=\"i\")\n\n ULAT = np.zeros((ny+1,nx+1))\n ULON = np.zeros((ny+1,nx+1))\n\n ULAT[1:,1:] = ULATin[:,:]\n ULON[1:,1:] = ULONin[:,:]\n\n ULAT[:,0] = ULAT[:,-1]\n ULON[:,0] = ULON[:,-1]\n\n ULON[0,:] = ULON[1,:]\n ULAT[0,:] = ULAT[1,:] - math.pi / 180.0\n\n cornerLat = np.zeros((4,nCells))\n cornerLon = np.zeros((4,nCells))\n\n for i in range(0,nx):\n for j in range(0,ny):\n\n ii = i + 1\n jj = j + 1\n\n iCell = ii + nx * (jj-1) - 1\n\n i1 = ii-1 ; j1 = jj-1\n i2 = ii ; j2 = jj-1\n i3 = ii ; j3 = jj\n i4 = ii-1 ; j4 = jj\n\n cornerLat[0,iCell] = ULAT[j1,i1]\n cornerLat[1,iCell] = ULAT[j2,i2]\n cornerLat[2,iCell] = ULAT[j3,i3]\n cornerLat[3,iCell] = ULAT[j4,i4]\n\n cornerLon[0,iCell] = ULON[j1,i1]\n cornerLon[1,iCell] = ULON[j2,i2]\n cornerLon[2,iCell] = ULON[j3,i3]\n cornerLon[3,iCell] = ULON[j4,i4]\n\n centerLat = np.zeros(nCells)\n centerLon = np.zeros(nCells)\n\n for i in range(0,nx):\n for j in range(0,ny):\n\n ii = i + 1\n jj = j + 1\n\n iCell = ii + nx * (jj-1) - 1\n\n x1,y1,z1 = latlon_to_xyz(cornerLat[0,iCell],cornerLon[0,iCell])\n x2,y2,z2 = latlon_to_xyz(cornerLat[1,iCell],cornerLon[1,iCell])\n x3,y3,z3 = latlon_to_xyz(cornerLat[2,iCell],cornerLon[2,iCell])\n x4,y4,z4 = latlon_to_xyz(cornerLat[3,iCell],cornerLon[3,iCell])\n\n x0 = 0.25 * (x1 + x2 + x3 + x4)\n y0 = 0.25 * (y1 + y2 + y3 + y4)\n z0 = 0.25 * (z1 + z2 + z3 + z4)\n\n centerLat[iCell], centerLon[iCell] = xyz_to_latlon(x0, y0, z0)\n\n create_scrip_grid_file(filenameScrip, nCells, 4, 2, gridDims, centerLat, centerLon, gridImask, cornerLat, cornerLon, \"gx1\")\n\n#-------------------------------------------------------------------------------\n\ndef fill_array(arrayIn):\n\n nTimes = arrayIn.shape[0]\n nx = arrayIn.shape[1]\n ny = arrayIn.shape[2]\n\n arrayOut = np.zeros((nTimes,nx,ny))\n arrayOut[:] = arrayIn[:]\n\n grid_x, grid_y = np.mgrid[0:nx, 0:ny]\n\n for iTime in range(0,nTimes):\n\n array = np.zeros((nx,3*ny))\n\n array[:, 0: ny] = arrayIn[iTime,:,:]\n array[:, ny:2*ny] = arrayIn[iTime,:,:]\n array[:,2*ny:3*ny] = arrayIn[iTime,:,:]\n\n pointsGood = []\n valuesGood = []\n\n pointsBad = []\n\n for i in range(0,nx):\n for j in range(0,ny):\n 
if (array[i,j] > -900.0):\n pointsGood.append((i,j))\n valuesGood.append(array[i,j])\n else:\n pointsBad.append((i,j))\n\n pointsGood = np.array(pointsGood)\n valuesGood = np.array(valuesGood)\n pointsBad = np.array(pointsBad)\n\n valuesBad = griddata(pointsGood, valuesGood, (grid_x, grid_y), method='nearest')\n\n for iBad in range(0,pointsBad.shape[0]):\n i = pointsBad[iBad,0]\n j = pointsBad[iBad,1]\n arrayOut[iTime,i,j] = valuesBad[i,j]\n\n return arrayOut\n\n#-------------------------------------------------------------------------------\n\ndef interpolate_array(nCells, remapMatrix, arrayIn):\n\n arrayOut = np.zeros((12,nCells))\n\n for iTime in range(0,12):\n\n arrayInTime = arrayIn[iTime,:,:].flatten()\n\n arrayOut[iTime,:] = remapMatrix.dot(arrayInTime)\n\n return arrayOut\n\n#-------------------------------------------------------------------------------\n\ndef create_forcing(\\\n filenameIn, \\\n filenameOut, \\\n nCells, \\\n remapMatrix):\n\n fileIn = Dataset(filenameIn,\"r\")\n\n fileOut = Dataset(filenameOut,\"w\",format=\"NETCDF3_CLASSIC\")\n\n fileOut.createDimension(\"nCells\",nCells)\n fileOut.createDimension(\"StrLen\",64)\n fileOut.createDimension(\"Time\",None)\n\n # time\n xtimes = create_output_times(12, 0)\n varXtime = fileOut.createVariable(\"xtime\",\"c\",dimensions=[\"Time\",\"StrLen\"])\n for iTime in range(0,12):\n varXtime[iTime,0:19] = netCDF4.stringtochar(np.array(xtimes[iTime], 'S19'))\n varXtime[iTime,19:] = \" \"*45\n\n varSST = fileOut.createVariable(\"seaSurfaceTemperature\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varSSS = fileOut.createVariable(\"seaSurfaceSalinity\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varU = fileOut.createVariable(\"uOceanVelocity\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varV = fileOut.createVariable(\"vOceanVelocity\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varDhdx = fileOut.createVariable(\"seaSurfaceTiltU\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varDhdy = fileOut.createVariable(\"seaSurfaceTiltV\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varHblt = fileOut.createVariable(\"oceanMixedLayerDepth\", \"d\",dimensions=[\"Time\",\"nCells\"])\n varQdp = fileOut.createVariable(\"oceanHeatFluxConvergence\",\"d\",dimensions=[\"Time\",\"nCells\"])\n\n print(\"Interpolate seaSurfaceTemperature\")\n arrayIn = fileIn.variables[\"T\"][:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varSST[:] = arrayOut[:]\n\n print(\"Interpolate seaSurfaceSalinity\")\n arrayIn = fileIn.variables[\"S\"][:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varSSS[:] = arrayOut[:]\n\n print(\"Interpolate uOceanVelocity\")\n arrayIn = fileIn.variables[\"U\"][:,0,:,:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varU[:] = arrayOut[:]\n\n print(\"Interpolate vOceanVelocity\")\n arrayIn = fileIn.variables[\"V\"][:,0,:,:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varV[:] = arrayOut[:]\n\n print(\"Interpolate seaSurfaceTiltU\")\n arrayIn = fileIn.variables[\"dhdx\"][:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varDhdx[:] = arrayOut[:]\n\n print(\"Interpolate seaSurfaceTiltV\")\n arrayIn = fileIn.variables[\"dhdy\"][:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varDhdy[:] = arrayOut[:]\n\n print(\"Interpolate oceanMixedLayerDepth\")\n arrayIn = 
fileIn.variables[\"hblt\"][:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varHblt[:] = arrayOut[:]\n\n print(\"Interpolate oceanHeatFluxConvergence\")\n arrayIn = fileIn.variables[\"qdp\"][:]\n arrayIn = fill_array(arrayIn)\n arrayOut = interpolate_array(nCells, remapMatrix, arrayIn)\n varQdp[:] = arrayOut[:]\n\n fileIn.close()\n fileOut.close()\n\n#-------------------------------------------------------------------------------\n\ndef perform_remapping(\\\n filenameMPASGrid, \\\n filenameGx1Grid, \\\n filenameGx1OceanMixed, \\\n filenameMPASOceanMixed, \\\n scripDir):\n\n # create MPAS scrip grid file\n print(\"create_scrip_file_MPAS\")\n scripGridFilename = \"remap_grid_MPAS_tmp.nc\"\n create_scrip_file_MPAS(filenameMPASGrid, scripGridFilename)\n\n # create gx1 scrip grid file\n print(\"create_scrip_file_gx1\")\n scripGx1Filename = \"remap_grid_gx1_tmp.nc\"\n create_scrip_file_gx1(scripGx1Filename, filenameGx1Grid)\n\n # create input scrip file\n print(\"write_scrip_in_file\")\n write_scrip_in_file(\"gx1\")\n\n # run scrip to generate weights\n print(\"SCRIP\")\n cmd = scripDir + \"/scrip\"\n os.system(cmd)\n\n # get remapping weights\n print(\"get_remapping_data\")\n filenameRemapping = \"remap_gx1_to_MPAS_tmp.nc\"\n remapMatrix, dstGridSize = get_remapping_data(filenameRemapping)\n\n print(\"create_forcing ocean climatology\")\n # combined ocean climatology\n create_forcing(\\\n filenameGx1OceanMixed, \\\n filenameMPASOceanMixed, \\\n dstGridSize, \\\n remapMatrix)\n\n#-------------------------------------------------------------------------------\n\n'''\ncreate_ocean_forcing.py\n=======================\n\nUsage\n-----\n\nThis script creates ocean forcing using CESM output.\n\nUsage: python create_ocean_forcing.py configFilename\n\nwhere configFilename is a python config file with the following example format:\n\n[forcing_generation]\nfilenameMPASGrid = /location/of/MPAS/grid\nfilenameGx1Grid = /location/of/gx1/grid\nfilenameGx1OceanMixed = /location/of/gx1/ocean_mixed_file\nfilenameMPASOceanMixed = /location/of/output/ocean_mixed_file\nscripDir = /location/of/SCRIP/executable\n\nSCRIP\n-----\n\nThis script requires the SCRIP package to be installed.\nSCRIP is a software package which computes addresses and weights for remapping\nand interpolating fields between grids in spherical coordinates. It can be\nobtained from https://github.com/SCRIP-Project/SCRIP\n\ngx1 input data\n--------------\n\nThis script requires a gx1 grid file and ocean mixed file as input. 
These can be\nobtained from:\nhttps://web.lcrc.anl.gov/public/e3sm/mpas_standalonedata/mpas-seaice/forcing/\nMPAS-Seaice_clim_data.tar.gz\n'''\n\nif (len(sys.argv) != 2):\n print(\"Usage: python create_ocean_forcing.py configFilename\")\n sys.exit()\n\nconfig = ConfigParser.ConfigParser()\nconfig.read(sys.argv[1])\n\nfilenameMPASGrid = config.get('forcing_generation','filenameMPASGrid')\nfilenameGx1Grid = config.get('forcing_generation','filenameGx1Grid')\nfilenameGx1OceanMixed = config.get('forcing_generation','filenameGx1OceanMixed')\nfilenameMPASOceanMixed = config.get('forcing_generation','filenameMPASOceanMixed')\nscripDir = config.get('forcing_generation','scripDir')\n\nperform_remapping(\\\n filenameMPASGrid, \\\n filenameGx1Grid, \\\n filenameGx1OceanMixed, \\\n filenameMPASOceanMixed, \\\n scripDir)\n", "from netCDF4 import Dataset\nimport numpy as np\nimport matplotlib as mpl\nfrom matplotlib.patches import Polygon\nfrom matplotlib.collections import PatchCollection\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\nimport math\n\ndegreesToRadians = math.pi / 180.0\n\n#---------------------------------------------------------------\n\ndef cm2inch(value):\n return value/2.54\n\n#---------------------------------------------------------------\n\ndef get_mpas_patch_collection(nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertexx, latVertex, mpasArray, cmap, vmin, vmax, minX, maxX, minY, maxY):\n\n patches = []\n colours = []\n\n minval = 1.0e30\n maxval = -1.0e30\n\n for iVertex in range(0,nVertices):\n\n polygonVertices = []\n\n useVertex = False\n for iCellOnVertex in range(0,vertexDegree):\n\n iCell = cellsOnVertex[iVertex,iCellOnVertex]\n\n polygonVertices.append((xCell[iCell],yCell[iCell]))\n\n if (xCell[iCell] >= minX and xCell[iCell] <= maxX and \\\n yCell[iCell] >= minY and yCell[iCell] <= maxY):\n useVertex = True\n\n if (useVertex and (useVertexx[iVertex] == 1)):\n polygon = Polygon(polygonVertices)\n patches.append(polygon)\n\n colours.append(mpasArray[iVertex])\n\n minval = min(minval,mpasArray[iVertex])\n maxval = max(maxval,mpasArray[iVertex])\n\n patchCollection = PatchCollection(patches, cmap=cmap, rasterized=True)\n patchCollection.set_array(np.array(colours))\n patchCollection.set_linewidth(0)\n\n patchCollection.set_clim(vmin=vmin,vmax=vmax)\n\n return patchCollection, minval, maxval\n\n#---------------------------------------------------------------\n\ndef plot_subfigure(axes, fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, array, vmin, vmax, minX, maxX, minY, maxY, sciNote=False, diffPlot=False, title=None, subfigureLabel=None, colorbar=True, unityBar=False):\n\n if (not diffPlot):\n #colourMap = mpl.cm.jet\n colourMap = mpl.cm.seismic\n else:\n #colourMap = mpl.cm.RdBu\n colourMap = mpl.cm.seismic\n\n patchCollection, minArray, maxArray = get_mpas_patch_collection(nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, array, colourMap, vmin, vmax, minX, maxX, minY, maxY)\n axes.add_collection(patchCollection)\n axes.set_ylim([minY,maxY])\n axes.set_xlim([minX,maxX])\n axes.set_xticks([])\n axes.set_yticks([])\n axes.set_aspect('equal', adjustable='box')\n\n if (title != None):\n axes.set_title(title, fontsize=8)\n\n if (subfigureLabel != None):\n axes.text(0.02, 0.89, subfigureLabel, verticalalignment='bottom', horizontalalignment='left',transform=axes.transAxes, fontsize=8)\n\n if (colorbar):\n divider = make_axes_locatable(axes)\n cax 
= divider.append_axes(\"right\", size=\"5%\", pad=0.05)\n cb = fig.colorbar(patchCollection,cax=cax)\n if (unityBar):\n cb.ax.set_yticklabels(['-1.0','-0.5','0.0','0.5','1.0'])\n if (sciNote):\n cb.formatter.set_powerlimits((0, 0))\n cb.update_ticks()\n\n#---------------------------------------------------------------\n\ndef strain_stress_divergence_map():\n\n # grid quad\n fileGrid = Dataset(\"grid_hex_0082x0094.nc\",\"r\")\n\n nCells = len(fileGrid.dimensions[\"nCells\"])\n nVertices = len(fileGrid.dimensions[\"nVertices\"])\n vertexDegree = len(fileGrid.dimensions[\"vertexDegree\"])\n\n vertexDegreeArr = np.zeros(nVertices,dtype=\"i\")\n vertexDegreeArr[:] = vertexDegree\n\n nEdgesOnCell = fileGrid.variables[\"nEdgesOnCell\"][:]\n\n cellsOnVertex = fileGrid.variables[\"cellsOnVertex\"][:]\n cellsOnVertex[:] = cellsOnVertex[:] - 1\n\n verticesOnCell = fileGrid.variables[\"verticesOnCell\"][:]\n verticesOnCell[:] = verticesOnCell[:] - 1\n\n cellsOnCell = fileGrid.variables[\"cellsOnCell\"][:]\n cellsOnCell[:] = cellsOnCell[:] - 1\n\n latVertex = fileGrid.variables[\"latVertex\"][:]\n latCell = fileGrid.variables[\"latCell\"][:]\n\n xCell = fileGrid.variables[\"xCell\"][:]\n yCell = fileGrid.variables[\"yCell\"][:]\n zCell = fileGrid.variables[\"zCell\"][:]\n\n xVertex = fileGrid.variables[\"xVertex\"][:]\n yVertex = fileGrid.variables[\"yVertex\"][:]\n zVertex = fileGrid.variables[\"zVertex\"][:]\n\n xMin = np.amin(xVertex)\n xMax = np.amax(xVertex)\n yMin = np.amin(yVertex)\n yMax = np.amax(yVertex)\n\n fileGrid.close()\n\n # ic hex\n fileIC = Dataset(\"ic_hex_0082x0094.nc\",\"r\")\n\n uVelocity = fileIC.variables[\"uVelocity\"][:]\n vVelocity = fileIC.variables[\"vVelocity\"][:]\n\n stressDivergenceUAnalytical = fileIC.variables[\"stressDivergenceUAnalytical\"][:]\n stressDivergenceVAnalytical = fileIC.variables[\"stressDivergenceVAnalytical\"][:]\n\n print(\"Stress divergence: \",\n np.amin(stressDivergenceUAnalytical), np.amax(stressDivergenceUAnalytical),\n np.amin(stressDivergenceVAnalytical), np.amax(stressDivergenceVAnalytical))\n\n fileIC.close()\n\n # Wachspress\n fileWach = Dataset(\"./output_hex_wachsavg_0082x0094/output.2000.nc\",\"r\")\n\n interiorCell = fileWach.variables[\"interiorCell\"][0,:]\n\n stressDivergenceUWach = fileWach.variables[\"stressDivergenceU\"][0,:]\n stressDivergenceVWach = fileWach.variables[\"stressDivergenceV\"][0,:]\n\n stressDivergenceUWachDiff = (stressDivergenceUWach - stressDivergenceUAnalytical)\n stressDivergenceVWachDiff = (stressDivergenceVWach - stressDivergenceVAnalytical)\n\n print(\"Wachs: \",\n np.amin(stressDivergenceUWachDiff), np.amax(stressDivergenceUWachDiff),\n np.amin(stressDivergenceVWachDiff), np.amax(stressDivergenceVWachDiff))\n\n fileWach.close()\n\n useVertex = np.ones(nVertices,dtype=\"i\")\n for iCell in range(0,nCells):\n if (interiorCell[iCell] == 0):\n for iCellOnCell in range(0,nEdgesOnCell[iCell]):\n iCell2 = cellsOnCell[iCell,iCellOnCell]\n if (iCell2 < nCells):\n for iVertexOnCell in range(0,nEdgesOnCell[iCell2]):\n iVertex = verticesOnCell[iCell2,iVertexOnCell]\n useVertex[iVertex] = 0\n\n # PWL\n filePWL = Dataset(\"./output_hex_pwlavg_0082x0094/output.2000.nc\",\"r\")\n\n stressDivergenceUPWL = filePWL.variables[\"stressDivergenceU\"][0,:]\n stressDivergenceVPWL = filePWL.variables[\"stressDivergenceV\"][0,:]\n\n stressDivergenceUPWLDiff = (stressDivergenceUPWL - stressDivergenceUAnalytical)\n stressDivergenceVPWLDiff = (stressDivergenceVPWL - stressDivergenceVAnalytical)\n\n print(\"PWL: \",\n 
np.amin(stressDivergenceUPWLDiff), np.amax(stressDivergenceUPWLDiff),\n np.amin(stressDivergenceVPWLDiff), np.amax(stressDivergenceVPWLDiff))\n\n filePWL.close()\n\n # Weak\n fileWeak = Dataset(\"./output_hex_weak_0082x0094/output.2000.nc\",\"r\")\n\n stressDivergenceUWeak = fileWeak.variables[\"stressDivergenceU\"][0,:]\n stressDivergenceVWeak = fileWeak.variables[\"stressDivergenceV\"][0,:]\n\n stressDivergenceUWeakDiff = (stressDivergenceUWeak - stressDivergenceUAnalytical)\n stressDivergenceVWeakDiff = (stressDivergenceVWeak - stressDivergenceVAnalytical)\n\n print(\"Weak: \",\n np.amin(stressDivergenceUWeakDiff), np.amax(stressDivergenceUWeakDiff),\n np.amin(stressDivergenceVWeakDiff), np.amax(stressDivergenceVWeakDiff))\n\n fileWeak.close()\n\n # Weak Wachspress\n fileWach = Dataset(\"./output_hex_weakwachs_0082x0094/output.2000.nc\",\"r\")\n\n stressDivergenceUWeakWach = fileWach.variables[\"stressDivergenceU\"][0,:]\n stressDivergenceVWeakWach = fileWach.variables[\"stressDivergenceV\"][0,:]\n\n stressDivergenceUWeakWachDiff = (stressDivergenceUWeakWach - stressDivergenceUAnalytical)\n stressDivergenceVWeakWachDiff = (stressDivergenceVWeakWach - stressDivergenceVAnalytical)\n\n print(\"Weak Wachs: \",\n np.amin(stressDivergenceUWeakWachDiff), np.amax(stressDivergenceUWeakWachDiff),\n np.amin(stressDivergenceVWeakWachDiff), np.amax(stressDivergenceVWeakWachDiff))\n\n fileWach.close()\n\n # Weak PWL\n filePWL = Dataset(\"./output_hex_weakpwl_0082x0094/output.2000.nc\",\"r\")\n\n stressDivergenceUWeakPWL = filePWL.variables[\"stressDivergenceU\"][0,:]\n stressDivergenceVWeakPWL = filePWL.variables[\"stressDivergenceV\"][0,:]\n\n stressDivergenceUWeakPWLDiff = (stressDivergenceUWeakPWL - stressDivergenceUAnalytical)\n stressDivergenceVWeakPWLDiff = (stressDivergenceVWeakPWL - stressDivergenceVAnalytical)\n\n print(\"Weak PWL: \",\n np.amin(stressDivergenceUWeakPWLDiff), np.amax(stressDivergenceUWeakPWLDiff),\n np.amin(stressDivergenceVWeakPWLDiff), np.amax(stressDivergenceVWeakPWLDiff))\n\n filePWL.close()\n\n\n mpl.rc('font', family='Times New Roman', size=8)\n mpl.rc('text', usetex=True)\n mpl.rcParams['axes.linewidth'] = 0.5\n\n minVelocity = -1.0\n maxVelocity = 1.0\n\n minStressDiv = -750.0\n maxStressDiv = 750.0\n\n minStressDivDiff = -40.0\n maxStressDivDiff = 40.0\n\n\n fig, axes = plt.subplots(4, 4)\n\n fig.set_size_inches(7, 6.75)\n\n plot_subfigure(axes[0,0], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, uVelocity, minVelocity, maxVelocity, xMin, xMax, yMin, yMax, \\\n False, False, r'$u^\\prime$', '(a)', False)\n plot_subfigure(axes[0,1], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, vVelocity, minVelocity, maxVelocity, xMin, xMax, yMin, yMax, \\\n False, False, r'$v^\\prime$', '(b)', False)\n plot_subfigure(axes[0,2], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceUAnalytical, minStressDiv, maxStressDiv, xMin, xMax, yMin, yMax, \\\n False, False, r'$(\\nabla \\cdot \\sigma)_{u^\\prime}$ Analytical', r'(c)$\\times20$', False)\n plot_subfigure(axes[0,3], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceVAnalytical, minStressDiv, maxStressDiv, xMin, xMax, yMin, yMax, \\\n False, False, r'$(\\nabla \\cdot \\sigma)_{v^\\prime}$ Analytical', r'(d)$\\times20$', True)\n\n plot_subfigure(axes[1,0], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, 
stressDivergenceUWachDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Wachs. ($u^\\prime$ direction)', '(e)', False)\n plot_subfigure(axes[1,1], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceVWachDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Wachs. ($v^\\prime$ direction)', '(f)', False)\n plot_subfigure(axes[1,2], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceUPWLDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'PWL ($u^\\prime$ direction)', '(g)', False)\n plot_subfigure(axes[1,3], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceVPWLDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'PWL ($v^\\prime$ direction)', '(h)', True)\n\n plot_subfigure(axes[2,0], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceUWeakWachDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Weak Wachs. ($u^\\prime$ direction)', '(i)', False)\n plot_subfigure(axes[2,1], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceVWeakWachDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Weak Wachs. ($v^\\prime$ direction)', '(j)', False)\n plot_subfigure(axes[2,2], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceUWeakPWLDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Weak PWL ($u^\\prime$ direction)', '(k)', False)\n plot_subfigure(axes[2,3], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceVWeakPWLDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Weak PWL ($v^\\prime$ direction)', '(l)', True)\n\n plot_subfigure(axes[3,0], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceUWeakDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Weak ($u^\\prime$ direction)', '(m)', False)\n plot_subfigure(axes[3,1], fig, nVertices, vertexDegree, cellsOnVertex, xCell, yCell, zCell, useVertex, latVertex, stressDivergenceVWeakDiff, minStressDivDiff, maxStressDivDiff, xMin, xMax, yMin, yMax, \\\n False, False, r'Weak ($v^\\prime$ direction)', '(n)', True)\n axes[3,2].axis('off')\n axes[3,3].axis('off')\n\n\n\n #plt.tight_layout(pad=0.5, w_pad=0.5, h_pad=0.5)\n plt.savefig(\"strain_stress_divergence_map.png\",dpi=400)\n #plt.savefig(\"strain_stress_divergence_map_3.png\", bbox_inches=\"tight\",dpi=2000)\n\n plt.clf()\n plt.cla()\n plt.close()\n\n\n\n#-------------------------------------------------------------------------------\n\nif __name__ == \"__main__\":\n\n strain_stress_divergence_map()\n", "from netCDF4 import Dataset\nimport numpy as np\n\n#-------------------------------------------------------------------------------\n\ndef average_variational_strains():\n\n gridSizes = [2562, 10242, 40962, 163842]\n\n operatorMethods = [\"wachspress\",\"pwl\",\"weakwachs\"]\n\n for operatorMethod in operatorMethods:\n\n print(\"Operator Method: \", operatorMethod)\n\n for gridSize in gridSizes:\n\n print(\" Gridsize: \", gridSize)\n \n filenameModify = \"./output_%s_%i/output.2000.nc\" 
%(operatorMethod, gridSize)\n fileModify = Dataset(filenameModify,\"a\")\n\n nVertices = len(fileModify.dimensions[\"nVertices\"])\n nCells = len(fileModify.dimensions[\"nCells\"])\n vertexDegree = len(fileModify.dimensions[\"vertexDegree\"])\n nTimes = len(fileModify.dimensions[\"Time\"])\n\n cellsOnVertex = fileModify.variables[\"cellsOnVertex\"][:]\n cellVerticesAtVertex = fileModify.variables[\"cellVerticesAtVertex\"][:]\n\n cellsOnVertex[:] -= 1\n cellVerticesAtVertex[:] -= 1\n\n strain11var = fileModify.variables[\"strain11var\"][:]\n strain22var = fileModify.variables[\"strain22var\"][:]\n strain12var = fileModify.variables[\"strain12var\"][:]\n\n strain11varAvg = np.zeros((nTimes,nVertices))\n strain22varAvg = np.zeros((nTimes,nVertices))\n strain12varAvg = np.zeros((nTimes,nVertices))\n\n for iTime in range(0, nTimes):\n\n for iVertex in range(0, nVertices):\n\n strain11avg = 0.0\n strain22avg = 0.0\n strain12avg = 0.0\n nCellsSum = 0\n\n for iVertexDegree in range(0,vertexDegree):\n\n iCell = cellsOnVertex[iVertex,iVertexDegree]\n\n if (iCell <= nCells-1):\n\n iVertexOnCell = cellVerticesAtVertex[iVertex,iVertexDegree]\n\n strain11avg = strain11avg + strain11var[iTime,iCell,iVertexOnCell]\n strain22avg = strain22avg + strain22var[iTime,iCell,iVertexOnCell]\n strain12avg = strain12avg + strain12var[iTime,iCell,iVertexOnCell]\n nCellsSum = nCellsSum + 1\n\n strain11varAvg[iTime,iVertex] = strain11avg / float(nCellsSum)\n strain22varAvg[iTime,iVertex] = strain22avg / float(nCellsSum)\n strain12varAvg[iTime,iVertex] = strain12avg / float(nCellsSum)\n\n #print(iTime,iVertex,strain11varAvg[iTime,iVertex],strain11avg,float(nCellsSum))\n\n try:\n var = fileModify.createVariable(\"strain11varAvg\",\"d\",dimensions=[\"Time\",\"nVertices\"])\n except:\n var = fileModify.variables[\"strain11varAvg\"]\n var[:] = strain11varAvg[:]\n\n try:\n var = fileModify.createVariable(\"strain22varAvg\",\"d\",dimensions=[\"Time\",\"nVertices\"])\n except:\n var = fileModify.variables[\"strain22varAvg\"]\n var[:] = strain22varAvg[:]\n\n try:\n var = fileModify.createVariable(\"strain12varAvg\",\"d\",dimensions=[\"Time\",\"nVertices\"])\n except:\n var = fileModify.variables[\"strain12varAvg\"]\n var[:] = strain12varAvg[:]\n\n fileModify.close()\n\n#-------------------------------------------------------------------------------\n\nif __name__ == \"__main__\":\n\n average_variational_strains()\n" ]
[ [ "numpy.array", "numpy.zeros", "scipy.interpolate.griddata", "numpy.ones" ], [ "numpy.amax", "matplotlib.collections.PatchCollection", "matplotlib.patches.Polygon", "numpy.amin", "matplotlib.pyplot.cla", "matplotlib.pyplot.subplots", "matplotlib.pyplot.savefig", "numpy.ones", "matplotlib.pyplot.clf", "matplotlib.pyplot.close", "numpy.array", "numpy.zeros", "matplotlib.rc" ], [ "numpy.zeros" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "1.7", "1.0", "0.10", "1.2", "0.14", "0.19", "1.5", "0.12", "0.17", "0.13", "1.6", "1.4", "1.9", "1.3", "1.10", "0.15", "0.18", "0.16", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
minkowski0125/multilayer-gcn-simulation
[ "15a4cd29d819246549148e3a32c99f3b8589f3b4" ]
[ "main.py" ]
[ "import json\nfrom utils import *\nfrom config import args\nfrom train import train\nfrom torch.utils.tensorboard import SummaryWriter\n\nif __name__ == '__main__':\n set_seed(args.seed)\n\n series = []\n if args.dataset == 'pubmed':\n graphs, features, adjs, labels = load_pubmed_data({\n 'deg_num': args.deg,\n 'sample_num': 1,\n })\n elif args.dataset == 'random':\n graphs, features, adjs, labels = load_pubmed_data({\n 'deg_num': args.deg,\n 'feat_dim': args.feat_dim,\n 'sample_num': 1,\n })\n\n writer = SummaryWriter(f'./log_pubmed')\n\n hiddens = [50, 100, 200, 500, 1000, 1500, 2000, 3000]\n for hidden in hiddens:\n series.append(train(data = (graphs, features, adjs, labels), deg = args.deg, feat_dim = args.feat_dim, hidden_dim = hidden, layer_num = args.layer_num, o = 0, writer=writer))\n print()\n \n visualize(series, hiddens, 'hidden')\n # print(series)\n" ]
[ [ "torch.utils.tensorboard.SummaryWriter" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
SatyaSiddharthDash/headlinegen
[ "ec11cb4b4dd4e6dce553c787cf31670a83f1c650" ]
[ "data_preprocessing_scripts/preprocess.py" ]
[ "import pandas as pd\nfrom sklearn.model_selection import train_test_split\n\n\nrandom_state = 100\n\ndata = pd.read_csv(\"~/headlinegen/data/nytime_front_page.csv\")\ndata['title'] = data['title'].apply(lambda x: ' '.join(x.split(' ')[:-5]))\n\nlens = data[\"content\"].apply(lambda x: len(x.split(\" \"))).nlargest(10)\n\nprint(\n f'max_input_len = {data[\"content\"].apply(lambda x: len(x.split(\" \"))).min()}')\nprint(\n f'max_output_len = {data[\"title\"].apply(lambda x: len(x.split(\" \"))).max()}')\n\nprint(lens)\n\n# train, valid_test = train_test_split(data,\n# test_size=0.2,\n# random_state=random_state,\n# shuffle=True)\n# valid, test = train_test_split(valid_test,\n# test_size=0.5,\n# random_state=random_state,\n# shuffle=True)\n\n# print(train.shape, valid.shape, test.shape)\n\n# for dataset, prefix in zip([train, valid, test], ['train', 'val', 'test']):\n# for columnname, suffix in zip(['content', 'title'], ['source', 'target']):\n# filename = \"/Users/satyasiddharthdash/headlinegen/data/nytimes/\" + prefix + '.' + suffix\n# with open(filename, 'w') as outfile:\n# outfile.write(dataset[columnname].str.cat(sep='\\n'))\n" ]
[ [ "pandas.read_csv" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.1", "1.5", "1.2", "1.3" ], "scipy": [], "tensorflow": [] } ]
duypham2108/stLearn
[ "91b6bae91b29aba8b4f055bf92da13f1558ddbe8", "91b6bae91b29aba8b4f055bf92da13f1558ddbe8" ]
[ "stlearn/tools/microenv/cci/base_grouping.py", "stlearn/tools/microenv/cci/perm_utils.py" ]
[ "\"\"\" Performs LR analysis by grouping LR pairs which having hotspots across\n similar tissues.\n\"\"\"\n\nfrom stlearn.pl import het_plot\nfrom sklearn.cluster import DBSCAN, AgglomerativeClustering\nfrom anndata import AnnData\nfrom tqdm import tqdm\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sb\n\n\ndef get_hotspots(\n adata: AnnData,\n lr_scores: np.ndarray,\n lrs: np.array,\n eps: float,\n quantile=0.05,\n verbose=True,\n plot_diagnostics: bool = False,\n show_plot: bool = False,\n):\n \"\"\"Determines the hotspots for the inputted scores by progressively setting more stringent cutoffs & clustering in space, chooses point which maximises number of clusters.\n Parameters\n ----------\n adata: AnnData The data object\n lr_scores: np.ndarray LR_pair*Spots containing the LR scores.\n lrs: np.array The LR_pairs, in-line with the rows of scores.\n eps: float The eps parameter used in DBScan to get the number of clusters.\n quantile: float The quantiles to use for the cutoffs, if 0.05 then will take non-zero quantiles of 0.05, 0.1,..., 1 quantiles to cluster.\n\n Returns\n -------\n lr_hot_scores: np.ndarray, lr_cutoffs: np.array First is the LR scores for just the hotspots, second is the cutoff used to get those LR_scores.\n \"\"\"\n coors = adata.obs[[\"imagerow\", \"imagecol\"]].values\n lr_summary, lr_hot_scores = hotspot_core(\n lr_scores, lrs, coors, eps, quantile, plot_diagnostics, adata\n )\n\n if plot_diagnostics and show_plot: # Showing the diagnostic plotting #\n plt.show()\n\n if verbose:\n print(\"Clustering LRs to help with ranking/interpretation...\")\n # Clustering the LR pairs to obtain a set of clusters so to order within\n # each cluster\n clusterer = AgglomerativeClustering(\n affinity=\"euclidean\", linkage=\"ward\", distance_threshold=10, n_clusters=None\n )\n clusterer.fit(lr_hot_scores > 0)\n dist_cutoff = np.quantile(clusterer.distances_, 0.98)\n clusterer = AgglomerativeClustering(\n affinity=\"euclidean\",\n linkage=\"ward\",\n distance_threshold=dist_cutoff,\n n_clusters=None,\n )\n clusters = clusterer.fit_predict(lr_hot_scores > 0)\n cluster_set = np.unique(clusters)\n\n if verbose:\n print(\"Ranking LRs...\")\n\n # Determining the ordering of the clusters so is useful to user #\n cluster_mean_spots = []\n for cluster in cluster_set:\n cluster_bool = clusters == cluster\n cluster_mean_spots.append(np.mean(lr_summary[cluster_bool, 2]))\n cluster_order = np.argsort(-np.array(cluster_mean_spots))\n\n # Determining order of lrs in cluster & also overall cluster scores #\n lr_order = []\n new_clusters = []\n cluster_scores = np.zeros((adata.shape[0], len(cluster_set)))\n for i, index in enumerate(cluster_order):\n cluster = cluster_set[index]\n cluster_indices = np.where(clusters == cluster)[0]\n lr_order_ = np.argsort(-lr_summary[cluster_indices, 2])\n lr_order.extend(cluster_indices[lr_order_])\n\n new_clusters += [i] * len(cluster_indices)\n\n cluster_scores[:, i] = lr_hot_scores[cluster_indices, :].mean(axis=0)\n\n if verbose:\n print(\"Saving results:\")\n\n # Re-ordering the summary & the scores #\n lrs = lrs[lr_order]\n lr_summary = lr_summary[lr_order, :]\n lr_summary[:, 3] = new_clusters\n lr_summary = pd.DataFrame(\n lr_summary,\n index=lrs,\n columns=[\"spot_counts\", \"cutoff\", \"hotspot_counts\", \"lr_cluster\"],\n )\n lr_scores = lr_scores[lr_order, :].transpose()\n lr_hot_scores = lr_hot_scores[lr_order, :].transpose()\n\n # Adding all this information to the AnnData #\n adata.uns[\"lr_summary\"] = 
lr_summary\n adata.obsm[\"lr_scores\"] = lr_scores\n adata.obsm[\"lr_hot_scores\"] = lr_hot_scores\n adata.obsm[\"cluster_scores\"] = cluster_scores\n\n if verbose:\n print(f\"\\tSummary values of lrs in adata.uns['lr_summary'].\")\n print(\n f\"\\tMatrix of lr scores in same order as the summary in adata.obsm['lr_scores'].\"\n )\n print(f\"\\tMatrix of the hotspot scores in adata.obsm['lr_hot_scores'].\")\n print(\n f\"\\tMatrix of the mean LR cluster scores in adata.obsm['cluster_scores'].\"\n )\n\n\ndef hotspot_core(\n lr_scores,\n lrs,\n coors,\n eps,\n quantile,\n plot_diagnostics=False,\n adata=None,\n verbose=True,\n max_score=False,\n):\n \"\"\"Made code for getting the hotspot information.\"\"\"\n score_copy = lr_scores.copy()\n quantiles = [quantile * i for i in range(int(1 / quantile))]\n\n # Values to return #\n lr_hot_scores = np.zeros(score_copy.shape)\n # cols: spot_counts, cutoff, hotspot_counts, lr_cluster\n lr_summary = np.zeros((score_copy.shape[0], 4))\n\n ### Also creating grouping lr_pairs by quantiles to plot diagnostics ###\n if plot_diagnostics:\n lr_quantiles = [(i / 6) for i in range(1, 7)][::-1]\n lr_mean_scores = np.apply_along_axis(non_zero_mean, 1, score_copy)\n lr_quant_values = np.quantile(lr_mean_scores, lr_quantiles)\n quant_lrs = np.array(\n [lrs[lr_mean_scores == quant] for quant in lr_quant_values]\n )\n fig, axes = plt.subplots(6, 4, figsize=(20, 15))\n\n # Determining the cutoffs for hotspots #\n with tqdm(\n total=len(lrs),\n desc=\"Removing background lr scores...\",\n bar_format=\"{l_bar}{bar}\",\n disable=verbose == False,\n ) as pbar:\n for i, lr_ in enumerate(lrs):\n lr_score_ = score_copy[i, :]\n lr_summary[i, 0] = len(np.where(lr_score_ > 0)[0])\n\n cutoff_scores = []\n cutoffs = np.quantile(lr_score_[lr_score_ > 0], quantiles)\n for cutoff in cutoffs:\n spot_bool = lr_score_ >= cutoff\n if len(np.where(spot_bool)[0]) == 0:\n cutoff_scores.append(0)\n continue\n\n coor_ = coors[spot_bool, :]\n clusters = DBSCAN(\n min_samples=2, eps=eps, metric=\"manhattan\"\n ).fit_predict(coor_)\n score = len(np.unique(clusters)) * (np.mean(lr_score_[spot_bool])) ** 2\n cutoff_scores.append(score)\n\n # Cutoff point where maximum number of clusters occurs #\n best_cutoff = cutoffs[np.argmax(cutoff_scores)]\n if not max_score:\n lr_summary[i, 1] = best_cutoff\n else:\n lr_summary[i, 1] = cutoff_scores[np.argmax(cutoff_scores)]\n\n lr_score_[lr_score_ < best_cutoff] = 0\n lr_hot_scores[i, :] = lr_score_\n lr_summary[i, 2] = len(np.where(lr_score_ > 0)[0])\n\n # Adding the diagnostic plots #\n if plot_diagnostics and lr_ in quant_lrs and type(adata) != type(None):\n add_diagnostic_plots(\n adata,\n i,\n lr_,\n quant_lrs,\n lr_quantiles,\n lr_scores,\n lr_hot_scores,\n axes,\n cutoffs,\n cutoff_scores,\n best_cutoff,\n )\n\n pbar.update(1)\n\n return lr_summary, lr_hot_scores\n\n\ndef non_zero_mean(vals):\n \"\"\"Gives the non-zero mean of the values.\"\"\"\n return vals[vals > 0].mean()\n\n\ndef add_diagnostic_plots(\n adata,\n i,\n lr_,\n quant_lrs,\n lr_quantiles,\n lr_scores,\n lr_hot_scores,\n axes,\n cutoffs,\n n_clusters,\n best_cutoff,\n):\n \"\"\"Adds diagnostic plots for the quantile LR pair to a figure to illustrate \\\n how the cutoff is functioning.\n \"\"\"\n q_i = np.where(quant_lrs == lr_)[0][0]\n\n # Scatter plot #\n axes[q_i][0].scatter(cutoffs, n_clusters)\n axes[q_i][0].set_title(f\"n_clusts*mean_spot_score vs cutoff\")\n axes[q_i][0].set_xlabel(\"cutoffs\")\n axes[q_i][0].set_ylabel(\"n_clusts*mean_spot_score\")\n\n # Distribution of scores 
with cutoff #\n scores_ = lr_scores[i, :]\n sb.distplot(\n scores_[scores_ > 0],\n ax=axes[q_i][1],\n hist=True,\n kde=False,\n color=\"red\",\n norm_hist=True,\n )\n v_height = 0.5\n axes[q_i][1].vlines(best_cutoff, 0, v_height)\n axes[q_i][1].text(best_cutoff, v_height, str(round(best_cutoff, 2)))\n axes[q_i][1].set_title(f\"Distrib {round(lr_quantiles[q_i], 2)}({lr_})\")\n\n # Showing before & after filtering spots #\n adata.obsm[\"lr_scores\"] = scores_\n het_plot(\n adata,\n use_het=\"lr_scores\",\n ax=axes[q_i][2],\n show_color_bar=False,\n )\n axes[q_i][2].set_title(\"scores\")\n\n adata.obsm[\"lr_scores\"] = lr_hot_scores[i, :]\n het_plot(\n adata,\n use_het=\"lr_scores\",\n ax=axes[q_i][3],\n show_color_bar=False,\n )\n axes[q_i][3].set_title(\"hotspot scores\")\n", "import numpy as np\nimport pandas as pd\nfrom scipy.spatial.distance import euclidean, canberra\nfrom sklearn.preprocessing import MinMaxScaler\n\nfrom numba import njit, prange\nfrom numba.typed import List\n\nfrom .base import get_lrs_scores\n\n\ndef nonzero_quantile(expr, q, interpolation):\n \"\"\"Calculating the non-zero quantiles.\"\"\"\n nonzero_expr = expr[expr > 0]\n quants = np.quantile(nonzero_expr, q=q, interpolation=interpolation)\n if type(quants) != np.array and type(quants) != np.ndarray:\n quants = np.array([quants])\n return quants\n\n\ndef getzero_prop(expr):\n \"\"\"Calculating the proportion of zeros.\"\"\"\n zero_bool = expr == 0\n n_zeros = len(np.where(zero_bool)[0])\n zero_prop = [n_zeros / len(expr)]\n return zero_prop\n\n\ndef get_lr_quants(\n lr_expr: pd.DataFrame,\n l_indices: list,\n r_indices: list,\n quantiles: np.array,\n method=\"\",\n):\n \"\"\"Gets the quantiles per gene in the LR pair, & then concatenates.\n Returns\n -------\n lr_quants, l_quants, r_quants: np.ndarray First is concatenation of two latter. Each row is a quantile value, each column is a LR pair.\n \"\"\"\n\n quant_func = nonzero_quantile if method != \"quantiles\" else np.quantile\n\n # First getting the quantiles of gene expression #\n gene_quants = np.apply_along_axis(\n quant_func, 0, lr_expr.values, q=quantiles, interpolation=\"nearest\"\n )\n\n l_quants = gene_quants[:, l_indices]\n r_quants = gene_quants[:, r_indices]\n\n lr_quants = np.concatenate((l_quants, r_quants), 0).transpose()\n\n return lr_quants, l_quants, r_quants\n\n\ndef get_lr_zeroprops(lr_expr: pd.DataFrame, l_indices: list, r_indices: list):\n \"\"\"Gets the proportion of zeros per gene in the LR pair, & then concatenates.\n Returns\n -------\n lr_props, l_props, r_props: np.ndarray First is concatenation of two latter. 
Each row is a prop value, each column is a LR pair.\n \"\"\"\n\n # First getting the quantiles of gene expression #\n gene_props = np.apply_along_axis(getzero_prop, 0, lr_expr.values)\n\n l_props = gene_props[:, l_indices]\n r_props = gene_props[:, r_indices]\n\n lr_props = np.concatenate((l_props, r_props), 0).transpose()\n\n return lr_props, l_props, r_props\n\n\ndef get_lr_bounds(lr_value: float, bin_bounds: np.array):\n \"\"\"For the given lr_value, returns the bin where it belongs.\n Returns\n -------\n lr_bin: tuple Tuple of length 2, first is the lower bound of the bin, second is upper bound of the bin.\n \"\"\"\n if np.any(bin_bounds == lr_value): # If sits on a boundary\n lr_i = np.where(bin_bounds == lr_value)[0][0]\n if lr_value == max(bin_bounds): # Must be in the final bin\n _lower = bin_bounds[-2]\n _upper = bin_bounds[-1]\n else: # In the lower bin\n _lower = bin_bounds[lr_i]\n _upper = bin_bounds[lr_i + 1]\n else: # Bin where it's value sit in-between #\n _lower = bin_bounds[np.where(bin_bounds < lr_value)[0][-1]]\n _upper = bin_bounds[np.where(bin_bounds > lr_value)[0][0]]\n\n return (_lower, _upper)\n\n\ndef get_similar_genes(\n ref_quants: np.array,\n ref_props: np.array,\n n_genes: int,\n candidate_expr: np.ndarray,\n candidate_genes: np.array,\n quantiles=(0.5), # (.5, .75, .85, .9, .95, .97, .98, .99, 1)\n):\n \"\"\"Gets genes with a similar expression distribution as the inputted gene,\n by measuring distance between the gene expression quantiles.\n Parameters\n ----------\n ref_quants: np.array The pre-calculated quantiles.\n ref_props: np.array The query zero proportions.\n n_genes: int Number of equivalent genes to select.\n candidate_expr: np.ndarray Expression of gene candidates (cells*genes).\n candidate_genes: np.array Same as candidate_expr.shape[1], indicating gene names.\n quantiles: tuple The quantile to use\n Returns\n -------\n similar_genes: np.array Array of strings for gene names.\n \"\"\"\n if type(quantiles) == float:\n quantiles = np.array([quantiles])\n else:\n quantiles = np.array(quantiles)\n\n # Query quants #\n query_quants = np.apply_along_axis(\n nonzero_quantile, 0, candidate_expr, q=quantiles, interpolation=\"nearest\"\n )\n\n # Need to min-max normalise so can take distance with the proportion #\n all_quants = np.concatenate((np.array([ref_quants]), query_quants), axis=1)\n scaler = MinMaxScaler()\n scaled_quants = scaler.fit_transform(all_quants.transpose()).transpose()\n ref_scaled = scaled_quants[:, 0]\n query_scaled = scaled_quants[:, 1:]\n\n # Query props #\n query_props = np.apply_along_axis(getzero_prop, 0, candidate_expr)\n\n # Concatenating to create the ref & query vals to match #\n ref_vals = np.array([ref_scaled[0], ref_props[0]]) # both between 0 & 1\n query_vals = np.concatenate((query_scaled, query_props))\n\n # Measuring distances from the desired gene #\n dists = np.apply_along_axis(canberra, 0, query_vals, ref_vals)\n\n # Retrieving desired number of genes #\n order = np.argsort(dists)\n similar_genes = candidate_genes[order[0:n_genes]]\n\n \"\"\" During debugging, plotting distribution of distances & selected genes.\n import matplotlib.pyplot as plt\n cutoff = dists[order[n_genes]]\n plt.hist(dists, bins=100)\n plt.vlines(cutoff, 0, 500, color='r')\n plt.show()\n \"\"\"\n\n return similar_genes\n\n\ndef get_similar_genes_Quantiles(\n gene_expr: np.array,\n n_genes: int,\n candidate_quants: np.ndarray,\n candidate_genes: np.array,\n quantiles=(0.5, 0.75, 0.85, 0.9, 0.95, 0.97, 0.98, 0.99, 1),\n):\n \"\"\"Gets genes with 
a similar expression distribution as the inputted gene,\n by measuring distance between the gene expression quantiles.\n Parameters\n ----------\n gene_expr: np.array Expression of the gene of interest, or, if the same length as quantiles, then assumes is the pre-calculated quantiles.\n n_genes: int Number of equivalent genes to select.\n candidate_quants: np.ndarray Expression quantiles of gene candidates (quantiles*genes).\n candidate_genes: np.array Same as candidate_expr.shape[1], indicating gene names.\n quantiles: tuple The quantile to use\n Returns\n -------\n similar_genes: np.array Array of strings for gene names.\n \"\"\"\n\n if type(quantiles) == float:\n quantiles = np.array([quantiles])\n else:\n quantiles = np.array(quantiles)\n\n # Getting the quantiles for the gene #\n if len(gene_expr) != len(quantiles):\n # ref_quants = np.quantile(gene_expr, q=quantiles, interpolation='nearest')\n ref_quants = nonzero_quantile(gene_expr, q=quantiles, interpolation=\"nearest\")\n else:\n ref_quants = gene_expr\n\n # Measuring distances from the desired gene #\n dists = np.apply_along_axis(canberra, 0, candidate_quants, ref_quants)\n order = np.argsort(dists)\n\n \"\"\" During debugging, plotting distribution of distances & selected genes.\n import matplotlib.pyplot as plt\n cutoff = dists[order[n_genes]]\n fig, ax = plt.subplots()\n ax.hist(dists[order[0:28000]], bins=1000)\n y_max = ax.get_ylim()[1]\n ax.vlines(cutoff, 0, y_max/2, color='r')\n plt.show()\n print(candidate_quants[:,order[0:3]]) # Showing the quantiles of selected\n print(candidate_quants[:,order[n_genes-3:n_genes]])\n print(ref_quants)\n \"\"\"\n\n # Retrieving desired number of genes #\n similar_genes = candidate_genes[order[0:n_genes]]\n\n return similar_genes\n\n\n@njit(parallel=True)\ndef get_similar_genesFAST(\n ref_quants: np.array,\n n_genes: int,\n candidate_quants: np.ndarray,\n candidate_genes: np.array,\n):\n \"\"\"Fast version of the above with parallelisation.\"\"\"\n\n # Measuring distances from the desired gene #\n dists = np.zeros((1, candidate_quants.shape[1]), dtype=np.float64)[0, :]\n for i in prange(0, candidate_quants.shape[1]):\n cand_quants = candidate_quants[:, i]\n abs_diff = ref_quants - cand_quants\n abs_diff[abs_diff < 0] = -abs_diff[abs_diff < 0]\n dists[i] = np.nansum(abs_diff / (ref_quants + cand_quants))\n\n # Need to remove the zero-dists since this indicates they are expressed\n # exactly the same, & hence likely in the same spot !!!\n nonzero_bool = dists > 0\n dists = dists[nonzero_bool]\n candidate_quants = candidate_quants[:, nonzero_bool]\n candidate_genes = candidate_genes[nonzero_bool]\n order = np.argsort(dists)\n\n \"\"\" During debugging, plotting distribution of distances & selected genes.\n import matplotlib.pyplot as plt\n cutoff = dists[order[n_genes]]\n fig, ax = plt.subplots()\n ax.hist(dists[order[0:28000]], bins=1000)\n y_max = ax.get_ylim()[1]\n ax.vlines(cutoff, 0, y_max/2, color='r')\n plt.show()\n print(candidate_quants[:,order[0:3]]) # Showing the quantiles of selected\n print(candidate_quants[:,order[n_genes-3:n_genes]])\n print(ref_quants)\n \"\"\"\n\n # Retrieving desired number of genes #\n similar_genes = candidate_genes[order[0:n_genes]]\n\n return similar_genes\n\n\n@njit\ndef gen_rand_pairs(genes1: np.array, genes2: np.array, n_pairs: int):\n \"\"\"Generates random pairs of genes.\"\"\"\n\n rand_pairs = List()\n for j in range(0, n_pairs):\n l_rand = np.random.choice(genes1, 1)[0]\n r_rand = np.random.choice(genes2, 1)[0]\n rand_pair = \"_\".join([l_rand, 
r_rand])\n while rand_pair in rand_pairs or l_rand == r_rand:\n l_rand = np.random.choice(genes1, 1)[0]\n r_rand = np.random.choice(genes2, 1)[0]\n rand_pair = \"_\".join([l_rand, r_rand])\n\n rand_pairs.append(rand_pair)\n\n return rand_pairs\n\n\ndef get_lr_features(adata, lr_expr, lrs, quantiles):\n \"\"\"Gets expression features of LR pairs; nonzero-median, zero-prop, quantiles.\"\"\"\n quantiles = np.array(quantiles)\n\n # Determining indices of LR pairs #\n l_indices, r_indices = [], []\n for lr in lrs:\n l_, r_ = lr.split(\"_\")\n l_indices.extend(np.where(lr_expr.columns.values == l_)[0])\n r_indices.extend(np.where(lr_expr.columns.values == r_)[0])\n\n # The nonzero median when quantiles=.5 #\n lr_quants, l_quants, r_quants = get_lr_quants(\n lr_expr, l_indices, r_indices, quantiles, method=\"quantiles\"\n )\n\n # Calculating the zero proportions, for grouping based on median/zeros #\n lr_props, l_props, r_props = get_lr_zeroprops(lr_expr, l_indices, r_indices)\n\n ######## Getting lr features for later diagnostics #######\n lr_meds, l_meds, r_meds = get_lr_quants(\n lr_expr, l_indices, r_indices, quantiles=np.array([0.5]), method=\"\"\n )\n lr_median_means = lr_meds.mean(axis=1)\n lr_prop_means = lr_props.mean(axis=1)\n\n # Calculating mean rank #\n median_order = np.argsort(lr_median_means)\n prop_order = np.argsort(lr_prop_means * -1)\n median_ranks = [np.where(median_order == i)[0][0] for i in range(len(lrs))]\n prop_ranks = [np.where(prop_order == i)[0][0] for i in range(len(lrs))]\n mean_ranks = np.array([median_ranks, prop_ranks]).mean(axis=0)\n\n # Saving the lrfeatures...\n cols = [\"nonzero-median\", \"zero-prop\", \"median_rank\", \"prop_rank\", \"mean_rank\"]\n lr_features = pd.DataFrame(index=lrs, columns=cols)\n lr_features.iloc[:, 0] = lr_median_means\n lr_features.iloc[:, 1] = lr_prop_means\n lr_features.iloc[:, 2] = np.array(median_ranks)\n lr_features.iloc[:, 3] = np.array(prop_ranks)\n lr_features.iloc[:, 4] = np.array(mean_ranks)\n lr_features = lr_features.iloc[np.argsort(mean_ranks), :]\n lr_cols = [f\"L_{quant}\" for quant in quantiles] + [\n f\"R_{quant}\" for quant in quantiles\n ]\n quant_df = pd.DataFrame(lr_quants, columns=lr_cols, index=lrs)\n lr_features = pd.concat((lr_features, quant_df), axis=1)\n adata.uns[\"lrfeatures\"] = lr_features\n\n return lr_features\n\n\ndef get_lr_bg(\n adata,\n neighbours,\n het_vals,\n min_expr,\n lr_,\n lr_score,\n l_quant,\n r_quant,\n genes,\n candidate_quants,\n gene_bg_genes,\n n_genes,\n n_pairs,\n):\n \"\"\"Gets the LR-specific background & bg spot indices.\"\"\"\n l_, r_ = lr_.split(\"_\")\n if l_ not in gene_bg_genes:\n l_genes = get_similar_genesFAST(\n l_quant, n_genes, candidate_quants, genes # group_l_props,\n )\n gene_bg_genes[l_] = l_genes\n else:\n l_genes = gene_bg_genes[l_]\n\n if r_ not in gene_bg_genes:\n r_genes = get_similar_genesFAST(\n r_quant, n_genes, candidate_quants, genes # group_r_props,\n )\n gene_bg_genes[r_] = r_genes\n else:\n r_genes = gene_bg_genes[r_]\n\n rand_pairs = gen_rand_pairs(l_genes, r_genes, n_pairs)\n spot_indices = np.where(lr_score > 0)[0]\n\n background = get_lrs_scores(\n adata,\n rand_pairs,\n neighbours,\n het_vals,\n min_expr,\n filter_pairs=False,\n spot_indices=spot_indices,\n )\n\n return background, spot_indices\n" ]
[ [ "numpy.array", "numpy.unique", "numpy.quantile", "matplotlib.pyplot.subplots", "pandas.DataFrame", "sklearn.cluster.DBSCAN", "numpy.apply_along_axis", "numpy.mean", "numpy.argmax", "numpy.argsort", "sklearn.cluster.AgglomerativeClustering", "matplotlib.pyplot.show", "numpy.zeros", "numpy.where" ], [ "pandas.concat", "numpy.random.choice", "numpy.quantile", "pandas.DataFrame", "numpy.concatenate", "numpy.apply_along_axis", "numpy.nansum", "numpy.any", "numpy.argsort", "numpy.array", "numpy.zeros", "numpy.where", "sklearn.preprocessing.MinMaxScaler" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] } ]
MarkDaoust/agents
[ "00ddf75a8a35a26a03a9323b78d95c06211b5b3f" ]
[ "tf_agents/bandits/agents/utils_test.py" ]
[ "# coding=utf-8\n# Copyright 2018 The TF-Agents Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for tf_agents.bandits.agents.utils.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom absl.testing import parameterized\nimport numpy as np\nimport tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import\nimport tensorflow_probability as tfp\n\nfrom tf_agents.bandits.agents import utils\nfrom tf_agents.specs import tensor_spec\n\ntfd = tfp.distributions\ntf.compat.v1.enable_v2_behavior()\n\n\ndef test_cases():\n return parameterized.named_parameters(\n {\n 'testcase_name': '_batch1_contextdim10',\n 'batch_size': 1,\n 'context_dim': 10,\n }, {\n 'testcase_name': '_batch4_contextdim5',\n 'batch_size': 4,\n 'context_dim': 5,\n })\n\n\nclass UtilsTest(tf.test.TestCase, parameterized.TestCase):\n\n def testNumActionsFromTensorSpecGoodSpec(self):\n action_spec = tensor_spec.BoundedTensorSpec(\n dtype=tf.int32, shape=(), minimum=0, maximum=15)\n num_actions = utils.get_num_actions_from_tensor_spec(action_spec)\n self.assertEqual(num_actions, 16)\n\n def testNumActionsFromTensorSpecWrongRank(self):\n action_spec = tensor_spec.BoundedTensorSpec(\n dtype=tf.int32, shape=(2, 3), minimum=0, maximum=15)\n\n with self.assertRaisesRegexp(ValueError, r'Action spec must be a scalar'):\n utils.get_num_actions_from_tensor_spec(action_spec)\n\n @test_cases()\n def testBUpdate(self, batch_size, context_dim):\n b_array = np.array(range(context_dim))\n r_array = np.array(range(batch_size)).reshape((batch_size, 1))\n x_array = np.array(range(batch_size * context_dim)).reshape(\n (batch_size, context_dim))\n rx = r_array * x_array\n expected_b_updated_array = b_array + np.sum(rx, axis=0)\n\n b = tf.constant(b_array, dtype=tf.float32, shape=[context_dim])\n r = tf.constant(r_array, dtype=tf.float32, shape=[batch_size])\n x = tf.constant(x_array, dtype=tf.float32, shape=[batch_size, context_dim])\n b_update = utils.sum_reward_weighted_observations(r, x)\n self.assertAllClose(expected_b_updated_array, self.evaluate(b + b_update))\n\n @test_cases()\n def testBUpdateEmptyObservations(self, batch_size, context_dim):\n r = tf.constant([], dtype=tf.float32, shape=[0, 1])\n x = tf.constant([], dtype=tf.float32, shape=[0, context_dim])\n b_update = utils.sum_reward_weighted_observations(r, x)\n expected_b_update_array = np.zeros([context_dim], dtype=np.float32)\n self.assertAllClose(expected_b_update_array, self.evaluate(b_update))\n\n def testLaplacian1D(self):\n action_spec = tensor_spec.BoundedTensorSpec(\n dtype=tf.int32, shape=(), minimum=0, maximum=4)\n num_actions = utils.get_num_actions_from_tensor_spec(action_spec)\n laplacian_matrix = tf.convert_to_tensor(\n utils.build_laplacian_over_ordinal_integer_actions(action_spec),\n dtype=tf.float32)\n res = tf.matmul(\n laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))\n # The vector of ones is in the null space of the Laplacian matrix.\n 
self.assertAllClose(0.0, self.evaluate(tf.norm(res)))\n\n # The row sum is zero.\n row_sum = tf.reduce_sum(laplacian_matrix, 1)\n self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))\n\n # The column sum is zero.\n column_sum = tf.reduce_sum(laplacian_matrix, 0)\n self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))\n\n # The diagonal elements are 2.0.\n self.assertAllClose(2.0, laplacian_matrix[1, 1])\n\n laplacian_matrix_expected = np.array(\n [[1.0, -1.0, 0.0, 0.0, 0.0],\n [-1.0, 2.0, -1.0, 0.0, 0.0],\n [0.0, -1.0, 2.0, -1.0, 0.0],\n [0.0, 0.0, -1.0, 2.0, -1.0],\n [0.0, 0.0, 0.0, -1.0, 1.0]])\n self.assertAllClose(laplacian_matrix_expected,\n self.evaluate(laplacian_matrix))\n\n def testComputePairwiseDistances(self):\n input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])\n pdist_matrix = np.array(\n [[0.0, 27.0, 108.0,],\n [27.0, 0.0, 27.0],\n [108.0, 27.0, 0.0]])\n tf_dist_matrix = utils.compute_pairwise_distances(\n tf.constant(input_vects, dtype=tf.float32))\n self.assertAllClose(pdist_matrix, self.evaluate(tf_dist_matrix))\n\n def testBuildLaplacianNearestNeighborGraph(self):\n input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9],\n [10, 11, 12], [13, 14, 15]])\n num_actions = input_vects.shape[0]\n laplacian_matrix = utils.build_laplacian_nearest_neighbor_graph(\n tf.constant(input_vects, dtype=tf.float32), k=2)\n\n # The vector of ones is in the null space of the Laplacian matrix.\n res = tf.matmul(\n laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))\n self.assertAllClose(0.0, self.evaluate(tf.norm(res)))\n\n # The row sum is zero.\n row_sum = tf.reduce_sum(laplacian_matrix, 1)\n self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))\n\n # The column sum is zero.\n column_sum = tf.reduce_sum(laplacian_matrix, 0)\n self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))\n\n self.assertAllClose(2.0, laplacian_matrix[0, 0])\n self.assertAllClose(4.0, laplacian_matrix[2, 2])\n\n\nif __name__ == '__main__':\n tf.test.main()\n" ]
[ [ "tensorflow.compat.v1.enable_v2_behavior", "tensorflow.norm", "tensorflow.constant", "tensorflow.reduce_sum", "tensorflow.test.main", "tensorflow.ones", "numpy.array", "numpy.zeros", "numpy.sum" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "2.7", "1.12", "2.6", "2.2", "1.13", "2.3", "2.4", "1.4", "2.9", "1.5", "1.7", "2.5", "0.12", "1.0", "2.8", "1.2", "2.10" ] } ]
abhinavralhan/dask
[ "e840ba38eadfa93c3b9959347f0a43c1279a94ab", "e840ba38eadfa93c3b9959347f0a43c1279a94ab", "e840ba38eadfa93c3b9959347f0a43c1279a94ab" ]
[ "dask/dataframe/tests/test_hashing.py", "dask/dataframe/rolling.py", "dask/dataframe/categorical.py" ]
[ "import numpy as np\nimport pandas as pd\nimport pandas.util.testing as tm\n\nimport pytest\n\nfrom dask.dataframe.hashing import hash_pandas_object\nfrom dask.dataframe.utils import assert_eq\n\n\[email protected]('obj', [\n pd.Series([1, 2, 3]),\n pd.Series([1.0, 1.5, 3.2]),\n pd.Series([1.0, 1.5, 3.2], index=[1.5, 1.1, 3.3]),\n pd.Series(['a', 'b', 'c']),\n pd.Series([True, False, True]),\n pd.Index([1, 2, 3]),\n pd.Index([True, False, True]),\n pd.DataFrame({'x': ['a', 'b', 'c'], 'y': [1, 2, 3]}),\n pd.util.testing.makeMissingDataframe(),\n pd.util.testing.makeMixedDataFrame(),\n pd.util.testing.makeTimeDataFrame(),\n pd.util.testing.makeTimeSeries(),\n pd.util.testing.makeTimedeltaIndex()])\ndef test_hash_pandas_object(obj):\n a = hash_pandas_object(obj)\n b = hash_pandas_object(obj)\n if isinstance(a, np.ndarray):\n np.testing.assert_equal(a, b)\n else:\n assert_eq(a, b)\n\n\ndef test_categorical_consistency():\n # Check that categoricals hash consistent with their values, not codes\n # This should work for categoricals of any dtype\n for s1 in [pd.Series(['a', 'b', 'c', 'd']),\n pd.Series([1000, 2000, 3000, 4000]),\n pd.Series(pd.date_range(0, periods=4))]:\n s2 = s1.astype('category').cat.set_categories(s1)\n s3 = s2.cat.set_categories(list(reversed(s1)))\n for categorize in [True, False]:\n # These should all hash identically\n h1 = hash_pandas_object(s1, categorize=categorize)\n h2 = hash_pandas_object(s2, categorize=categorize)\n h3 = hash_pandas_object(s3, categorize=categorize)\n tm.assert_series_equal(h1, h2)\n tm.assert_series_equal(h1, h3)\n\n\ndef test_object_missing_values():\n # Check that the presence of missing values doesn't change how object dtype\n # is hashed.\n s = pd.Series(['a', 'b', 'c', None])\n h1 = hash_pandas_object(s).iloc[:3]\n h2 = hash_pandas_object(s.iloc[:3])\n tm.assert_series_equal(h1, h2)\n", "from __future__ import absolute_import, division, print_function\n\nimport datetime\n\nimport pandas as pd\nfrom pandas.core.window import Rolling as pd_Rolling\nfrom numbers import Integral\n\nfrom ..base import tokenize\nfrom ..utils import M, funcname, derived_from\nfrom ..highlevelgraph import HighLevelGraph\nfrom .core import _emulate\nfrom .utils import make_meta, PANDAS_VERSION\n\n\ndef overlap_chunk(func, prev_part, current_part, next_part, before, after,\n args, kwargs):\n\n msg = (\"Partition size is less than overlapping \"\n \"window size. 
Try using ``df.repartition`` \"\n \"to increase the partition size.\")\n\n if prev_part is not None and isinstance(before, Integral):\n if prev_part.shape[0] != before:\n raise NotImplementedError(msg)\n\n if next_part is not None and isinstance(after, Integral):\n if next_part.shape[0] != after:\n raise NotImplementedError(msg)\n # We validate that the window isn't too large for timedeltas in map_overlap\n\n parts = [p for p in (prev_part, current_part, next_part) if p is not None]\n combined = pd.concat(parts)\n out = func(combined, *args, **kwargs)\n if prev_part is None:\n before = None\n if isinstance(before, datetime.timedelta):\n before = len(prev_part)\n\n if next_part is None:\n return out.iloc[before:]\n if isinstance(after, datetime.timedelta):\n after = len(next_part)\n return out.iloc[before:-after]\n\n\ndef map_overlap(func, df, before, after, *args, **kwargs):\n \"\"\"Apply a function to each partition, sharing rows with adjacent partitions.\n\n Parameters\n ----------\n func : function\n Function applied to each partition.\n df : dd.DataFrame, dd.Series\n before : int or timedelta\n The rows to prepend to partition ``i`` from the end of\n partition ``i - 1``.\n after : int or timedelta\n The rows to append to partition ``i`` from the beginning\n of partition ``i + 1``.\n args, kwargs :\n Arguments and keywords to pass to the function. The partition will\n be the first argument, and these will be passed *after*.\n\n See Also\n --------\n dd.DataFrame.map_overlap\n \"\"\"\n if (isinstance(before, datetime.timedelta) or isinstance(after, datetime.timedelta)):\n if not df.index._meta_nonempty.is_all_dates:\n raise TypeError(\"Must have a `DatetimeIndex` when using string offset \"\n \"for `before` and `after`\")\n else:\n if not (isinstance(before, Integral) and before >= 0 and\n isinstance(after, Integral) and after >= 0):\n raise ValueError(\"before and after must be positive integers\")\n\n if 'token' in kwargs:\n func_name = kwargs.pop('token')\n token = tokenize(df, before, after, *args, **kwargs)\n else:\n func_name = 'overlap-' + funcname(func)\n token = tokenize(func, df, before, after, *args, **kwargs)\n\n if 'meta' in kwargs:\n meta = kwargs.pop('meta')\n else:\n meta = _emulate(func, df, *args, **kwargs)\n meta = make_meta(meta, index=df._meta.index)\n\n name = '{0}-{1}'.format(func_name, token)\n name_a = 'overlap-prepend-' + tokenize(df, before)\n name_b = 'overlap-append-' + tokenize(df, after)\n df_name = df._name\n\n dsk = {}\n\n # Have to do the checks for too large windows in the time-delta case\n # here instead of in `overlap_chunk`, since we can't rely on fixed-frequency\n # index\n\n timedelta_partition_message = (\n \"Partition size is less than specified window. \"\n \"Try using ``df.repartition`` to increase the partition size\"\n )\n\n if before and isinstance(before, Integral):\n dsk.update({(name_a, i): (M.tail, (df_name, i), before)\n for i in range(df.npartitions - 1)})\n prevs = [None] + [(name_a, i) for i in range(df.npartitions - 1)]\n elif isinstance(before, datetime.timedelta):\n # Assumes monotonic (increasing?) 
index\n deltas = pd.Series(df.divisions).diff().iloc[1:-1]\n if (before > deltas).any():\n raise ValueError(timedelta_partition_message)\n dsk.update({(name_a, i): (_tail_timedelta, (df_name, i), (df_name, i + 1), before)\n for i in range(df.npartitions - 1)})\n prevs = [None] + [(name_a, i) for i in range(df.npartitions - 1)]\n else:\n prevs = [None] * df.npartitions\n\n if after and isinstance(after, Integral):\n dsk.update({(name_b, i): (M.head, (df_name, i), after)\n for i in range(1, df.npartitions)})\n nexts = [(name_b, i) for i in range(1, df.npartitions)] + [None]\n elif isinstance(after, datetime.timedelta):\n # TODO: Do we have a use-case for this? Pandas doesn't allow negative rolling windows\n deltas = pd.Series(df.divisions).diff().iloc[1:-1]\n if (after > deltas).any():\n raise ValueError(timedelta_partition_message)\n\n dsk.update({(name_b, i): (_head_timedelta, (df_name, i - 0), (df_name, i), after)\n for i in range(1, df.npartitions)})\n nexts = [(name_b, i) for i in range(1, df.npartitions)] + [None]\n else:\n nexts = [None] * df.npartitions\n\n for i, (prev, current, next) in enumerate(zip(prevs, df.__dask_keys__(), nexts)):\n dsk[(name, i)] = (overlap_chunk, func, prev, current, next, before,\n after, args, kwargs)\n\n graph = HighLevelGraph.from_collections(name, dsk, dependencies=[df])\n return df._constructor(graph, name, meta, df.divisions)\n\n\ndef _head_timedelta(current, next_, after):\n \"\"\"Return rows of ``next_`` whose index is before the last\n observation in ``current`` + ``after``.\n\n Parameters\n ----------\n current : DataFrame\n next_ : DataFrame\n after : timedelta\n\n Returns\n -------\n overlapped : DataFrame\n \"\"\"\n return next_[next_.index < (current.index.max() + after)]\n\n\ndef _tail_timedelta(prev, current, before):\n \"\"\"Return rows of ``prev`` whose index is after the first\n observation in ``current`` - ``before``.\n\n Parameters\n ----------\n current : DataFrame\n next_ : DataFrame\n before : timedelta\n\n Returns\n -------\n overlapped : DataFrame\n \"\"\"\n return prev[prev.index > (current.index.min() - before)]\n\n\ndef pandas_rolling_method(df, rolling_kwargs, name, *args, **kwargs):\n rolling = df.rolling(**rolling_kwargs)\n return getattr(rolling, name)(*args, **kwargs)\n\n\nclass Rolling(object):\n \"\"\"Provides rolling window calculations.\"\"\"\n\n def __init__(self, obj, window=None, min_periods=None, freq=None,\n center=False, win_type=None, axis=0):\n if freq is not None:\n msg = 'The deprecated freq argument is not supported.'\n raise NotImplementedError(msg)\n\n self.obj = obj # dataframe or series\n self.window = window\n self.min_periods = min_periods\n self.center = center\n self.axis = axis\n self.win_type = win_type\n # Allow pandas to raise if appropriate\n pd_roll = obj._meta.rolling(**self._rolling_kwargs())\n # Using .rolling(window='2s'), pandas will convert the\n # offset str to a window in nanoseconds. 
But pandas doesn't\n # accept the integer window with win_type='freq', so we store\n # that information here.\n # See https://github.com/pandas-dev/pandas/issues/15969\n self._window = pd_roll.window\n self._win_type = pd_roll.win_type\n self._min_periods = pd_roll.min_periods\n\n def _rolling_kwargs(self):\n return {'window': self.window,\n 'min_periods': self.min_periods,\n 'center': self.center,\n 'win_type': self.win_type,\n 'axis': self.axis}\n\n @property\n def _has_single_partition(self):\n \"\"\"\n Indicator for whether the object has a single partition (True)\n or multiple (False).\n \"\"\"\n return (self.axis in (1, 'columns') or\n (isinstance(self.window, Integral) and self.window <= 1) or\n self.obj.npartitions == 1)\n\n def _call_method(self, method_name, *args, **kwargs):\n rolling_kwargs = self._rolling_kwargs()\n meta = pandas_rolling_method(self.obj._meta_nonempty, rolling_kwargs,\n method_name, *args, **kwargs)\n\n if self._has_single_partition:\n # There's no overlap, just use map_partitions\n return self.obj.map_partitions(pandas_rolling_method,\n rolling_kwargs, method_name,\n *args, token=method_name, meta=meta,\n **kwargs)\n # Convert window to overlap\n if self.center:\n before = self.window // 2\n after = self.window - before - 1\n elif self._win_type == 'freq':\n before = pd.Timedelta(self.window)\n after = 0\n else:\n before = self.window - 1\n after = 0\n return map_overlap(pandas_rolling_method, self.obj, before, after,\n rolling_kwargs, method_name, *args,\n token=method_name, meta=meta, **kwargs)\n\n @derived_from(pd_Rolling)\n def count(self):\n return self._call_method('count')\n\n @derived_from(pd_Rolling)\n def sum(self):\n return self._call_method('sum')\n\n @derived_from(pd_Rolling)\n def mean(self):\n return self._call_method('mean')\n\n @derived_from(pd_Rolling)\n def median(self):\n return self._call_method('median')\n\n @derived_from(pd_Rolling)\n def min(self):\n return self._call_method('min')\n\n @derived_from(pd_Rolling)\n def max(self):\n return self._call_method('max')\n\n @derived_from(pd_Rolling)\n def std(self, ddof=1):\n return self._call_method('std', ddof=ddof)\n\n @derived_from(pd_Rolling)\n def var(self, ddof=1):\n return self._call_method('var', ddof=ddof)\n\n @derived_from(pd_Rolling)\n def skew(self):\n return self._call_method('skew')\n\n @derived_from(pd_Rolling)\n def kurt(self):\n return self._call_method('kurt')\n\n @derived_from(pd_Rolling)\n def quantile(self, quantile):\n return self._call_method('quantile', quantile)\n\n @derived_from(pd_Rolling)\n def apply(self, func, args=(), kwargs={}, **kwds):\n # TODO: In a future version of pandas this will change to\n # raw=False. Think about inspecting the function signature and setting\n # to that?\n if PANDAS_VERSION >= '0.23.0':\n kwds.setdefault(\"raw\", None)\n else:\n if kwargs:\n msg = (\"Invalid argument to 'apply'. Keyword arguments \"\n \"should be given as a dict to the 'kwargs' argument. 
\")\n raise TypeError(msg)\n return self._call_method('apply', func, args=args,\n kwargs=kwargs, **kwds)\n\n @derived_from(pd_Rolling)\n def aggregate(self, func, args=(), kwargs={}, **kwds):\n return self._call_method('agg', func, args=args,\n kwargs=kwargs, **kwds)\n\n agg = aggregate\n\n def __repr__(self):\n\n def order(item):\n k, v = item\n _order = {'window': 0, 'min_periods': 1, 'center': 2,\n 'win_type': 3, 'axis': 4}\n return _order[k]\n\n rolling_kwargs = self._rolling_kwargs()\n # pandas translates the '2S' offset to nanoseconds\n rolling_kwargs['window'] = self._window\n rolling_kwargs['win_type'] = self._win_type\n return 'Rolling [{}]'.format(','.join(\n '{}={}'.format(k, v)\n for k, v in sorted(rolling_kwargs.items(), key=order)\n if v is not None))\n", "from __future__ import absolute_import, division, print_function\n\nfrom collections import defaultdict\nimport pandas as pd\nfrom toolz import partition_all\nfrom numbers import Integral\n\nfrom ..base import tokenize, compute_as_if_collection\nfrom .accessor import Accessor\nfrom .utils import (has_known_categories, clear_known_categories, is_scalar,\n is_categorical_dtype)\n\n\ndef _categorize_block(df, categories, index):\n \"\"\" Categorize a dataframe with given categories\n\n df: DataFrame\n categories: dict mapping column name to iterable of categories\n \"\"\"\n df = df.copy()\n for col, vals in categories.items():\n if is_categorical_dtype(df[col]):\n df[col] = df[col].cat.set_categories(vals)\n else:\n df[col] = pd.Categorical(df[col], categories=vals, ordered=False)\n if index is not None:\n if is_categorical_dtype(df.index):\n ind = df.index.set_categories(index)\n else:\n ind = pd.Categorical(df.index, categories=index, ordered=False)\n ind.name = df.index.name\n df.index = ind\n return df\n\n\ndef _get_categories(df, columns, index):\n res = {}\n for col in columns:\n x = df[col]\n if is_categorical_dtype(x):\n res[col] = pd.Series(x.cat.categories)\n else:\n res[col] = x.dropna().drop_duplicates()\n if index:\n if is_categorical_dtype(df.index):\n return res, df.index.categories\n return res, df.index.dropna().drop_duplicates()\n return res, None\n\n\ndef _get_categories_agg(parts):\n res = defaultdict(list)\n res_ind = []\n for p in parts:\n for k, v in p[0].items():\n res[k].append(v)\n res_ind.append(p[1])\n res = {k: pd.concat(v, ignore_index=True).drop_duplicates()\n for k, v in res.items()}\n if res_ind[0] is None:\n return res, None\n return res, res_ind[0].append(res_ind[1:]).drop_duplicates()\n\n\ndef categorize(df, columns=None, index=None, split_every=None, **kwargs):\n \"\"\"Convert columns of the DataFrame to category dtype.\n\n Parameters\n ----------\n columns : list, optional\n A list of column names to convert to categoricals. By default any\n column with an object dtype is converted to a categorical, and any\n unknown categoricals are made known.\n index : bool, optional\n Whether to categorize the index. By default, object indices are\n converted to categorical, and unknown categorical indices are made\n known. Set True to always categorize the index, False to never.\n split_every : int, optional\n Group partitions into groups of this size while performing a\n tree-reduction. 
If set to False, no tree-reduction will be used.\n Default is 16.\n kwargs\n Keyword arguments are passed on to compute.\n \"\"\"\n meta = df._meta\n if columns is None:\n columns = list(meta.select_dtypes(['object', 'category']).columns)\n elif is_scalar(columns):\n columns = [columns]\n\n # Filter out known categorical columns\n columns = [c for c in columns if not (is_categorical_dtype(meta[c]) and\n has_known_categories(meta[c]))]\n\n if index is not False:\n if is_categorical_dtype(meta.index):\n index = not has_known_categories(meta.index)\n elif index is None:\n index = meta.index.dtype == object\n\n # Nothing to do\n if not len(columns) and index is False:\n return df\n\n if split_every is None:\n split_every = 16\n elif split_every is False:\n split_every = df.npartitions\n elif not isinstance(split_every, Integral) or split_every < 2:\n raise ValueError(\"split_every must be an integer >= 2\")\n\n token = tokenize(df, columns, index, split_every)\n a = 'get-categories-chunk-' + token\n dsk = {(a, i): (_get_categories, key, columns, index)\n for (i, key) in enumerate(df.__dask_keys__())}\n\n prefix = 'get-categories-agg-' + token\n k = df.npartitions\n depth = 0\n while k > split_every:\n b = prefix + str(depth)\n for part_i, inds in enumerate(partition_all(split_every, range(k))):\n dsk[(b, part_i)] = (_get_categories_agg, [(a, i) for i in inds])\n k = part_i + 1\n a = b\n depth += 1\n\n dsk[(prefix, 0)] = (_get_categories_agg, [(a, i) for i in range(k)])\n dsk.update(df.dask)\n\n # Compute the categories\n categories, index = compute_as_if_collection(type(df), dsk, (prefix, 0),\n **kwargs)\n\n # Categorize each partition\n return df.map_partitions(_categorize_block, categories, index)\n\n\nclass CategoricalAccessor(Accessor):\n \"\"\"\n Accessor object for categorical properties of the Series values.\n\n Examples\n --------\n >>> s.cat.categories # doctest: +SKIP\n\n Notes\n -----\n Attributes that depend only on metadata are eager\n\n * categories\n * ordered\n\n Attributes depending on the entire dataset are lazy\n\n * codes\n * ...\n\n So `df.a.cat.categories` <=> `df.a._meta.cat.categories`\n So `df.a.cat.codes` <=> `df.a.map_partitions(lambda x: x.cat.codes)`\n \"\"\"\n _accessor = pd.Series.cat\n _accessor_name = 'cat'\n\n def _validate(self, series):\n if not is_categorical_dtype(series.dtype):\n raise AttributeError(\"Can only use .cat accessor with a \"\n \"'category' dtype\")\n\n @property\n def known(self):\n \"\"\"Whether the categories are fully known\"\"\"\n return has_known_categories(self._series)\n\n def as_known(self, **kwargs):\n \"\"\"Ensure the categories in this series are known.\n\n If the categories are known, this is a no-op. 
If unknown, the\n categories are computed, and a new series with known categories is\n returned.\n\n Parameters\n ----------\n kwargs\n Keywords to pass on to the call to `compute`.\n \"\"\"\n if self.known:\n return self._series\n categories = self._property_map('categories').unique().compute(**kwargs)\n return self.set_categories(categories.values)\n\n def as_unknown(self):\n \"\"\"Ensure the categories in this series are unknown\"\"\"\n if not self.known:\n return self._series\n out = self._series.copy()\n out._meta = clear_known_categories(out._meta)\n return out\n\n @property\n def ordered(self):\n return self._delegate_property(self._series._meta, 'cat', 'ordered')\n\n @property\n def categories(self):\n \"\"\"The categories of this categorical.\n\n If categories are unknown, an error is raised\"\"\"\n if not self.known:\n msg = (\"`df.column.cat.categories` with unknown categories is not \"\n \"supported. Please use `column.cat.as_known()` or \"\n \"`df.categorize()` beforehand to ensure known categories\")\n raise NotImplementedError(msg)\n return self._delegate_property(self._series._meta, 'cat', 'categories')\n\n @property\n def codes(self):\n \"\"\"The codes of this categorical.\n\n If categories are unknown, an error is raised\"\"\"\n if not self.known:\n msg = (\"`df.column.cat.codes` with unknown categories is not \"\n \"supported. Please use `column.cat.as_known()` or \"\n \"`df.categorize()` beforehand to ensure known categories\")\n raise NotImplementedError(msg)\n return self._property_map('codes')\n\n def remove_unused_categories(self):\n \"\"\"\n Removes categories which are not used\n\n Notes\n -----\n This method requires a full scan of the data to compute the\n unique values, which can be expensive.\n \"\"\"\n # get the set of used categories\n present = self._series.dropna().unique()\n present = pd.Index(present.compute())\n\n if isinstance(self._series._meta, pd.CategoricalIndex):\n meta_cat = self._series._meta\n else:\n meta_cat = self._series._meta.cat\n\n # Reorder to keep cat:code relationship, filtering unused (-1)\n ordered, mask = present.reindex(meta_cat.categories)\n if mask is None:\n # PANDAS-23963: old and new categories match.\n return self._series\n\n new_categories = ordered[mask != -1]\n meta = meta_cat.set_categories(new_categories, ordered=meta_cat.ordered)\n return self._series.map_partitions(self._delegate_method, 'cat',\n 'set_categories', (),\n {'new_categories': new_categories},\n meta=meta,\n token='cat-set_categories')\n" ]
[ [ "pandas.util.testing.makeTimeDataFrame", "numpy.testing.assert_equal", "pandas.util.testing.makeMissingDataframe", "pandas.Series", "pandas.util.testing.makeTimeSeries", "pandas.util.testing.makeMixedDataFrame", "pandas.Index", "pandas.util.testing.assert_series_equal", "pandas.DataFrame", "pandas.date_range", "pandas.util.testing.makeTimedeltaIndex" ], [ "pandas.concat", "pandas.Series", "pandas.Timedelta" ], [ "pandas.Categorical", "pandas.concat", "pandas.Series" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] } ]
alechfho/dog_breed
[ "2e2f7083c859fdb250f5ba920246b9d2f8168b4d" ]
[ "dataset_processing.py" ]
[ "import numpy as np\nimport pandas as pd\n\n\ndef partition_images(df_labels, identifier_label=None, label_postfix='postfix', target_dir='./', filter_identity=[],\n dev_portion=0.20, encoding_strategy='vgg19_4096'):\n if np.size(filter_identity) == 0:\n filter_identity = df_labels[identifier_label].unique()\n\n df_filter_labels = df_labels[df_labels.breed.isin(filter_identity)]\n df_filter_identifier_label_count = df_filter_labels.groupby([identifier_label]).agg(['count'])\n df_filter_identifier_label_count['dev_count'] = np.ceil(\n df_filter_identifier_label_count[df_filter_identifier_label_count.columns[0]] * dev_portion).astype(int)\n\n df_result_train = pd.DataFrame()\n df_result_dev = pd.DataFrame()\n\n for ident_label, row in df_filter_identifier_label_count.iterrows():\n total = row[0]\n dev_count = row[1]\n train_count = total - dev_count\n df_train, df_dev = filter_images_by_label(df_filter_labels, ident_label, train_count, dev_count)\n df_result_train = df_result_train.append(df_train)\n df_result_dev = df_result_dev.append(df_dev)\n\n train_label = '{target_dir}/labels_train_{label_postfix}.csv'.format(target_dir=target_dir,\n label_postfix=label_postfix)\n dev_label = '{target_dir}/labels_dev_{label_postfix}.csv'.format(target_dir=target_dir, label_postfix=label_postfix)\n\n print('Split into training and dev sets')\n print('Training set in ' + train_label)\n print(df_result_train.groupby([identifier_label]).agg(['count']))\n print('Dev set in ' + dev_label)\n print(df_result_dev.groupby([identifier_label]).agg(['count']))\n\n df_result_train.to_csv(train_label, index=False)\n df_result_dev.to_csv(dev_label, index=False)\n return\n\n\ndef filter_images_by_label(df_labels, label, train_count, dev_count):\n df_selected_label = df_labels[df_labels.breed.isin([label])]\n df_selected_label_train = df_selected_label.head(train_count)\n df_selected_label_vaidation = df_selected_label.tail(dev_count)\n return df_selected_label_train, df_selected_label_vaidation\n" ]
[ [ "numpy.ceil", "numpy.size", "pandas.DataFrame" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] } ]
ebothmann/heppyplot
[ "dab969879391f70a91c34f71482a9691b9c80141" ]
[ "heppyplot/plot_helpers.py" ]
[ "import math\n\nimport matplotlib.pyplot as plt\nimport matplotlib.gridspec as gridspec\nimport matplotlib.transforms as mtransforms\nfrom mpl_toolkits.axes_grid.anchored_artists import AnchoredText\n\ndef setup_axes(diff=False):\n fig = plt.figure()\n axes = []\n if diff:\n gs = gridspec.GridSpec(2, 1, height_ratios=[2,1])\n main_axis = plt.subplot(gs[0])\n axes.append(plt.subplot(gs[0]))\n axes.append(plt.subplot(gs[1], sharex=main_axis))\n else:\n axes.append(plt.subplot())\n return fig, axes\n\ndef layout_main_and_diff_axis(fig, axes):\n main_axis, diff_axis = axes\n fig.subplots_adjust(hspace=0.0)\n main_axis.spines['bottom'].set_visible(False)\n plt.setp(main_axis.get_xticklabels(), visible=False)\n main_axis.set_xlabel('')\n diff_axis.xaxis.tick_bottom()\n\ndef configure_legend_on_axis(axis, title='', loc='best', borderpad=1.2, draws_background=True):\n legend = axis.legend(loc=loc,\n title=title,\n borderaxespad=borderpad,\n framealpha=0.8,\n frameon=draws_background,\n fancybox=draws_background)\n legend.get_frame().set_color((0.96,0.96,0.96))\n for line in legend.get_lines():\n line.set_alpha(1.0)\n\ndef add_annotation_on_axis(axis, annotation, loc='upper right', borderpad=1.2):\n codes = {'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4,\n 'right': 5, 'center left': 6,'center right': 7,\n 'lower center': 8, 'upper center': 9, 'center': 10}\n at = AnchoredText(annotation,\n codes[loc],\n frameon=False,\n borderpad=borderpad,\n prop=dict(linespacing=2.5))\n axis.add_artist(at)\n\ndef get_major_ticks_within_view_interval(axis):\n interval = axis.get_view_interval()\n ticks_in_view_interval = []\n for tick, loc in zip(axis.get_major_ticks(),\n axis.get_major_locator()()):\n if mtransforms.interval_contains(interval, loc):\n ticks_in_view_interval.append(tick)\n return ticks_in_view_interval\n\ndef set_figure_size_with_width(width):\n params = {'figure.figsize': figure_size_from_width(width)}\n plt.rcParams.update(params)\n\ndef figure_size_from_width(width):\n \"\"\"Returns a single plot figure size in inches given a width in points\"\"\"\n inches_per_point = 1.0/72.27\n golden_mean = (math.sqrt(5)-1.0)/2.0\n inches_width = width * inches_per_point\n fig_height = inches_width*golden_mean\n return [inches_width,fig_height]\n" ]
[ [ "matplotlib.pyplot.subplot", "matplotlib.gridspec.GridSpec", "matplotlib.pyplot.rcParams.update", "matplotlib.transforms.interval_contains", "matplotlib.pyplot.figure" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
SeanNaren/transformers
[ "8d43c71a1ca3ad322cc45008eb66a5611f1e017e", "8d43c71a1ca3ad322cc45008eb66a5611f1e017e" ]
[ "examples/tensorflow/text-classification/run_text_classification.py", "src/transformers/models/speech_to_text/modeling_speech_to_text.py" ]
[ "#!/usr/bin/env python\n# coding=utf-8\n# Copyright 2021 The HuggingFace Inc. team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" Fine-tuning the library models for sequence classification.\"\"\"\n# You can also adapt this script on your own text classification task. Pointers for this are left as comments.\n\nimport logging\nimport os\nimport random\nimport sys\nfrom dataclasses import dataclass, field\nfrom math import ceil\nfrom pathlib import Path\nfrom typing import Optional\n\nimport numpy as np\nfrom datasets import load_dataset\n\nfrom transformers import (\n AutoConfig,\n AutoTokenizer,\n HfArgumentParser,\n PretrainedConfig,\n TFAutoModelForSequenceClassification,\n TrainingArguments,\n set_seed,\n)\nfrom transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME\n\n\nos.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"1\" # Reduce the amount of console output from TF\nimport tensorflow as tf # noqa: E402\n\n\nlogger = logging.getLogger(__name__)\n\n\n# region Helper classes\nclass DataSequence(tf.keras.utils.Sequence):\n # We use a Sequence object to load the data. Although it's completely possible to load your data as Numpy/TF arrays\n # and pass those straight to the Model, this constrains you in a couple of ways. Most notably, it requires all\n # the data to be padded to the length of the longest input example, and it also requires the whole dataset to be\n # loaded into memory. 
If these aren't major problems for you, you can skip the sequence object in your own code!\n def __init__(self, dataset, non_label_column_names, batch_size, labels, shuffle=True):\n super().__init__()\n # Retain all of the columns not present in the original data - these are the ones added by the tokenizer\n self.data = {\n key: dataset[key]\n for key in dataset.features.keys()\n if key not in non_label_column_names and key != \"label\"\n }\n data_lengths = {len(array) for array in self.data.values()}\n assert len(data_lengths) == 1, \"Dataset arrays differ in length!\"\n self.data_length = data_lengths.pop()\n self.num_batches = ceil(self.data_length / batch_size)\n if labels:\n self.labels = np.array(dataset[\"label\"])\n assert len(self.labels) == self.data_length, \"Labels not the same length as input arrays!\"\n else:\n self.labels = None\n self.batch_size = batch_size\n self.shuffle = shuffle\n if self.shuffle:\n # Shuffle the data order\n self.permutation = np.random.permutation(self.data_length)\n else:\n self.permutation = None\n\n def on_epoch_end(self):\n # If we're shuffling, reshuffle the data order after each epoch\n if self.shuffle:\n self.permutation = np.random.permutation(self.data_length)\n\n def __getitem__(self, item):\n # Note that this yields a batch, not a single sample\n batch_start = item * self.batch_size\n batch_end = (item + 1) * self.batch_size\n if self.shuffle:\n data_indices = self.permutation[batch_start:batch_end]\n else:\n data_indices = np.arange(batch_start, batch_end)\n # We want to pad the data as little as possible, so we only pad each batch\n # to the maximum length within that batch. We do that by stacking the variable-\n # length inputs into a ragged tensor and then densifying it.\n batch_input = {\n key: tf.ragged.constant([data[i] for i in data_indices]).to_tensor() for key, data in self.data.items()\n }\n if self.labels is None:\n return batch_input\n else:\n batch_labels = self.labels[data_indices]\n return batch_input, batch_labels\n\n def __len__(self):\n return self.num_batches\n\n\nclass SavePretrainedCallback(tf.keras.callbacks.Callback):\n # Hugging Face models have a save_pretrained() method that saves both the weights and the necessary\n # metadata to allow them to be loaded as a pretrained model in future. This is a simple Keras callback\n # that saves the model with this method after each epoch.\n def __init__(self, output_dir, **kwargs):\n super().__init__()\n self.output_dir = output_dir\n\n def on_epoch_end(self, epoch, logs=None):\n self.model.save_pretrained(self.output_dir)\n\n\n# endregion\n\n# region Command-line arguments\n@dataclass\nclass DataTrainingArguments:\n \"\"\"\n Arguments pertaining to what data we are going to input our model for training and eval.\n\n Using `HfArgumentParser` we can turn this class\n into argparse arguments to be able to specify them on\n the command line.\n \"\"\"\n\n train_file: Optional[str] = field(\n default=None, metadata={\"help\": \"A csv or a json file containing the training data.\"}\n )\n validation_file: Optional[str] = field(\n default=None, metadata={\"help\": \"A csv or a json file containing the validation data.\"}\n )\n test_file: Optional[str] = field(default=None, metadata={\"help\": \"A csv or a json file containing the test data.\"})\n\n max_seq_length: int = field(\n default=128,\n metadata={\n \"help\": \"The maximum total input sequence length after tokenization. 
Sequences longer \"\n \"than this will be truncated, sequences shorter will be padded.\"\n },\n )\n overwrite_cache: bool = field(\n default=False, metadata={\"help\": \"Overwrite the cached preprocessed datasets or not.\"}\n )\n pad_to_max_length: bool = field(\n default=False,\n metadata={\n \"help\": \"Whether to pad all samples to `max_seq_length`. \"\n \"If False, will pad the samples dynamically when batching to the maximum length in the batch.\"\n },\n )\n max_train_samples: Optional[int] = field(\n default=None,\n metadata={\n \"help\": \"For debugging purposes or quicker training, truncate the number of training examples to this \"\n \"value if set.\"\n },\n )\n max_eval_samples: Optional[int] = field(\n default=None,\n metadata={\n \"help\": \"For debugging purposes or quicker training, truncate the number of evaluation examples to this \"\n \"value if set.\"\n },\n )\n max_predict_samples: Optional[int] = field(\n default=None,\n metadata={\n \"help\": \"For debugging purposes or quicker training, truncate the number of predict examples to this \"\n \"value if set.\"\n },\n )\n\n def __post_init__(self):\n train_extension = self.train_file.split(\".\")[-1].lower() if self.train_file is not None else None\n validation_extension = (\n self.validation_file.split(\".\")[-1].lower() if self.validation_file is not None else None\n )\n test_extension = self.test_file.split(\".\")[-1].lower() if self.test_file is not None else None\n extensions = {train_extension, validation_extension, test_extension}\n extensions.discard(None)\n assert len(extensions) != 0, \"Need to supply at least one of --train_file, --validation_file or --test_file!\"\n assert len(extensions) == 1, \"All input files should have the same file extension, either csv or json!\"\n assert \"csv\" in extensions or \"json\" in extensions, \"Input files should have either .csv or .json extensions!\"\n self.input_file_extension = extensions.pop()\n\n\n@dataclass\nclass ModelArguments:\n \"\"\"\n Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.\n \"\"\"\n\n model_name_or_path: str = field(\n metadata={\"help\": \"Path to pretrained model or model identifier from huggingface.co/models\"}\n )\n config_name: Optional[str] = field(\n default=None, metadata={\"help\": \"Pretrained config name or path if not the same as model_name\"}\n )\n tokenizer_name: Optional[str] = field(\n default=None, metadata={\"help\": \"Pretrained tokenizer name or path if not the same as model_name\"}\n )\n cache_dir: Optional[str] = field(\n default=None,\n metadata={\"help\": \"Where do you want to store the pretrained models downloaded from huggingface.co\"},\n )\n model_revision: str = field(\n default=\"main\",\n metadata={\"help\": \"The specific model version to use (can be a branch name, tag name or commit id).\"},\n )\n use_auth_token: bool = field(\n default=False,\n metadata={\n \"help\": \"Will use the token generated when running `transformers-cli login` (necessary to use this script \"\n \"with private models).\"\n },\n )\n\n\n# endregion\n\n\ndef main():\n # region Argument parsing\n # See all possible arguments in src/transformers/training_args.py\n # or by passing the --help flag to this script.\n # We now keep distinct sets of args, for a cleaner separation of concerns.\n\n parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))\n if len(sys.argv) == 2 and sys.argv[1].endswith(\".json\"):\n # If we pass only one argument to the script and it's the path to a json file,\n 
# let's parse it to get our arguments.\n model_args, data_args, training_args = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))\n else:\n model_args, data_args, training_args = parser.parse_args_into_dataclasses()\n output_dir = Path(training_args.output_dir)\n output_dir.mkdir(parents=True, exist_ok=True)\n # endregion\n\n # region Checkpoints\n # Detecting last checkpoint.\n checkpoint = None\n if len(os.listdir(training_args.output_dir)) > 0 and not training_args.overwrite_output_dir:\n if (output_dir / CONFIG_NAME).is_file() and (output_dir / TF2_WEIGHTS_NAME).is_file():\n checkpoint = output_dir\n logger.info(\n f\"Checkpoint detected, resuming training from checkpoint in {training_args.output_dir}. To avoid this\"\n \" behavior, change the `--output_dir` or add `--overwrite_output_dir` to train from scratch.\"\n )\n else:\n raise ValueError(\n f\"Output directory ({training_args.output_dir}) already exists and is not empty. \"\n \"Use --overwrite_output_dir to continue regardless.\"\n )\n\n # endregion\n\n # region Logging\n logging.basicConfig(\n format=\"%(asctime)s - %(levelname)s - %(name)s - %(message)s\",\n datefmt=\"%m/%d/%Y %H:%M:%S\",\n handlers=[logging.StreamHandler(sys.stdout)],\n )\n logger.setLevel(logging.INFO)\n\n logger.info(f\"Training/evaluation parameters {training_args}\")\n # endregion\n\n # region Loading data\n # For CSV/JSON files, this script will use the 'label' field as the label and the 'sentence1' and optionally\n # 'sentence2' fields as inputs if they exist. If not, the first two fields not named label are used if at least two\n # columns are provided. Note that the term 'sentence' can be slightly misleading, as they often contain more than\n # a single grammatical sentence when the task requires it.\n #\n # If the CSVs/JSONs contain only one non-label column, the script does single sentence classification on this\n # single column. You can easily tweak this behavior (see below)\n #\n # In distributed training, the load_dataset function guarantees that only one local process can concurrently\n # download the dataset.\n data_files = {\"train\": data_args.train_file, \"validation\": data_args.validation_file, \"test\": data_args.test_file}\n data_files = {key: file for key, file in data_files.items() if file is not None}\n\n for key in data_files.keys():\n logger.info(f\"Loading a local file for {key}: {data_files[key]}\")\n\n if data_args.input_file_extension == \"csv\":\n # Loading a dataset from local csv files\n datasets = load_dataset(\"csv\", data_files=data_files, cache_dir=model_args.cache_dir)\n else:\n # Loading a dataset from local json files\n datasets = load_dataset(\"json\", data_files=data_files, cache_dir=model_args.cache_dir)\n # See more about loading any type of standard or custom dataset at\n # https://huggingface.co/docs/datasets/loading_datasets.html.\n # endregion\n\n # region Label preprocessing\n # If you've passed us a training set, we try to infer your labels from it\n if \"train\" in datasets:\n # By default we assume that if your label column looks like a float then you're doing regression,\n # and if not then you're doing classification. 
This is something you may want to change!\n is_regression = datasets[\"train\"].features[\"label\"].dtype in [\"float32\", \"float64\"]\n if is_regression:\n num_labels = 1\n else:\n # A useful fast method:\n # https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.unique\n label_list = datasets[\"train\"].unique(\"label\")\n label_list.sort() # Let's sort it for determinism\n num_labels = len(label_list)\n # If you haven't passed a training set, we read label info from the saved model (this happens later)\n else:\n num_labels = None\n label_list = None\n is_regression = None\n # endregion\n\n # region Load pretrained model and tokenizer\n # Set seed before initializing model\n set_seed(training_args.seed)\n #\n # In distributed training, the .from_pretrained methods guarantee that only one local process can concurrently\n # download model & vocab.\n if checkpoint is not None:\n config_path = training_args.output_dir\n elif model_args.config_name:\n config_path = model_args.config_name\n else:\n config_path = model_args.model_name_or_path\n if num_labels is not None:\n config = AutoConfig.from_pretrained(\n config_path,\n num_labels=num_labels,\n cache_dir=model_args.cache_dir,\n revision=model_args.model_revision,\n use_auth_token=True if model_args.use_auth_token else None,\n )\n else:\n config = AutoConfig.from_pretrained(\n config_path,\n cache_dir=model_args.cache_dir,\n revision=model_args.model_revision,\n use_auth_token=True if model_args.use_auth_token else None,\n )\n tokenizer = AutoTokenizer.from_pretrained(\n model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,\n cache_dir=model_args.cache_dir,\n revision=model_args.model_revision,\n use_auth_token=True if model_args.use_auth_token else None,\n )\n if checkpoint is None:\n model_path = model_args.model_name_or_path\n else:\n model_path = checkpoint\n model = TFAutoModelForSequenceClassification.from_pretrained(\n model_path,\n config=config,\n cache_dir=model_args.cache_dir,\n revision=model_args.model_revision,\n use_auth_token=True if model_args.use_auth_token else None,\n )\n # endregion\n\n # region Optimizer, loss and compilation\n optimizer = tf.keras.optimizers.Adam(\n learning_rate=training_args.learning_rate,\n beta_1=training_args.adam_beta1,\n beta_2=training_args.adam_beta2,\n epsilon=training_args.adam_epsilon,\n clipnorm=training_args.max_grad_norm,\n )\n if is_regression:\n loss = tf.keras.losses.MeanSquaredError()\n metrics = []\n else:\n loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)\n metrics = [\"accuracy\"]\n model.compile(optimizer=optimizer, loss=loss, metrics=metrics)\n # endregion\n\n # region Dataset preprocessing\n # Again, we try to have some nice defaults but don't hesitate to tweak to your use case.\n column_names = {col for cols in datasets.column_names.values() for col in cols}\n non_label_column_names = [name for name in column_names if name != \"label\"]\n if \"sentence1\" in non_label_column_names and \"sentence2\" in non_label_column_names:\n sentence1_key, sentence2_key = \"sentence1\", \"sentence2\"\n elif \"sentence1\" in non_label_column_names:\n sentence1_key, sentence2_key = \"sentence1\", None\n else:\n if len(non_label_column_names) >= 2:\n sentence1_key, sentence2_key = non_label_column_names[:2]\n else:\n sentence1_key, sentence2_key = non_label_column_names[0], None\n\n # Padding strategy\n if data_args.pad_to_max_length:\n padding = \"max_length\"\n else:\n # We will pad later, 
dynamically at batch creation, to the max sequence length in each batch\n padding = False\n\n if data_args.max_seq_length > tokenizer.model_max_length:\n logger.warning(\n f\"The max_seq_length passed ({data_args.max_seq_length}) is larger than the maximum length for the \"\n f\"model ({tokenizer.model_max_length}). Using max_seq_length={tokenizer.model_max_length}.\"\n )\n max_seq_length = min(data_args.max_seq_length, tokenizer.model_max_length)\n\n # Ensure that our labels match the model's, if it has some pre-specified\n if \"train\" in datasets:\n if not is_regression and model.config.label2id != PretrainedConfig(num_labels=num_labels).label2id:\n label_name_to_id = model.config.label2id\n if list(sorted(label_name_to_id.keys())) == list(sorted(label_list)):\n label_to_id = label_name_to_id # Use the model's labels\n else:\n logger.warning(\n \"Your model seems to have been trained with labels, but they don't match the dataset: \"\n f\"model labels: {list(sorted(label_name_to_id.keys()))}, dataset labels: {list(sorted(label_list))}.\"\n \"\\nIgnoring the model labels as a result.\",\n )\n label_to_id = {v: i for i, v in enumerate(label_list)}\n elif not is_regression:\n label_to_id = {v: i for i, v in enumerate(label_list)}\n else:\n label_to_id = None\n # Now we've established our label2id, let's overwrite the model config with it.\n model.config.label2id = label_to_id\n if model.config.label2id is not None:\n model.config.id2label = {id: label for label, id in label_to_id.items()}\n else:\n model.config.id2label = None\n else:\n label_to_id = model.config.label2id # Just load the data from the model\n\n if \"validation\" in datasets and model.config.label2id is not None:\n validation_label_list = datasets[\"validation\"].unique(\"label\")\n for val_label in validation_label_list:\n assert val_label in label_to_id, f\"Label {val_label} is in the validation set but not the training set!\"\n\n def preprocess_function(examples):\n # Tokenize the texts\n args = (\n (examples[sentence1_key],) if sentence2_key is None else (examples[sentence1_key], examples[sentence2_key])\n )\n result = tokenizer(*args, padding=padding, max_length=max_seq_length, truncation=True)\n\n # Map labels to IDs\n if model.config.label2id is not None and \"label\" in examples:\n result[\"label\"] = [(model.config.label2id[l] if l != -1 else -1) for l in examples[\"label\"]]\n return result\n\n datasets = datasets.map(preprocess_function, batched=True, load_from_cache_file=not data_args.overwrite_cache)\n\n if \"train\" in datasets:\n train_dataset = datasets[\"train\"]\n if data_args.max_train_samples is not None:\n train_dataset = train_dataset.select(range(data_args.max_train_samples))\n # Log a few random samples from the training set so we can see that it's working as expected:\n for index in random.sample(range(len(train_dataset)), 3):\n logger.info(f\"Sample {index} of the training set: {train_dataset[index]}.\")\n\n if \"validation\" in datasets:\n eval_dataset = datasets[\"validation\"]\n if data_args.max_eval_samples is not None:\n eval_dataset = eval_dataset.select(range(data_args.max_eval_samples))\n\n if \"test\" in datasets:\n predict_dataset = datasets[\"test\"]\n if data_args.max_predict_samples is not None:\n predict_dataset = predict_dataset.select(range(data_args.max_predict_samples))\n\n # endregion\n\n # region Training\n if \"train\" in datasets:\n training_dataset = DataSequence(\n train_dataset, non_label_column_names, batch_size=training_args.per_device_train_batch_size, labels=True\n )\n 
if \"validation\" in datasets:\n eval_dataset = DataSequence(\n eval_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=True\n )\n else:\n eval_dataset = None\n\n callbacks = [SavePretrainedCallback(output_dir=training_args.output_dir)]\n model.fit(\n training_dataset,\n validation_data=eval_dataset,\n epochs=int(training_args.num_train_epochs),\n callbacks=callbacks,\n )\n elif \"validation\" in datasets:\n # If there's a validation dataset but no training set, just evaluate the metrics\n eval_dataset = DataSequence(\n eval_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=True\n )\n logger.info(\"Computing metrics on validation data...\")\n if is_regression:\n loss = model.evaluate(eval_dataset)\n logger.info(f\"Loss: {loss:.5f}\")\n else:\n loss, accuracy = model.evaluate(eval_dataset)\n logger.info(f\"Loss: {loss:.5f}, Accuracy: {accuracy * 100:.4f}%\")\n # endregion\n\n # region Prediction\n if \"test\" in datasets:\n logger.info(\"Doing predictions on Predict dataset...\")\n\n predict_dataset = DataSequence(\n predict_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=False\n )\n predictions = model.predict(predict_dataset)[\"logits\"]\n predictions = np.squeeze(predictions) if is_regression else np.argmax(predictions, axis=1)\n output_predict_file = os.path.join(training_args.output_dir, \"predict_results.txt\")\n with open(output_predict_file, \"w\") as writer:\n writer.write(\"index\\tprediction\\n\")\n for index, item in enumerate(predictions):\n if is_regression:\n writer.write(f\"{index}\\t{item:3.3f}\\n\")\n else:\n item = model.config.id2label[item]\n writer.write(f\"{index}\\t{item}\\n\")\n logger.info(f\"Wrote predictions to {output_predict_file}!\")\n # endregion\n\n\nif __name__ == \"__main__\":\n main()\n", "# coding=utf-8\n# Copyright 2021 The Fairseq Authors and The HuggingFace Inc. team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" PyTorch Speech2Text model. 
\"\"\"\n\n\nimport math\nimport random\nfrom typing import Optional, Tuple\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\nfrom torch.nn import CrossEntropyLoss\n\nfrom ...activations import ACT2FN\nfrom ...file_utils import (\n add_code_sample_docstrings,\n add_start_docstrings,\n add_start_docstrings_to_model_forward,\n replace_return_docstrings,\n)\nfrom ...modeling_outputs import (\n BaseModelOutput,\n BaseModelOutputWithPastAndCrossAttentions,\n Seq2SeqLMOutput,\n Seq2SeqModelOutput,\n)\nfrom ...modeling_utils import PreTrainedModel\nfrom ...utils import logging\nfrom .configuration_speech_to_text import Speech2TextConfig\n\n\nlogger = logging.get_logger(__name__)\n\n_CONFIG_FOR_DOC = \"Speech2TextConfig\"\n_TOKENIZER_FOR_DOC = \"Speech2TextTokenizer\"\n\n\nSPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST = [\n \"facebook/s2t-small-librispeech-asr\",\n # See all Speech2Text models at https://huggingface.co/models?filter=speech_to_text\n]\n\n\n# Copied from transformers.models.bart.modeling_bart.shift_tokens_right\ndef shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):\n \"\"\"\n Shift input ids one token to the right.\n \"\"\"\n shifted_input_ids = input_ids.new_zeros(input_ids.shape)\n shifted_input_ids[:, 1:] = input_ids[:, :-1].clone()\n shifted_input_ids[:, 0] = decoder_start_token_id\n\n assert pad_token_id is not None, \"self.model.config.pad_token_id has to be defined.\"\n # replace possible -100 values in labels by `pad_token_id`\n shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)\n\n return shifted_input_ids\n\n\n# Copied from transformers.models.bart.modeling_bart._make_causal_mask\ndef _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0):\n \"\"\"\n Make causal mask used for bi-directional self-attention.\n \"\"\"\n bsz, tgt_len = input_ids_shape\n mask = torch.full((tgt_len, tgt_len), float(\"-inf\"))\n mask_cond = torch.arange(mask.size(-1))\n mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0)\n mask = mask.to(dtype)\n\n if past_key_values_length > 0:\n mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1)\n return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length)\n\n\n# Copied from transformers.models.bart.modeling_bart._expand_mask\ndef _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):\n \"\"\"\n Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.\n \"\"\"\n bsz, src_len = mask.size()\n tgt_len = tgt_len if tgt_len is not None else src_len\n\n expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)\n\n inverted_mask = 1.0 - expanded_mask\n\n return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min)\n\n\nclass Conv1dSubsampler(nn.Module):\n \"\"\"\n Convolutional subsampler: a stack of 1D convolution (along temporal dimension) followed by non-linear activation\n via gated linear units (https://arxiv.org/abs/1911.08460)\n \"\"\"\n\n def __init__(self, config):\n super(Conv1dSubsampler, self).__init__()\n self.config = config\n self.num_layers = config.num_conv_layers\n self.in_channels = config.input_feat_per_channel * config.input_channels\n self.mid_channels = config.conv_channels\n self.out_channels = config.d_model\n self.kernel_sizes = config.conv_kernel_sizes\n\n self.conv_layers = nn.ModuleList(\n nn.Conv1d(\n self.in_channels if i 
== 0 else self.mid_channels // 2,\n self.mid_channels if i < self.num_layers - 1 else self.out_channels * 2,\n kernel_size=k,\n stride=2,\n padding=k // 2,\n )\n for i, k in enumerate(self.kernel_sizes)\n )\n\n def forward(self, input_features):\n hidden_states = input_features.transpose(1, 2).contiguous() # -> B x (C x D) x T\n for conv in self.conv_layers:\n hidden_states = conv(hidden_states)\n hidden_states = nn.functional.glu(hidden_states, dim=1)\n hidden_states = hidden_states.transpose(1, 2).contiguous() # -> T x B x (C x D)\n return hidden_states\n\n\nclass Speech2TextSinusoidalPositionalEmbedding(nn.Module):\n \"\"\"This module produces sinusoidal positional embeddings of any length.\"\"\"\n\n def __init__(self, num_positions: int, embedding_dim: int, padding_idx: Optional[int] = None):\n super().__init__()\n self.offset = 2\n self.embedding_dim = embedding_dim\n self.padding_idx = padding_idx\n self.make_weights(num_positions + self.offset, embedding_dim, padding_idx)\n\n def make_weights(self, num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None):\n emb_weights = self.get_embedding(num_embeddings, embedding_dim, padding_idx)\n if hasattr(self, \"weights\"):\n # in forward, put the weights on correct device\n emb_weights = emb_weights.to(self.weights.device)\n\n self.weights = nn.Parameter(emb_weights)\n self.weights.requires_grad = False\n self.weights.detach_()\n\n @staticmethod\n def get_embedding(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None):\n \"\"\"\n Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the\n description in Section 3.5 of \"Attention Is All You Need\".\n \"\"\"\n half_dim = embedding_dim // 2\n emb = math.log(10000) / (half_dim - 1)\n emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)\n emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)\n emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1)\n if embedding_dim % 2 == 1:\n # zero pad\n emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)\n if padding_idx is not None:\n emb[padding_idx, :] = 0\n return emb\n\n @torch.no_grad()\n def forward(self, input_ids: torch.Tensor, past_key_values_length: int = 0):\n bsz, seq_len = input_ids.size()\n # Create the position ids from the input token ids. Any padded tokens remain padded.\n position_ids = self.create_position_ids_from_input_ids(input_ids, self.padding_idx, past_key_values_length).to(\n input_ids.device\n )\n\n # expand embeddings if needed\n max_pos = self.padding_idx + 1 + seq_len\n if max_pos > self.weights.size(0):\n self.make_weights(max_pos + self.offset, self.embedding_dim, self.padding_idx)\n\n return self.weights.index_select(0, position_ids.view(-1)).view(bsz, seq_len, -1).detach()\n\n def create_position_ids_from_input_ids(\n self, input_ids: torch.Tensor, padding_idx: int, past_key_values_length: Optional[int] = 0\n ):\n \"\"\"\n Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding\n symbols are ignored. 
This is modified from fairseq's `utils.make_positions`.\n\n Args:\n x: torch.Tensor x:\n Returns: torch.Tensor\n \"\"\"\n # The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA.\n mask = input_ids.ne(padding_idx).int()\n incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask\n return incremental_indices.long() + padding_idx\n\n\n# Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->Speech2Text\nclass Speech2TextAttention(nn.Module):\n \"\"\"Multi-headed attention from 'Attention Is All You Need' paper\"\"\"\n\n def __init__(\n self,\n embed_dim: int,\n num_heads: int,\n dropout: float = 0.0,\n is_decoder: bool = False,\n bias: bool = True,\n ):\n super().__init__()\n self.embed_dim = embed_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.head_dim = embed_dim // num_heads\n assert (\n self.head_dim * num_heads == self.embed_dim\n ), f\"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`: {num_heads}).\"\n self.scaling = self.head_dim ** -0.5\n self.is_decoder = is_decoder\n\n self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias)\n self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias)\n self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)\n self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)\n\n def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):\n return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()\n\n def forward(\n self,\n hidden_states: torch.Tensor,\n key_value_states: Optional[torch.Tensor] = None,\n past_key_value: Optional[Tuple[torch.Tensor]] = None,\n attention_mask: Optional[torch.Tensor] = None,\n layer_head_mask: Optional[torch.Tensor] = None,\n output_attentions: bool = False,\n ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:\n \"\"\"Input shape: Batch x Time x Channel\"\"\"\n\n # if key_value_states are provided this layer is used as a cross-attention layer\n # for the decoder\n is_cross_attention = key_value_states is not None\n bsz, tgt_len, embed_dim = hidden_states.size()\n\n # get query proj\n query_states = self.q_proj(hidden_states) * self.scaling\n # get key, value proj\n if is_cross_attention and past_key_value is not None:\n # reuse k,v, cross_attentions\n key_states = past_key_value[0]\n value_states = past_key_value[1]\n elif is_cross_attention:\n # cross_attentions\n key_states = self._shape(self.k_proj(key_value_states), -1, bsz)\n value_states = self._shape(self.v_proj(key_value_states), -1, bsz)\n elif past_key_value is not None:\n # reuse k, v, self_attention\n key_states = self._shape(self.k_proj(hidden_states), -1, bsz)\n value_states = self._shape(self.v_proj(hidden_states), -1, bsz)\n key_states = torch.cat([past_key_value[0], key_states], dim=2)\n value_states = torch.cat([past_key_value[1], value_states], dim=2)\n else:\n # self_attention\n key_states = self._shape(self.k_proj(hidden_states), -1, bsz)\n value_states = self._shape(self.v_proj(hidden_states), -1, bsz)\n\n if self.is_decoder:\n # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.\n # Further calls to cross_attention layer can then reuse all cross-attention\n # key/value_states (first \"if\" case)\n # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of\n # all previous decoder key/value_states. 
Further calls to uni-directional self-attention\n # can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)\n # if encoder bi-directional self-attention `past_key_value` is always `None`\n past_key_value = (key_states, value_states)\n\n proj_shape = (bsz * self.num_heads, -1, self.head_dim)\n query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)\n key_states = key_states.view(*proj_shape)\n value_states = value_states.view(*proj_shape)\n\n src_len = key_states.size(1)\n attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))\n\n assert attn_weights.size() == (\n bsz * self.num_heads,\n tgt_len,\n src_len,\n ), f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}\"\n\n if attention_mask is not None:\n assert attention_mask.size() == (\n bsz,\n 1,\n tgt_len,\n src_len,\n ), f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}\"\n attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask\n attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)\n\n attn_weights = F.softmax(attn_weights, dim=-1)\n\n if layer_head_mask is not None:\n assert layer_head_mask.size() == (\n self.num_heads,\n ), f\"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}\"\n attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len)\n attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)\n\n if output_attentions:\n # this operation is a bit awkward, but it's required to\n # make sure that attn_weights keeps its gradient.\n # In order to do so, attn_weights have to be reshaped\n # twice and have to be reused in the following\n attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)\n attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)\n else:\n attn_weights_reshaped = None\n\n attn_probs = F.dropout(attn_weights, p=self.dropout, training=self.training)\n\n attn_output = torch.bmm(attn_probs, value_states)\n\n assert attn_output.size() == (\n bsz * self.num_heads,\n tgt_len,\n self.head_dim,\n ), f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}\"\n\n attn_output = (\n attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)\n .transpose(1, 2)\n .reshape(bsz, tgt_len, embed_dim)\n )\n\n attn_output = self.out_proj(attn_output)\n\n return attn_output, attn_weights_reshaped, past_key_value\n\n\nclass Speech2TextEncoderLayer(nn.Module):\n def __init__(self, config: Speech2TextConfig):\n super().__init__()\n self.embed_dim = config.d_model\n self.self_attn = Speech2TextAttention(\n embed_dim=self.embed_dim,\n num_heads=config.encoder_attention_heads,\n dropout=config.attention_dropout,\n )\n self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)\n self.dropout = config.dropout\n self.activation_fn = ACT2FN[config.activation_function]\n self.activation_dropout = config.activation_dropout\n self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim)\n self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim)\n self.final_layer_norm = nn.LayerNorm(self.embed_dim)\n\n def forward(\n self,\n hidden_states: torch.Tensor,\n attention_mask: torch.Tensor,\n layer_head_mask: torch.Tensor,\n output_attentions: bool = False,\n ):\n \"\"\"\n Args:\n hidden_states 
(:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)`\n attention_mask (:obj:`torch.FloatTensor`): attention mask of size\n :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.\n layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size\n :obj:`(config.encoder_attention_heads,)`.\n output_attentions (:obj:`bool`, `optional`):\n Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under\n returned tensors for more detail.\n \"\"\"\n residual = hidden_states\n hidden_states = self.self_attn_layer_norm(hidden_states)\n hidden_states, attn_weights, _ = self.self_attn(\n hidden_states=hidden_states,\n attention_mask=attention_mask,\n layer_head_mask=layer_head_mask,\n output_attentions=output_attentions,\n )\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n hidden_states = residual + hidden_states\n\n residual = hidden_states\n hidden_states = self.final_layer_norm(hidden_states)\n hidden_states = self.activation_fn(self.fc1(hidden_states))\n hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)\n hidden_states = self.fc2(hidden_states)\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n hidden_states = residual + hidden_states\n\n if hidden_states.dtype == torch.float16 and (\n torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any()\n ):\n clamp_value = torch.finfo(hidden_states.dtype).max - 1000\n hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)\n\n outputs = (hidden_states,)\n\n if output_attentions:\n outputs += (attn_weights,)\n\n return outputs\n\n\nclass Speech2TextDecoderLayer(nn.Module):\n def __init__(self, config: Speech2TextConfig):\n super().__init__()\n self.embed_dim = config.d_model\n\n self.self_attn = Speech2TextAttention(\n embed_dim=self.embed_dim,\n num_heads=config.decoder_attention_heads,\n dropout=config.attention_dropout,\n is_decoder=True,\n )\n self.dropout = config.dropout\n self.activation_fn = ACT2FN[config.activation_function]\n self.activation_dropout = config.activation_dropout\n\n self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)\n self.encoder_attn = Speech2TextAttention(\n self.embed_dim,\n config.decoder_attention_heads,\n dropout=config.attention_dropout,\n is_decoder=True,\n )\n self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)\n self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)\n self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim)\n self.final_layer_norm = nn.LayerNorm(self.embed_dim)\n\n def forward(\n self,\n hidden_states: torch.Tensor,\n attention_mask: Optional[torch.Tensor] = None,\n encoder_hidden_states: Optional[torch.Tensor] = None,\n encoder_attention_mask: Optional[torch.Tensor] = None,\n layer_head_mask: Optional[torch.Tensor] = None,\n cross_attn_layer_head_mask: Optional[torch.Tensor] = None,\n past_key_value: Optional[Tuple[torch.Tensor]] = None,\n output_attentions: Optional[bool] = False,\n use_cache: Optional[bool] = True,\n ):\n \"\"\"\n Args:\n hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)`\n attention_mask (:obj:`torch.FloatTensor`): attention mask of size\n :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.\n encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to 
the layer of shape :obj:`(seq_len, batch, embed_dim)`\n encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size\n :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.\n layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size\n :obj:`(encoder_attention_heads,)`.\n cross_attn_layer_head_mask (:obj:`torch.FloatTensor`): mask for cross-attention heads in a given layer of\n size `(decoder_attention_heads,)`.\n past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states\n output_attentions (:obj:`bool`, `optional`):\n Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under\n returned tensors for more detail.\n \"\"\"\n residual = hidden_states\n hidden_states = self.self_attn_layer_norm(hidden_states)\n\n # Self Attention\n # decoder uni-directional self-attention cached key/values tuple is at positions 1,2\n self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None\n # add present self-attn cache to positions 1,2 of present_key_value tuple\n hidden_states, self_attn_weights, present_key_value = self.self_attn(\n hidden_states=hidden_states,\n past_key_value=self_attn_past_key_value,\n attention_mask=attention_mask,\n layer_head_mask=layer_head_mask,\n output_attentions=output_attentions,\n )\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n hidden_states = residual + hidden_states\n\n # Cross-Attention Block\n cross_attn_present_key_value = None\n cross_attn_weights = None\n if encoder_hidden_states is not None:\n residual = hidden_states\n hidden_states = self.encoder_attn_layer_norm(hidden_states)\n\n # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple\n cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None\n hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(\n hidden_states=hidden_states,\n key_value_states=encoder_hidden_states,\n attention_mask=encoder_attention_mask,\n layer_head_mask=cross_attn_layer_head_mask,\n past_key_value=cross_attn_past_key_value,\n output_attentions=output_attentions,\n )\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n hidden_states = residual + hidden_states\n\n # add cross-attn to positions 3,4 of present_key_value tuple\n present_key_value = present_key_value + cross_attn_present_key_value\n\n # Fully Connected\n residual = hidden_states\n hidden_states = self.final_layer_norm(hidden_states)\n hidden_states = self.activation_fn(self.fc1(hidden_states))\n hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)\n hidden_states = self.fc2(hidden_states)\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n hidden_states = residual + hidden_states\n\n outputs = (hidden_states,)\n\n if output_attentions:\n outputs += (self_attn_weights, cross_attn_weights)\n\n if use_cache:\n outputs += (present_key_value,)\n\n return outputs\n\n\nclass Speech2TextPreTrainedModel(PreTrainedModel):\n config_class = Speech2TextConfig\n base_model_prefix = \"model\"\n\n def _init_weights(self, module):\n std = self.config.init_std\n if isinstance(module, (nn.Linear, nn.Conv1d)):\n module.weight.data.normal_(mean=0.0, std=std)\n if module.bias is not None:\n module.bias.data.zero_()\n elif isinstance(module, nn.Embedding):\n 
module.weight.data.normal_(mean=0.0, std=std)\n if module.padding_idx is not None:\n module.weight.data[module.padding_idx].zero_()\n\n def _get_subsampled_output_lengths(self, input_lengths: torch.LongTensor):\n \"\"\"\n Computes the output length of the convolutional layers\n \"\"\"\n\n for i in range(self.config.num_conv_layers):\n input_lengths = (input_lengths - 1) // 2 + 1\n\n return input_lengths\n\n def _get_subsampled_encoder_attn_mask(self, attention_mask):\n # generate creates 3D attention mask, because of the shape of input_features\n # convert it to 2D if thats the case\n if len(attention_mask.shape) > 2:\n attention_mask = attention_mask[:, :, -1]\n\n subsampled_lengths = self._get_subsampled_output_lengths(attention_mask.sum(-1))\n max_len = subsampled_lengths.max().item()\n bsz = attention_mask.size()[0]\n attention_mask = torch.zeros((bsz, max_len), dtype=attention_mask.dtype, device=attention_mask.device)\n\n # these two operations makes sure that all values\n # before the output lengths indices are attended to\n attention_mask[(torch.arange(bsz, device=attention_mask.device), subsampled_lengths - 1)] = 1\n attention_mask = attention_mask.flip([-1]).cumsum(-1).flip([-1]).long()\n return attention_mask\n\n\nSPEECH_TO_TEXT_START_DOCSTRING = r\"\"\"\n This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic\n methods the library implements for all its model (such as downloading or saving, resizing the input embeddings,\n pruning heads etc.)\n\n This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__\n subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to\n general usage and behavior.\n\n Parameters:\n config (:class:`~transformers.Speech2TextConfig`):\n Model configuration class with all the parameters of the model. Initializing with a config file does not\n load the weights associated with the model, only the configuration. Check out the\n :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.\n\"\"\"\n\nSPEECH_TO_TEXT_INPUTS_DOCSTRING = r\"\"\"\n Args:\n input_features (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length, feature_size)`):\n Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be obtained\n by loading a ``.flac`` or ``.wav`` audio file into an array of type :obj:`List[float]` or a\n :obj:`numpy.ndarray`, *e.g.* via the soundfile library (``pip install soundfile``). To prepare the array\n into :obj:`input_features`, the :class:`~transformers.Speech2TextTokenizer` should be used for extracting\n the fbank features, padding and conversion into a tensor of type :obj:`torch.FloatTensor`. See\n :meth:`~transformers.Speech2TextTokenizer.__call__`\n attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):\n Mask to avoid performing convolution and attention on padding token indices. Mask values selected in ``[0,\n 1]``:\n\n - 1 for tokens that are **not masked**,\n - 0 for tokens that are **masked**.\n\n `What are attention masks? <../glossary.html#attention-mask>`__\n decoder_input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):\n Indices of decoder input sequence tokens in the vocabulary.\n\n Indices can be obtained using :class:`~transformers.SpeechToTextTokenizer`. 
See\n :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for\n details.\n\n `What are decoder input IDs? <../glossary.html#decoder-input-ids>`__\n\n SpeechToText uses the :obj:`eos_token_id` as the starting token for :obj:`decoder_input_ids` generation. If\n :obj:`past_key_values` is used, optionally only the last :obj:`decoder_input_ids` have to be input (see\n :obj:`past_key_values`).\n decoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):\n Default behavior: generate a tensor that ignores pad tokens in :obj:`decoder_input_ids`. Causal mask will\n also be used by default.\n\n If you want to change padding behavior, you should read\n :func:`modeling_speech_to_text._prepare_decoder_inputs` and modify to your needs. See diagram 1 in `the\n paper <https://arxiv.org/abs/1910.13461>`__ for more information on the default strategy.\n head_mask (:obj:`torch.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`):\n Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in ``[0, 1]``:\n\n - 1 indicates the head is **not masked**,\n - 0 indicates the head is **masked**.\n\n decoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):\n Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``:\n\n - 1 indicates the head is **not masked**,\n - 0 indicates the head is **masked**.\n\n cross_attn_head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):\n Mask to nullify selected heads of the cross-attention modules. Mask values selected in ``[0, 1]``:\n\n - 1 indicates the head is **not masked**,\n - 0 indicates the head is **masked**.\n\n encoder_outputs (:obj:`tuple(tuple(torch.FloatTensor))`, `optional`):\n Tuple consists of (:obj:`last_hidden_state`, `optional`: :obj:`hidden_states`, `optional`:\n :obj:`attentions`) :obj:`last_hidden_state` of shape :obj:`(batch_size, sequence_length, hidden_size)`,\n `optional`) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the\n cross-attention of the decoder.\n past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):\n Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up decoding.\n\n If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`\n (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`\n instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.\n decoder_inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, target_sequence_length, hidden_size)`, `optional`):\n Optionally, instead of passing :obj:`decoder_input_ids` you can choose to directly pass an embedded\n representation. If :obj:`past_key_values` is used, optionally only the last :obj:`decoder_inputs_embeds`\n have to be input (see :obj:`past_key_values`). 
This is useful if you want more control over how to convert\n :obj:`decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.\n\n If :obj:`decoder_input_ids` and :obj:`decoder_inputs_embeds` are both unset, :obj:`decoder_inputs_embeds`\n takes the value of :obj:`inputs_embeds`.\n use_cache (:obj:`bool`, `optional`):\n If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up\n decoding (see :obj:`past_key_values`).\n output_attentions (:obj:`bool`, `optional`):\n Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned\n tensors for more detail.\n output_hidden_states (:obj:`bool`, `optional`):\n Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for\n more detail.\n return_dict (:obj:`bool`, `optional`):\n Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.\n\"\"\"\n\n\nclass Speech2TextEncoder(Speech2TextPreTrainedModel):\n \"\"\"\n Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a\n :class:`Speech2TextEncoderLayer`.\n\n Args:\n config: Speech2TextConfig\n embed_tokens (torch.nn.Embedding): output embedding\n \"\"\"\n\n def __init__(self, config: Speech2TextConfig):\n super().__init__(config)\n\n self.dropout = config.dropout\n self.layerdrop = config.encoder_layerdrop\n\n embed_dim = config.d_model\n self.padding_idx = config.pad_token_id\n self.max_source_positions = config.max_source_positions\n self.embed_scale = math.sqrt(embed_dim) if config.scale_embedding else 1.0\n\n self.conv = Conv1dSubsampler(config)\n\n self.embed_positions = Speech2TextSinusoidalPositionalEmbedding(\n self.max_source_positions,\n embed_dim,\n self.padding_idx,\n )\n self.layers = nn.ModuleList([Speech2TextEncoderLayer(config) for _ in range(config.encoder_layers)])\n self.layer_norm = nn.LayerNorm(config.d_model)\n\n self.init_weights()\n\n def forward(\n self,\n input_features,\n attention_mask=None,\n head_mask=None,\n output_attentions=None,\n output_hidden_states=None,\n return_dict=None,\n ):\n r\"\"\"\n Args:\n input_features (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length, feature_size)`):\n Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be\n obtained by loading a ``.flac`` or ``.wav`` audio file into an array of type :obj:`List[float]` or a\n :obj:`numpy.ndarray`, *e.g.* via the soundfile library (``pip install soundfile``). To prepare the\n array into :obj:`input_features`, the :class:`~transformers.Speech2TextTokenizer` should be used for\n extracting the fbank features, padding and conversion into a tensor of type :obj:`torch.FloatTensor`.\n See :meth:`~transformers.Speech2TextTokenizer.__call__`\n attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):\n Mask to avoid performing convolution and attention on padding token indices. Mask values selected in\n ``[0, 1]``:\n\n - 1 for tokens that are **not masked**,\n - 0 for tokens that are **masked**.\n\n `What are attention masks? <../glossary.html#attention-mask>`__\n head_mask (:obj:`torch.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`):\n Mask to nullify selected heads of the attention modules. 
Mask values selected in ``[0, 1]``:\n\n - 1 indicates the head is **not masked**,\n - 0 indicates the head is **masked**.\n\n output_attentions (:obj:`bool`, `optional`):\n Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under\n returned tensors for more detail.\n output_hidden_states (:obj:`bool`, `optional`):\n Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors\n for more detail.\n return_dict (:obj:`bool`, `optional`):\n Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.\n \"\"\"\n output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions\n output_hidden_states = (\n output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states\n )\n return_dict = return_dict if return_dict is not None else self.config.use_return_dict\n\n if attention_mask is not None:\n attention_mask = self._get_subsampled_encoder_attn_mask(attention_mask)\n\n inputs_embeds = self.conv(input_features)\n inputs_embeds = self.embed_scale * inputs_embeds\n\n if attention_mask is None:\n padding_mask = torch.zeros_like(inputs_embeds, dtype=torch.long)\n else:\n padding_mask = attention_mask.ne(1).long()\n embed_pos = self.embed_positions(padding_mask)\n\n hidden_states = inputs_embeds + embed_pos\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n\n # expand attention_mask\n if attention_mask is not None:\n # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]\n attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)\n\n encoder_states = () if output_hidden_states else None\n all_attentions = () if output_attentions else None\n\n # check if head_mask has a correct number of layers specified if desired\n if head_mask is not None:\n assert head_mask.size()[0] == (\n len(self.layers)\n ), f\"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}.\"\n\n for idx, encoder_layer in enumerate(self.layers):\n if output_hidden_states:\n encoder_states = encoder_states + (hidden_states,)\n # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)\n dropout_probability = random.uniform(0, 1)\n if self.training and (dropout_probability < self.layerdrop): # skip the layer\n layer_outputs = (None, None)\n else:\n if getattr(self.config, \"gradient_checkpointing\", False) and self.training:\n\n def create_custom_forward(module):\n def custom_forward(*inputs):\n return module(*inputs, output_attentions)\n\n return custom_forward\n\n layer_outputs = torch.utils.checkpoint.checkpoint(\n create_custom_forward(encoder_layer),\n hidden_states,\n attention_mask,\n (head_mask[idx] if head_mask is not None else None),\n )\n else:\n layer_outputs = encoder_layer(\n hidden_states,\n attention_mask,\n layer_head_mask=(head_mask[idx] if head_mask is not None else None),\n output_attentions=output_attentions,\n )\n\n hidden_states = layer_outputs[0]\n\n if output_attentions:\n all_attentions = all_attentions + (layer_outputs[1],)\n\n hidden_states = self.layer_norm(hidden_states)\n if output_hidden_states:\n encoder_states = encoder_states + (hidden_states,)\n\n if not return_dict:\n return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)\n return BaseModelOutput(\n last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions\n )\n\n\nclass 
Speech2TextDecoder(Speech2TextPreTrainedModel):\n \"\"\"\n Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a :class:`Speech2TextDecoderLayer`\n\n Args:\n config: Speech2TextConfig\n embed_tokens (torch.nn.Embedding): output embedding\n \"\"\"\n\n def __init__(self, config: Speech2TextConfig):\n super().__init__(config)\n self.dropout = config.dropout\n self.layerdrop = config.decoder_layerdrop\n self.padding_idx = config.pad_token_id\n self.max_target_positions = config.max_target_positions\n self.embed_scale = math.sqrt(config.d_model) if config.scale_embedding else 1.0\n\n self.embed_tokens = nn.Embedding(config.vocab_size, config.d_model, self.padding_idx)\n\n self.embed_positions = Speech2TextSinusoidalPositionalEmbedding(\n self.max_target_positions,\n config.d_model,\n self.padding_idx,\n )\n self.layers = nn.ModuleList([Speech2TextDecoderLayer(config) for _ in range(config.decoder_layers)])\n self.layer_norm = nn.LayerNorm(config.d_model)\n\n self.init_weights()\n\n def get_input_embeddings(self):\n return self.embed_tokens\n\n def set_input_embeddings(self, value):\n self.embed_tokens = value\n\n def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length):\n # create causal mask\n # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]\n combined_attention_mask = None\n if input_shape[-1] > 1:\n combined_attention_mask = _make_causal_mask(\n input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length\n ).to(self.device)\n\n if attention_mask is not None:\n # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]\n expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])\n combined_attention_mask = (\n expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask\n )\n\n return combined_attention_mask\n\n def forward(\n self,\n input_ids=None,\n attention_mask=None,\n encoder_hidden_states=None,\n encoder_attention_mask=None,\n head_mask=None,\n cross_attn_head_mask=None,\n past_key_values=None,\n inputs_embeds=None,\n use_cache=None,\n output_attentions=None,\n output_hidden_states=None,\n return_dict=None,\n ):\n r\"\"\"\n Args:\n input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):\n Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you\n provide it.\n\n Indices can be obtained using :class:`~transformers.Speech2TextTokenizer`. See\n :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`\n for details.\n\n `What are input IDs? <../glossary.html#input-ids>`__\n attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):\n Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:\n\n - 1 for tokens that are **not masked**,\n - 0 for tokens that are **masked**.\n\n `What are attention masks? <../glossary.html#attention-mask>`__\n encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`):\n Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention\n of the decoder.\n encoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`):\n Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. 
Mask values\n selected in ``[0, 1]``:\n\n - 1 for tokens that are **not masked**,\n - 0 for tokens that are **masked**.\n\n `What are attention masks? <../glossary.html#attention-mask>`__\n head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):\n Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:\n\n - 1 indicates the head is **not masked**,\n - 0 indicates the head is **masked**.\n\n cross_attn_head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):\n Mask to nullify selected heads of the attention modules in the encoder to avoid performing cross-attention\n on hidden heads. Mask values selected in ``[0, 1]``:\n\n - 1 indicates the head is **not masked**,\n - 0 indicates the head is **masked**.\n\n past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):\n Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up\n decoding.\n\n If :obj:`past_key_values` are used, the user can optionally input only the last\n :obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of\n shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size,\n sequence_length)`.\n inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):\n Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded\n representation. This is useful if you want more control over how to convert :obj:`input_ids` indices\n into associated vectors than the model's internal embedding lookup matrix.\n output_attentions (:obj:`bool`, `optional`):\n Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under\n returned tensors for more detail.\n output_hidden_states (:obj:`bool`, `optional`):\n Whether or not to return the hidden states of all layers. 
See ``hidden_states`` under returned tensors\n for more detail.\n return_dict (:obj:`bool`, `optional`):\n Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.\n \"\"\"\n output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions\n output_hidden_states = (\n output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states\n )\n use_cache = use_cache if use_cache is not None else self.config.use_cache\n return_dict = return_dict if return_dict is not None else self.config.use_return_dict\n\n # retrieve input_ids and inputs_embeds\n if input_ids is not None and inputs_embeds is not None:\n raise ValueError(\"You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time\")\n elif input_ids is not None:\n input_shape = input_ids.size()\n input_ids = input_ids.view(-1, input_shape[-1])\n elif inputs_embeds is not None:\n input_shape = inputs_embeds.size()[:-1]\n else:\n raise ValueError(\"You have to specify either decoder_input_ids or decoder_inputs_embeds\")\n\n # past_key_values_length\n past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0\n\n if inputs_embeds is None:\n inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale\n\n attention_mask = self._prepare_decoder_attention_mask(\n attention_mask, input_shape, inputs_embeds, past_key_values_length\n )\n\n # expand encoder attention mask\n if encoder_hidden_states is not None and encoder_attention_mask is not None:\n encoder_attention_mask = self._get_subsampled_encoder_attn_mask(encoder_attention_mask)\n # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]\n encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])\n\n # embed positions\n positions = self.embed_positions(input_ids, past_key_values_length=past_key_values_length)\n\n hidden_states = inputs_embeds + positions\n hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)\n\n # decoder layers\n all_hidden_states = () if output_hidden_states else None\n all_self_attns = () if output_attentions else None\n all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None\n next_decoder_cache = () if use_cache else None\n\n # check if head_mask/cross_attn_head_mask has a correct number of layers specified if desired\n for attn_mask, mask_name in zip([head_mask, cross_attn_head_mask], [\"head_mask\", \"cross_attn_head_mask\"]):\n if attn_mask is not None:\n assert attn_mask.size()[0] == (\n len(self.layers)\n ), f\"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for {attn_mask.size()[0]}.\"\n for idx, decoder_layer in enumerate(self.layers):\n # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)\n if output_hidden_states:\n all_hidden_states += (hidden_states,)\n dropout_probability = random.uniform(0, 1)\n if self.training and (dropout_probability < self.layerdrop):\n continue\n\n past_key_value = past_key_values[idx] if past_key_values is not None else None\n\n if getattr(self.config, \"gradient_checkpointing\", False) and self.training:\n\n if use_cache:\n logger.warning(\n \"`use_cache = True` is incompatible with `config.gradient_checkpointing = True`. 
Setting `use_cache = False`...\"\n )\n use_cache = False\n\n def create_custom_forward(module):\n def custom_forward(*inputs):\n # None for past_key_value\n return module(*inputs, output_attentions, use_cache)\n\n return custom_forward\n\n layer_outputs = torch.utils.checkpoint.checkpoint(\n create_custom_forward(decoder_layer),\n hidden_states,\n attention_mask,\n encoder_hidden_states,\n encoder_attention_mask,\n head_mask[idx] if head_mask is not None else None,\n cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None,\n None,\n )\n else:\n\n layer_outputs = decoder_layer(\n hidden_states,\n attention_mask=attention_mask,\n encoder_hidden_states=encoder_hidden_states,\n encoder_attention_mask=encoder_attention_mask,\n layer_head_mask=(head_mask[idx] if head_mask is not None else None),\n cross_attn_layer_head_mask=(\n cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None\n ),\n past_key_value=past_key_value,\n output_attentions=output_attentions,\n use_cache=use_cache,\n )\n hidden_states = layer_outputs[0]\n\n if use_cache:\n next_decoder_cache += (layer_outputs[3 if output_attentions else 1],)\n\n if output_attentions:\n all_self_attns += (layer_outputs[1],)\n\n if encoder_hidden_states is not None:\n all_cross_attentions += (layer_outputs[2],)\n\n hidden_states = self.layer_norm(hidden_states)\n # add hidden states from the last decoder layer\n if output_hidden_states:\n all_hidden_states += (hidden_states,)\n\n next_cache = next_decoder_cache if use_cache else None\n if not return_dict:\n return tuple(\n v\n for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions]\n if v is not None\n )\n return BaseModelOutputWithPastAndCrossAttentions(\n last_hidden_state=hidden_states,\n past_key_values=next_cache,\n hidden_states=all_hidden_states,\n attentions=all_self_attns,\n cross_attentions=all_cross_attentions,\n )\n\n\n@add_start_docstrings(\n \"The bare Speech2Text Model outputting raw hidden-states without any specific head on top.\",\n SPEECH_TO_TEXT_START_DOCSTRING,\n)\nclass Speech2TextModel(Speech2TextPreTrainedModel):\n def __init__(self, config: Speech2TextConfig):\n super().__init__(config)\n\n self.encoder = Speech2TextEncoder(config)\n self.decoder = Speech2TextDecoder(config)\n\n self.init_weights()\n\n def get_input_embeddings(self):\n return self.decoder.embed_tokens\n\n def set_input_embeddings(self, value):\n self.decoder.embed_tokens = value\n\n def get_encoder(self):\n return self.encoder\n\n def get_decoder(self):\n return self.decoder\n\n @add_start_docstrings_to_model_forward(SPEECH_TO_TEXT_INPUTS_DOCSTRING)\n @add_code_sample_docstrings(\n tokenizer_class=_TOKENIZER_FOR_DOC,\n checkpoint=\"s2t_transformer_s\",\n output_type=Seq2SeqModelOutput,\n config_class=_CONFIG_FOR_DOC,\n )\n def forward(\n self,\n input_features=None,\n attention_mask=None,\n decoder_input_ids=None,\n decoder_attention_mask=None,\n head_mask=None,\n decoder_head_mask=None,\n cross_attn_head_mask=None,\n encoder_outputs=None,\n past_key_values=None,\n decoder_inputs_embeds=None,\n use_cache=None,\n output_attentions=None,\n output_hidden_states=None,\n return_dict=None,\n ):\n output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions\n output_hidden_states = (\n output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states\n )\n use_cache = use_cache if use_cache is not None else self.config.use_cache\n return_dict = return_dict if return_dict 
is not None else self.config.use_return_dict\n\n if encoder_outputs is None:\n encoder_outputs = self.encoder(\n input_features,\n attention_mask=attention_mask,\n head_mask=head_mask,\n output_attentions=output_attentions,\n output_hidden_states=output_hidden_states,\n return_dict=return_dict,\n )\n # If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True\n elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):\n encoder_outputs = BaseModelOutput(\n last_hidden_state=encoder_outputs[0],\n hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,\n attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,\n )\n\n # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn)\n decoder_outputs = self.decoder(\n input_ids=decoder_input_ids,\n attention_mask=decoder_attention_mask,\n encoder_hidden_states=encoder_outputs[0],\n encoder_attention_mask=attention_mask,\n head_mask=decoder_head_mask,\n cross_attn_head_mask=cross_attn_head_mask,\n past_key_values=past_key_values,\n inputs_embeds=decoder_inputs_embeds,\n use_cache=use_cache,\n output_attentions=output_attentions,\n output_hidden_states=output_hidden_states,\n return_dict=return_dict,\n )\n\n if not return_dict:\n return decoder_outputs + encoder_outputs\n\n return Seq2SeqModelOutput(\n last_hidden_state=decoder_outputs.last_hidden_state,\n past_key_values=decoder_outputs.past_key_values,\n decoder_hidden_states=decoder_outputs.hidden_states,\n decoder_attentions=decoder_outputs.attentions,\n cross_attentions=decoder_outputs.cross_attentions,\n encoder_last_hidden_state=encoder_outputs.last_hidden_state,\n encoder_hidden_states=encoder_outputs.hidden_states,\n encoder_attentions=encoder_outputs.attentions,\n )\n\n\n@add_start_docstrings(\n \"The Speech2Text Model with a language modeling head. 
Can be used for summarization.\",\n SPEECH_TO_TEXT_START_DOCSTRING,\n)\nclass Speech2TextForConditionalGeneration(Speech2TextPreTrainedModel):\n base_model_prefix = \"model\"\n _keys_to_ignore_on_load_missing = [\n r\"encoder\\.version\",\n r\"decoder\\.version\",\n r\"model.encoder.embed_positions.weights\",\n r\"model.decoder.embed_positions.weights\",\n ]\n _keys_to_ignore_on_save = [\n r\"model.encoder.embed_positions.weights\",\n r\"model.decoder.embed_positions.weights\",\n ]\n\n def __init__(self, config: Speech2TextConfig):\n super().__init__(config)\n self.model = Speech2TextModel(config)\n self.lm_head = nn.Linear(config.d_model, self.config.vocab_size, bias=False)\n\n self.init_weights()\n\n def get_encoder(self):\n return self.model.get_encoder()\n\n def get_decoder(self):\n return self.model.get_decoder()\n\n def resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding:\n new_embeddings = super().resize_token_embeddings(new_num_tokens)\n return new_embeddings\n\n def get_output_embeddings(self):\n return self.lm_head\n\n def set_output_embeddings(self, new_embeddings):\n self.lm_head = new_embeddings\n\n @add_start_docstrings_to_model_forward(SPEECH_TO_TEXT_INPUTS_DOCSTRING)\n @replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)\n def forward(\n self,\n input_features=None,\n attention_mask=None,\n decoder_input_ids=None,\n decoder_attention_mask=None,\n head_mask=None,\n decoder_head_mask=None,\n cross_attn_head_mask=None,\n encoder_outputs=None,\n past_key_values=None,\n decoder_inputs_embeds=None,\n labels=None,\n use_cache=None,\n output_attentions=None,\n output_hidden_states=None,\n return_dict=None,\n ):\n r\"\"\"\n labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):\n Labels for computing the language modeling loss. Indices should either be in ``[0, ...,\n config.vocab_size]`` or -100 (see ``input_ids`` docstring). 
Tokens with indices set to ``-100`` are ignored\n (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``.\n\n Returns:\n\n Example::\n\n >>> import torch\n >>> from transformers import Speech2TextProcessor, Speech2TextForConditionalGeneration\n >>> from datasets import load_dataset\n >>> import soundfile as sf\n\n >>> model = Speech2TextForConditionalGeneration.from_pretrained(\"facebook/s2t-small-librispeech-asr\")\n >>> processor = Speech2TextProcessor.from_pretrained(\"facebook/s2t-small-librispeech-asr\")\n\n >>> def map_to_array(batch):\n >>> speech, _ = sf.read(batch[\"file\"])\n >>> batch[\"speech\"] = speech\n >>> return batch\n\n >>> ds = load_dataset(\"patrickvonplaten/librispeech_asr_dummy\", \"clean\", split=\"validation\")\n >>> ds = ds.map(map_to_array)\n\n >>> input_features = processor(ds[\"speech\"][0], sampling_rate=16_000, return_tensors=\"pt\").input_features # Batch size 1\n >>> generated_ids = model.generate(input_ids=input_features)\n\n >>> transcription = processor.batch_decode(generated_ids)\n \"\"\"\n return_dict = return_dict if return_dict is not None else self.config.use_return_dict\n\n if labels is not None:\n if decoder_input_ids is None:\n decoder_input_ids = shift_tokens_right(\n labels, self.config.pad_token_id, self.config.decoder_start_token_id\n )\n\n outputs = self.model(\n input_features,\n attention_mask=attention_mask,\n decoder_input_ids=decoder_input_ids,\n encoder_outputs=encoder_outputs,\n decoder_attention_mask=decoder_attention_mask,\n head_mask=head_mask,\n decoder_head_mask=decoder_head_mask,\n cross_attn_head_mask=cross_attn_head_mask,\n past_key_values=past_key_values,\n decoder_inputs_embeds=decoder_inputs_embeds,\n use_cache=use_cache,\n output_attentions=output_attentions,\n output_hidden_states=output_hidden_states,\n return_dict=return_dict,\n )\n lm_logits = self.lm_head(outputs[0])\n\n loss = None\n if labels is not None:\n loss_fct = CrossEntropyLoss()\n loss = loss_fct(lm_logits.view(-1, self.config.vocab_size), labels.view(-1))\n\n if not return_dict:\n output = (lm_logits,) + outputs[1:]\n return ((loss,) + output) if loss is not None else output\n\n return Seq2SeqLMOutput(\n loss=loss,\n logits=lm_logits,\n past_key_values=outputs.past_key_values,\n decoder_hidden_states=outputs.decoder_hidden_states,\n decoder_attentions=outputs.decoder_attentions,\n cross_attentions=outputs.cross_attentions,\n encoder_last_hidden_state=outputs.encoder_last_hidden_state,\n encoder_hidden_states=outputs.encoder_hidden_states,\n encoder_attentions=outputs.encoder_attentions,\n )\n\n def prepare_inputs_for_generation(\n self,\n decoder_input_ids,\n past=None,\n attention_mask=None,\n head_mask=None,\n use_cache=None,\n encoder_outputs=None,\n **kwargs\n ):\n # cut decoder_input_ids if past is used\n if past is not None:\n decoder_input_ids = decoder_input_ids[:, -1:]\n\n return {\n \"encoder_outputs\": encoder_outputs,\n \"past_key_values\": past,\n \"decoder_input_ids\": decoder_input_ids,\n \"attention_mask\": attention_mask,\n \"head_mask\": head_mask,\n \"use_cache\": use_cache, # change this to avoid caching (presumably for debugging)\n }\n\n @staticmethod\n def _reorder_cache(past, beam_idx):\n reordered_past = ()\n for layer_past in past:\n reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)\n return reordered_past\n" ]
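The Speech2Text encoder in the file above halves the audio frame count twice with stride-2 convolutions, so the padding mask must be recomputed at the subsampled rate (`_get_subsampled_output_lengths` and `_get_subsampled_encoder_attn_mask`). A minimal standalone sketch of that logic, assuming the default two convolution layers; the helper names here are ours, not part of the transformers API:

import torch

def subsampled_lengths(input_lengths, num_conv_layers=2):
    # Each stride-2 conv layer roughly halves the sequence length.
    for _ in range(num_conv_layers):
        input_lengths = (input_lengths - 1) // 2 + 1
    return input_lengths

def subsampled_attention_mask(attention_mask):
    lengths = subsampled_lengths(attention_mask.sum(-1))
    bsz, max_len = attention_mask.size(0), int(lengths.max())
    mask = torch.zeros((bsz, max_len), dtype=attention_mask.dtype)
    # Mark the last valid subsampled position, then flip-cumsum-flip so
    # every position up to and including that marker becomes 1.
    mask[torch.arange(bsz), lengths - 1] = 1
    return mask.flip([-1]).cumsum(-1).flip([-1]).long()

mask = torch.tensor([[1] * 20, [1] * 12 + [0] * 8])
print(subsampled_attention_mask(mask))
# tensor([[1, 1, 1, 1, 1],
#         [1, 1, 1, 0, 0]])

The flip-cumsum-flip idiom turns a one-hot marker at the last valid frame into a prefix mask without an explicit Python loop.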
[ [ "tensorflow.keras.losses.SparseCategoricalCrossentropy", "tensorflow.keras.losses.MeanSquaredError", "numpy.arange", "numpy.squeeze", "tensorflow.ragged.constant", "tensorflow.keras.optimizers.Adam", "numpy.random.permutation", "numpy.argmax", "numpy.array" ], [ "torch.nn.functional.softmax", "torch.nn.functional.glu", "torch.nn.functional.dropout", "torch.zeros", "torch.cat", "torch.sin", "torch.nn.Embedding", "torch.no_grad", "torch.finfo", "torch.nn.CrossEntropyLoss", "torch.bmm", "torch.arange", "torch.cos", "torch.nn.Parameter", "torch.isinf", "torch.zeros_like", "torch.nn.Linear", "torch.nn.Conv1d", "torch.isnan", "torch.nn.LayerNorm", "torch.clamp", "torch.cumsum" ] ]
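The `apis` arrays above enumerate the fully qualified library calls detected in each source file of the record. One plausible way to derive such a list, sketched here with the standard-library `ast` module (an assumption about the extraction method, not necessarily the pipeline used for this dataset; it also ignores import aliasing):

import ast

def extract_api_calls(source):
    # Collect dotted call targets such as torch.nn.Linear or torch.zeros.
    calls = set()
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Call):
            parts = []
            func = node.func
            while isinstance(func, ast.Attribute):
                parts.append(func.attr)
                func = func.value
            if isinstance(func, ast.Name):
                parts.append(func.id)
                calls.add(".".join(reversed(parts)))
    return sorted(calls)

print(extract_api_calls("import torch\nx = torch.zeros(3)\ny = torch.nn.Linear(3, 3)"))
# ['torch.nn.Linear', 'torch.zeros']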
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "2.7", "2.2", "2.3", "2.4", "2.5", "2.6" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
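The `possible_versions` field pins each file to the library releases it plausibly runs under. A hedged sketch of a compatibility check against such an entry, treating each listed version as a major.minor prefix so that "2.4" matches any 2.4.x release (that reading of the field is our assumption):

from packaging.version import parse as parse_version

def version_allowed(installed, allowed):
    # True when the installed major.minor matches one of the allowed entries.
    v = parse_version(installed)
    return any(
        (v.major, v.minor) == (parse_version(a).major, parse_version(a).minor)
        for a in allowed
    )

tf_versions = ["1.10", "2.7", "2.2", "2.3", "2.4", "2.5", "2.6"]
print(version_allowed("2.4.1", tf_versions))  # True
print(version_allowed("2.8.0", tf_versions))  # False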
ParticularMiner/dask
[ "f40ef97ac802efb6d8bef03b03c6357cf871bc0a" ]
[ "dask/dataframe/io/parquet/fastparquet.py" ]
[ "import copy\nimport pickle\nimport threading\nimport warnings\nfrom collections import OrderedDict, defaultdict\nfrom contextlib import ExitStack\n\nimport numpy as np\nimport pandas as pd\nimport tlz as toolz\nfrom packaging.version import parse as parse_version\n\nfrom dask.core import flatten\n\ntry:\n import fastparquet\n from fastparquet import ParquetFile\n from fastparquet.util import ex_from_sep, get_file_scheme, groupby_types, val_to_num\n from fastparquet.writer import make_part_file, partition_on_columns\nexcept ImportError:\n pass\n\nfrom dask.base import tokenize\n\n#########################\n# Fastparquet interface #\n#########################\nfrom dask.dataframe.io.parquet.utils import (\n Engine,\n _get_aggregation_depth,\n _normalize_index_columns,\n _parse_pandas_metadata,\n _process_open_file_options,\n _row_groups_to_parts,\n _set_gather_statistics,\n _set_metadata_task_size,\n _sort_and_analyze_paths,\n _split_user_options,\n)\nfrom dask.dataframe.io.utils import _is_local_fs, _meta_from_dtypes, _open_input_files\nfrom dask.dataframe.utils import UNKNOWN_CATEGORIES\nfrom dask.delayed import Delayed\nfrom dask.utils import natural_sort_key\n\n# Thread lock required to reset row-groups\n_FP_FILE_LOCK = threading.RLock()\n\n\ndef _paths_to_cats(paths, file_scheme):\n \"\"\"\n Extract categorical fields and labels from hive- or drill-style paths.\n FixMe: This has been pasted from https://github.com/dask/fastparquet/pull/471\n Use fastparquet.api.paths_to_cats from fastparquet>0.3.2 instead.\n\n Parameters\n ----------\n paths (Iterable[str]): file paths relative to root\n file_scheme (str):\n\n Returns\n -------\n cats (OrderedDict[str, List[Any]]): a dict of field names and their values\n \"\"\"\n if file_scheme in [\"simple\", \"flat\", \"other\"]:\n cats = {}\n return cats\n\n cats = OrderedDict()\n raw_cats = OrderedDict()\n s = ex_from_sep(\"/\")\n paths = toolz.unique(paths)\n if file_scheme == \"hive\":\n partitions = toolz.unique((k, v) for path in paths for k, v in s.findall(path))\n for key, val in partitions:\n cats.setdefault(key, set()).add(val_to_num(val))\n raw_cats.setdefault(key, set()).add(val)\n else:\n i_val = toolz.unique(\n (i, val) for path in paths for i, val in enumerate(path.split(\"/\")[:-1])\n )\n for i, val in i_val:\n key = \"dir%i\" % i\n cats.setdefault(key, set()).add(val_to_num(val))\n raw_cats.setdefault(key, set()).add(val)\n\n for key, v in cats.items():\n # Check that no partition names map to the same value after transformation by val_to_num\n raw = raw_cats[key]\n if len(v) != len(raw):\n conflicts_by_value = OrderedDict()\n for raw_val in raw_cats[key]:\n conflicts_by_value.setdefault(val_to_num(raw_val), set()).add(raw_val)\n conflicts = [\n c for k in conflicts_by_value.values() if len(k) > 1 for c in k\n ]\n raise ValueError(\"Partition names map to the same value: %s\" % conflicts)\n vals_by_type = groupby_types(v)\n\n # Check that all partition names map to the same type after transformation by val_to_num\n if len(vals_by_type) > 1:\n examples = [x[0] for x in vals_by_type.values()]\n warnings.warn(\n \"Partition names coerce to values of different types, e.g. 
%s\"\n % examples\n )\n\n cats = OrderedDict([(key, list(v)) for key, v in cats.items()])\n return cats\n\n\npaths_to_cats = (\n _paths_to_cats # FixMe: use fastparquet.api.paths_to_cats for fastparquet>0.3.2\n)\n\n\nclass FastParquetEngine(Engine):\n @classmethod\n def _organize_row_groups(\n cls,\n pf,\n split_row_groups,\n gather_statistics,\n stat_col_indices,\n filters,\n dtypes,\n base_path,\n has_metadata_file,\n chunksize,\n aggregation_depth,\n ):\n \"\"\"Organize row-groups by file.\"\"\"\n\n # Get partitioning metadata\n pqpartitions = list(pf.cats)\n\n # Fastparquet does not use a natural sorting\n # order for partitioned data. Re-sort by path\n if (\n pqpartitions\n and aggregation_depth\n and pf.row_groups\n and pf.row_groups[0].columns[0].file_path\n ):\n pf.row_groups = sorted(\n pf.row_groups,\n key=lambda x: natural_sort_key(x.columns[0].file_path),\n )\n\n # Store types specified in pandas metadata\n pandas_type = {}\n if pf.row_groups and pf.pandas_metadata:\n for c in pf.pandas_metadata.get(\"columns\", []):\n if \"field_name\" in c:\n pandas_type[c[\"field_name\"]] = c.get(\"pandas_type\", None)\n\n # Get the number of row groups per file\n single_rg_parts = int(split_row_groups) == 1\n file_row_groups = defaultdict(list)\n file_row_group_stats = defaultdict(list)\n file_row_group_column_stats = defaultdict(list)\n cmax_last = {}\n for rg, row_group in enumerate(pf.row_groups):\n\n # We can filter partition columns here without dealing\n # with statistics\n if (\n pqpartitions\n and filters\n and fastparquet.api.filter_out_cats(row_group, filters)\n ):\n continue\n\n # NOTE: Here we assume that all column chunks are stored\n # in the same file. This is not strictly required by the\n # parquet spec.\n fp = row_group.columns[0].file_path\n fpath = fp.decode() if isinstance(fp, bytes) else fp\n if fpath is None:\n if not has_metadata_file:\n # There doesn't need to be a file_path if the\n # row group is in the same file as the metadata.\n # Assume this is a single-file dataset.\n fpath = pf.fn\n base_path = base_path or \"\"\n else:\n raise ValueError(\n \"Global metadata structure is missing a file_path string. \"\n \"If the dataset includes a _metadata file, that file may \"\n \"have one or more missing file_path fields.\"\n )\n\n # Append a tuple to file_row_groups. 
This tuple will\n # be structured as: `(<local-row-group-id>, <global-row-group-id>)`\n if file_row_groups[fpath]:\n file_row_groups[fpath].append((file_row_groups[fpath][-1][0] + 1, rg))\n else:\n file_row_groups[fpath].append((0, rg))\n\n if gather_statistics:\n if single_rg_parts:\n s = {\n \"file_path_0\": fpath,\n \"num-rows\": row_group.num_rows,\n \"total_byte_size\": row_group.total_byte_size,\n \"columns\": [],\n }\n else:\n s = {\n \"num-rows\": row_group.num_rows,\n \"total_byte_size\": row_group.total_byte_size,\n }\n cstats = []\n for name, i in stat_col_indices.items():\n column = row_group.columns[i]\n if column.meta_data.statistics:\n cmin = None\n cmax = None\n # TODO: Avoid use of `pf.statistics`\n if pf.statistics[\"min\"][name][0] is not None:\n cmin = pf.statistics[\"min\"][name][rg]\n cmax = pf.statistics[\"max\"][name][rg]\n elif dtypes[name] == \"object\":\n cmin = column.meta_data.statistics.min_value\n cmax = column.meta_data.statistics.max_value\n # Older versions may not have cmin/cmax_value\n if cmin is None:\n cmin = column.meta_data.statistics.min\n if cmax is None:\n cmax = column.meta_data.statistics.max\n # Decode bytes as long as \"bytes\" is not the\n # expected `pandas_type` for this column\n if (\n isinstance(cmin, (bytes, bytearray))\n and pandas_type.get(name, None) != \"bytes\"\n ):\n cmin = cmin.decode(\"utf-8\")\n cmax = cmax.decode(\"utf-8\")\n if isinstance(cmin, np.datetime64):\n tz = getattr(dtypes[name], \"tz\", None)\n cmin = pd.Timestamp(cmin, tz=tz)\n cmax = pd.Timestamp(cmax, tz=tz)\n last = cmax_last.get(name, None)\n\n if not (filters or chunksize or aggregation_depth):\n # Only think about bailing if we don't need\n # stats for filtering\n if cmin is None or (last and cmin < last):\n # We are collecting statistics for divisions\n # only (no filters) - Column isn't sorted, or\n # we have an all-null partition, so lets bail.\n #\n # Note: This assumes ascending order.\n #\n gather_statistics = False\n file_row_group_stats = {}\n file_row_group_column_stats = {}\n break\n\n if single_rg_parts:\n s[\"columns\"].append(\n {\n \"name\": name,\n \"min\": cmin,\n \"max\": cmax,\n }\n )\n else:\n cstats += [cmin, cmax]\n cmax_last[name] = cmax\n else:\n if (\n not (filters or chunksize or aggregation_depth)\n and column.meta_data.num_values > 0\n ):\n # We are collecting statistics for divisions\n # only (no filters) - Lets bail.\n gather_statistics = False\n file_row_group_stats = {}\n file_row_group_column_stats = {}\n break\n\n if single_rg_parts:\n s[\"columns\"].append({\"name\": name})\n else:\n cstats += [None, None, None]\n if gather_statistics:\n file_row_group_stats[fpath].append(s)\n if not single_rg_parts:\n file_row_group_column_stats[fpath].append(tuple(cstats))\n\n return (\n file_row_groups,\n file_row_group_stats,\n file_row_group_column_stats,\n gather_statistics,\n base_path,\n )\n\n @classmethod\n def _get_thrift_row_groups(\n cls,\n pf,\n filename,\n row_groups,\n ):\n \"\"\"Turn a set of row-groups into bytes-serialized form\n using thrift via pickle.\n \"\"\"\n\n real_row_groups = []\n for rg, rg_global in row_groups:\n row_group = pf.row_groups[rg_global]\n columns = row_group.columns\n for c, col in enumerate(columns):\n if c:\n col.file_path = None\n md = col.meta_data\n md.key_value_metadata = None\n # NOTE: Fastparquet may need the null count in the\n # statistics, so we cannot just set statistics\n # to none. 
Set attributes separately:\n st = md.statistics\n if st:\n st.distinct_count = None\n st.max = None\n st.min = None\n st.max_value = None\n st.min_value = None\n md.encodings = None\n md.total_uncompressed_size = None\n md.encoding_stats = None\n row_group.columns = columns\n real_row_groups.append(row_group)\n return real_row_groups\n\n @classmethod\n def _make_part(\n cls,\n filename,\n rg_list,\n fs=None,\n pf=None,\n base_path=None,\n partitions=None,\n ):\n \"\"\"Generate a partition-specific element of `parts`.\"\"\"\n\n if partitions:\n real_row_groups = cls._get_thrift_row_groups(\n pf,\n filename,\n rg_list,\n )\n part = {\"piece\": (real_row_groups,)}\n else:\n # Get full path (empty strings should be ignored)\n full_path = fs.sep.join([p for p in [base_path, filename] if p != \"\"])\n row_groups = [rg[0] for rg in rg_list] # Don't need global IDs\n part = {\"piece\": (full_path, row_groups)}\n\n return part\n\n @classmethod\n def _collect_dataset_info(\n cls,\n paths,\n fs,\n categories,\n index,\n gather_statistics,\n filters,\n split_row_groups,\n chunksize,\n aggregate_files,\n ignore_metadata_file,\n metadata_task_size,\n parquet_file_extension,\n kwargs,\n ):\n\n # Define the parquet-file (pf) object to use for metadata,\n # Also, initialize `parts`. If `parts` is populated here,\n # then each part will correspond to a file. Otherwise, each part will\n # correspond to a row group (populated later).\n\n # Extract \"supported\" key-word arguments from `kwargs`.\n # Split items into `dataset_kwargs` and `read_kwargs`\n dataset_kwargs, read_kwargs, user_kwargs = _split_user_options(**kwargs)\n\n parts = []\n _metadata_exists = False\n if len(paths) == 1 and fs.isdir(paths[0]):\n\n # This is a directory.\n # Check if _metadata and/or _common_metadata files exists\n base = paths[0]\n _metadata_exists = True\n if not ignore_metadata_file:\n _metadata_exists = fs.isfile(fs.sep.join([base, \"_metadata\"]))\n\n # Find all files if we are not using a _metadata file\n if ignore_metadata_file or not _metadata_exists:\n # For now, we need to discover every file under paths[0]\n paths, base, fns = _sort_and_analyze_paths(fs.find(base), fs)\n _update_paths = False\n for fn in [\"_metadata\", \"_common_metadata\"]:\n try:\n fns.remove(fn)\n _update_paths = True\n except ValueError:\n pass\n if _update_paths:\n paths = [fs.sep.join([base, fn]) for fn in fns]\n _metadata_exists = False\n if _metadata_exists:\n # Using _metadata file (best-case scenario)\n pf = ParquetFile(\n fs.sep.join([base, \"_metadata\"]),\n open_with=fs.open,\n **dataset_kwargs,\n )\n else:\n # Use 0th file\n # Note that \"_common_metadata\" can cause issues for\n # partitioned datasets.\n if parquet_file_extension:\n # Raise error if all files have been filtered by extension\n len0 = len(paths)\n paths = [\n path for path in paths if path.endswith(parquet_file_extension)\n ]\n if len0 and paths == []:\n raise ValueError(\n \"No files satisfy the `parquet_file_extension` criteria \"\n f\"(files must end with {parquet_file_extension}).\"\n )\n pf = ParquetFile(\n paths[:1], open_with=fs.open, root=base, **dataset_kwargs\n )\n scheme = get_file_scheme(fns)\n pf.file_scheme = scheme\n pf.cats = paths_to_cats(fns, scheme)\n if not gather_statistics:\n parts = [fs.sep.join([base, fn]) for fn in fns]\n else:\n # This is a list of files\n paths, base, fns = _sort_and_analyze_paths(paths, fs)\n\n # Check if _metadata is in paths, and\n # remove it if ignore_metadata_file=True\n _metadata_exists = \"_metadata\" in fns\n if 
_metadata_exists and ignore_metadata_file:\n fns.remove(\"_metadata\")\n _metadata_exists = False\n paths = [fs.sep.join([base, fn]) for fn in fns]\n\n if _metadata_exists:\n # We have a _metadata file, lets use it\n pf = ParquetFile(\n fs.sep.join([base, \"_metadata\"]),\n open_with=fs.open,\n **dataset_kwargs,\n )\n else:\n # Rely on metadata for 0th file.\n # Will need to pass a list of paths to read_partition\n scheme = get_file_scheme(fns)\n pf = ParquetFile(\n paths[:1], open_with=fs.open, root=base, **dataset_kwargs\n )\n pf.file_scheme = scheme\n pf.cats = paths_to_cats(fns, scheme)\n if not gather_statistics:\n parts = paths.copy()\n\n # Check the `aggregate_files` setting\n aggregation_depth = _get_aggregation_depth(\n aggregate_files,\n list(pf.cats),\n )\n\n # Ensure that there is no overlap between partition columns\n # and explicit columns in `pf`\n if pf.cats:\n _partitions = [p for p in pf.cats if p not in pf.columns]\n if not _partitions:\n pf.cats = {}\n elif len(_partitions) != len(pf.cats):\n raise ValueError(\n \"No partition-columns should be written in the \\n\"\n \"file unless they are ALL written in the file.\\n\"\n \"columns: {} | partitions: {}\".format(pf.columns, pf.cats.keys())\n )\n\n return {\n \"pf\": pf,\n \"paths\": paths,\n \"has_metadata_file\": _metadata_exists,\n \"parts\": parts,\n \"base\": base,\n \"fs\": fs,\n \"gather_statistics\": gather_statistics,\n \"categories\": categories,\n \"index\": index,\n \"filters\": filters,\n \"split_row_groups\": split_row_groups,\n \"chunksize\": chunksize,\n \"aggregate_files\": aggregate_files,\n \"aggregation_depth\": aggregation_depth,\n \"metadata_task_size\": metadata_task_size,\n \"kwargs\": {\n \"dataset\": dataset_kwargs,\n \"read\": read_kwargs,\n **user_kwargs,\n },\n }\n\n @classmethod\n def _create_dd_meta(cls, dataset_info):\n\n # Collect necessary information from dataset_info\n pf = dataset_info[\"pf\"]\n index = dataset_info[\"index\"]\n categories = dataset_info[\"categories\"]\n\n columns = None\n pandas_md = pf.pandas_metadata\n\n if pandas_md:\n (\n index_names,\n column_names,\n storage_name_mapping,\n column_index_names,\n ) = _parse_pandas_metadata(pandas_md)\n # auto-ranges should not be created by fastparquet\n column_names.extend(pf.cats)\n\n else:\n index_names = []\n column_names = pf.columns + list(pf.cats)\n storage_name_mapping = {k: k for k in column_names}\n column_index_names = [None]\n\n if index is None and len(index_names) > 0:\n if len(index_names) == 1 and index_names[0] is not None:\n index = index_names[0]\n else:\n index = index_names\n\n # Normalize user inputs\n column_names, index_names = _normalize_index_columns(\n columns, column_names, index, index_names\n )\n\n all_columns = index_names + column_names\n\n categories_dict = None\n if isinstance(categories, dict):\n categories_dict = categories\n\n if categories is None:\n categories = pf.categories\n elif isinstance(categories, str):\n categories = [categories]\n else:\n categories = list(categories)\n\n # Check that categories are included in columns\n if categories and not set(categories).intersection(all_columns):\n raise ValueError(\n \"categories not in available columns.\\n\"\n \"categories: {} | columns: {}\".format(categories, list(all_columns))\n )\n\n dtypes = pf._dtypes(categories)\n dtypes = {storage_name_mapping.get(k, k): v for k, v in dtypes.items()}\n\n index_cols = index or ()\n if isinstance(index_cols, str):\n index_cols = [index_cols]\n for ind in index_cols:\n if getattr(dtypes.get(ind), 
\"numpy_dtype\", None):\n # index does not support masked types\n dtypes[ind] = dtypes[ind].numpy_dtype\n for cat in categories:\n if cat in all_columns:\n dtypes[cat] = pd.CategoricalDtype(categories=[UNKNOWN_CATEGORIES])\n\n for catcol in pf.cats:\n if catcol in all_columns:\n dtypes[catcol] = pd.CategoricalDtype(categories=pf.cats[catcol])\n\n meta = _meta_from_dtypes(all_columns, dtypes, index_cols, column_index_names)\n\n # Update `dataset_info` and return `meta`\n dataset_info[\"dtypes\"] = dtypes\n dataset_info[\"index\"] = index\n dataset_info[\"index_cols\"] = index_cols\n dataset_info[\"categories\"] = categories\n dataset_info[\"categories_dict\"] = categories_dict\n\n return meta\n\n @classmethod\n def _construct_collection_plan(cls, dataset_info):\n\n # Collect necessary information from dataset_info\n fs = dataset_info[\"fs\"]\n parts = dataset_info[\"parts\"]\n paths = dataset_info[\"paths\"]\n filters = dataset_info[\"filters\"]\n pf = dataset_info[\"pf\"]\n split_row_groups = dataset_info[\"split_row_groups\"]\n chunksize = dataset_info[\"chunksize\"]\n gather_statistics = dataset_info[\"gather_statistics\"]\n base_path = dataset_info[\"base\"]\n aggregation_depth = dataset_info[\"aggregation_depth\"]\n index_cols = dataset_info[\"index_cols\"]\n categories = dataset_info[\"categories\"]\n dtypes = dataset_info[\"dtypes\"]\n categories_dict = dataset_info[\"categories_dict\"]\n has_metadata_file = dataset_info[\"has_metadata_file\"]\n metadata_task_size = dataset_info[\"metadata_task_size\"]\n kwargs = dataset_info[\"kwargs\"]\n\n # Ensure metadata_task_size is set\n # (Using config file or defaults)\n metadata_task_size = _set_metadata_task_size(\n dataset_info[\"metadata_task_size\"], fs\n )\n\n # Determine which columns need statistics.\n # At this point, gather_statistics is only True if\n # the user specified calculate_divisions=True\n filter_columns = {t[0] for t in flatten(filters or [], container=list)}\n stat_col_indices = {}\n _index_cols = index_cols if (gather_statistics and len(index_cols) == 1) else []\n for i, name in enumerate(pf.columns):\n if name in _index_cols or name in filter_columns:\n stat_col_indices[name] = i\n\n # Decide final `gather_statistics` setting.\n # NOTE: The \"fastparquet\" engine requires statistics for\n # filtering even if the filter is on a paritioned column\n gather_statistics = _set_gather_statistics(\n gather_statistics,\n chunksize,\n split_row_groups,\n aggregation_depth,\n filter_columns,\n set(stat_col_indices) | filter_columns,\n )\n\n # Define common_kwargs\n common_kwargs = {\n \"categories\": categories_dict or categories,\n \"root_cats\": pf.cats,\n \"root_file_scheme\": pf.file_scheme,\n \"base_path\": base_path,\n **kwargs,\n }\n\n # Check if this is a very simple case where we can just\n # return the path names. This requires that `parts`\n # already be a list of paths. 
Also, we cannot be splitting\n # by row-group or collecting statistics.\n if (\n gather_statistics is False\n and not split_row_groups\n and isinstance(parts, list)\n and len(parts)\n and isinstance(parts[0], str)\n ):\n return (\n [{\"piece\": (full_path, None)} for full_path in parts],\n [],\n common_kwargs,\n )\n\n dataset_info_kwargs = {\n \"fs\": fs,\n \"split_row_groups\": split_row_groups,\n \"gather_statistics\": gather_statistics,\n \"filters\": filters,\n \"dtypes\": dtypes,\n \"stat_col_indices\": stat_col_indices,\n \"aggregation_depth\": aggregation_depth,\n \"chunksize\": chunksize,\n \"root_cats\": pf.cats,\n \"root_file_scheme\": pf.file_scheme,\n \"base_path\": \"\" if base_path is None else base_path,\n \"has_metadata_file\": has_metadata_file,\n }\n\n if (\n has_metadata_file\n or metadata_task_size == 0\n or metadata_task_size > len(paths)\n ):\n # Construct the output-partitioning plan on the\n # client process (in serial). This means we have\n # a global _metadata file, or that `metadata_task_size`\n # is zero or larger than the number of files.\n pf_or_paths = pf if has_metadata_file else paths\n parts, stats = cls._collect_file_parts(pf_or_paths, dataset_info_kwargs)\n\n else:\n # We DON'T have a global _metadata file to work with.\n # We should loop over files in parallel\n parts, stats = [], []\n if paths:\n # Build and compute a task graph to construct stats/parts\n gather_parts_dsk = {}\n name = \"gather-pq-parts-\" + tokenize(paths, dataset_info_kwargs)\n finalize_list = []\n for task_i, file_i in enumerate(\n range(0, len(paths), metadata_task_size)\n ):\n finalize_list.append((name, task_i))\n gather_parts_dsk[finalize_list[-1]] = (\n cls._collect_file_parts,\n paths[file_i : file_i + metadata_task_size],\n dataset_info_kwargs,\n )\n\n def _combine_parts(parts_and_stats):\n parts, stats = [], []\n for part, stat in parts_and_stats:\n parts += part\n if stat:\n stats += stat\n return parts, stats\n\n gather_parts_dsk[\"final-\" + name] = (_combine_parts, finalize_list)\n parts, stats = Delayed(\"final-\" + name, gather_parts_dsk).compute()\n\n return parts, stats, common_kwargs\n\n @classmethod\n def _collect_file_parts(\n cls,\n pf_or_files,\n dataset_info_kwargs,\n ):\n\n # Collect necessary information from dataset_info\n fs = dataset_info_kwargs[\"fs\"]\n split_row_groups = dataset_info_kwargs[\"split_row_groups\"]\n gather_statistics = dataset_info_kwargs[\"gather_statistics\"]\n stat_col_indices = dataset_info_kwargs[\"stat_col_indices\"]\n filters = dataset_info_kwargs[\"filters\"]\n dtypes = dataset_info_kwargs[\"dtypes\"]\n chunksize = dataset_info_kwargs[\"chunksize\"]\n aggregation_depth = dataset_info_kwargs[\"aggregation_depth\"]\n base_path = dataset_info_kwargs.get(\"base_path\", None)\n root_cats = dataset_info_kwargs.get(\"root_cats\", None)\n root_file_scheme = dataset_info_kwargs.get(\"root_file_scheme\", None)\n has_metadata_file = dataset_info_kwargs[\"has_metadata_file\"]\n\n # Get ParquetFile\n if not isinstance(pf_or_files, fastparquet.api.ParquetFile):\n # Construct local `ParquetFile` object\n pf = ParquetFile(\n pf_or_files,\n open_with=fs.open,\n root=base_path,\n )\n # Update hive-partitioning to match global cats/scheme\n pf.cats = root_cats or {}\n if root_cats:\n pf.file_scheme = root_file_scheme\n else:\n # We already have a ParquetFile object to work with\n pf = pf_or_files\n\n # Organize row-groups by file\n (\n file_row_groups,\n file_row_group_stats,\n file_row_group_column_stats,\n gather_statistics,\n base_path,\n ) = 
cls._organize_row_groups(\n pf,\n split_row_groups,\n gather_statistics,\n stat_col_indices,\n filters,\n dtypes,\n base_path,\n has_metadata_file,\n chunksize,\n aggregation_depth,\n )\n\n # Convert organized row-groups to parts\n parts, stats = _row_groups_to_parts(\n gather_statistics,\n split_row_groups,\n aggregation_depth,\n file_row_groups,\n file_row_group_stats,\n file_row_group_column_stats,\n stat_col_indices,\n cls._make_part,\n make_part_kwargs={\n \"fs\": fs,\n \"pf\": pf,\n \"base_path\": base_path,\n \"partitions\": list(pf.cats),\n },\n )\n\n return parts, stats\n\n @classmethod\n def read_metadata(\n cls,\n fs,\n paths,\n categories=None,\n index=None,\n gather_statistics=None,\n filters=None,\n split_row_groups=False,\n chunksize=None,\n aggregate_files=None,\n ignore_metadata_file=False,\n metadata_task_size=None,\n parquet_file_extension=None,\n **kwargs,\n ):\n\n # Stage 1: Collect general dataset information\n dataset_info = cls._collect_dataset_info(\n paths,\n fs,\n categories,\n index,\n gather_statistics,\n filters,\n split_row_groups,\n chunksize,\n aggregate_files,\n ignore_metadata_file,\n metadata_task_size,\n parquet_file_extension,\n kwargs,\n )\n\n # Stage 2: Generate output `meta`\n meta = cls._create_dd_meta(dataset_info)\n\n # Stage 3: Generate parts and stats\n parts, stats, common_kwargs = cls._construct_collection_plan(dataset_info)\n\n # Cannot allow `None` in columns if the user has specified index=False\n index = dataset_info[\"index\"]\n if index is False and None in meta.columns:\n meta.drop(columns=[None], inplace=True)\n\n # Add `common_kwargs` to the first element of `parts`.\n # We can return as a separate element in the future, but\n # should avoid breaking the API for now.\n if len(parts):\n parts[0][\"common_kwargs\"] = common_kwargs\n parts[0][\"aggregation_depth\"] = dataset_info[\"aggregation_depth\"]\n\n if len(parts) and len(parts[0][\"piece\"]) == 1:\n\n # Strip all partition-dependent or unnecessary\n # data from the `ParquetFile` object\n pf = dataset_info[\"pf\"]\n pf.row_groups = None\n pf.fmd.row_groups = None\n pf._statistics = None\n parts[0][\"common_kwargs\"][\"parquet_file\"] = pf\n\n return (meta, stats, parts, index)\n\n @classmethod\n def multi_support(cls):\n return cls == FastParquetEngine\n\n @classmethod\n def read_partition(\n cls,\n fs,\n pieces,\n columns,\n index,\n categories=(),\n root_cats=None,\n root_file_scheme=None,\n base_path=None,\n **kwargs,\n ):\n\n null_index_name = False\n base_path = False if not root_cats else base_path\n if isinstance(index, list):\n if index == [None]:\n # Handling a None-labeled index...\n # The pandas metadata told us to read in an index\n # labeled `None`. If this corresponds to a `RangeIndex`,\n # fastparquet will need use the pandas metadata to\n # construct the index. Otherwise, the index will correspond\n # to a column named \"__index_level_0__\". We will need to\n # check the `ParquetFile` object for this column below.\n index = []\n null_index_name = True\n columns += index\n\n # Use global `parquet_file` object. 
Need to reattach\n # the desired row_group\n parquet_file = kwargs.pop(\"parquet_file\", None)\n\n # Always convert pieces to list\n if not isinstance(pieces, list):\n pieces = [pieces]\n\n sample = pieces[0]\n if isinstance(sample, tuple):\n if isinstance(sample[0], str):\n # We have paths to read from\n assert parquet_file is None\n\n row_groups = []\n rg_offset = 0\n parquet_file = ParquetFile(\n [p[0] for p in pieces],\n open_with=fs.open,\n root=base_path or False,\n **kwargs.get(\"dataset\", {}),\n )\n for piece in pieces:\n _pf = (\n parquet_file\n if len(pieces) == 1\n else ParquetFile(\n piece[0],\n open_with=fs.open,\n root=base_path or False,\n **kwargs.get(\"dataset\", {}),\n )\n )\n n_local_row_groups = len(_pf.row_groups)\n local_rg_indices = piece[1] or list(range(n_local_row_groups))\n row_groups += [\n parquet_file.row_groups[rg + rg_offset]\n for rg in local_rg_indices\n ]\n rg_offset += n_local_row_groups\n update_parquet_file = len(row_groups) < len(parquet_file.row_groups)\n\n elif parquet_file:\n\n row_groups = []\n for piece in pieces:\n # `piece[1]` will contain actual row-group objects,\n # but they may be pickled\n rgs = piece[0]\n if isinstance(rgs, bytes):\n rgs = pickle.loads(rgs)\n row_groups += rgs\n update_parquet_file = True\n\n else:\n raise ValueError(\"Neither path nor ParquetFile detected!\")\n\n if update_parquet_file:\n with _FP_FILE_LOCK:\n for rg in row_groups:\n for chunk in rg.columns:\n s = chunk.file_path\n if s and isinstance(s, bytes):\n chunk.file_path = s.decode()\n\n parquet_file.fmd.row_groups = row_groups\n # NOTE: May lose cats after `_set_attrs` call\n save_cats = parquet_file.cats\n parquet_file._set_attrs()\n parquet_file.cats = save_cats\n\n if null_index_name:\n if \"__index_level_0__\" in parquet_file.columns:\n # See \"Handling a None-labeled index\" comment above\n index = [\"__index_level_0__\"]\n columns += index\n\n # Update hive-partitioning information if necessary\n parquet_file.cats = root_cats or {}\n if root_cats:\n parquet_file.file_scheme = root_file_scheme\n\n parquet_file._dtypes = (\n lambda *args: parquet_file.dtypes\n ) # ugly patch, could be fixed\n\n # Convert ParquetFile to pandas\n return cls.pf_to_pandas(\n parquet_file,\n fs=fs,\n columns=columns,\n categories=categories,\n index=index,\n **kwargs.get(\"read\", {}),\n )\n\n else:\n # `sample` is NOT a tuple\n raise ValueError(f\"Expected tuple, got {type(sample)}\")\n\n @classmethod\n def pf_to_pandas(\n cls,\n pf,\n fs=None,\n columns=None,\n categories=None,\n index=None,\n open_file_options=None,\n **kwargs,\n ):\n # This method was mostly copied from the fastparquet\n # `ParquetFile.to_pandas` definition. 
We maintain our\n # own implmentation in Dask to enable better remote\n # file-handling control\n\n # Handle selected columns\n if columns is not None:\n columns = columns[:]\n else:\n columns = pf.columns + list(pf.cats)\n if index:\n columns += [i for i in index if i not in columns]\n\n # Extract row-groups and pre-allocate df\n rgs = pf.row_groups\n size = sum(rg.num_rows for rg in rgs)\n df, views = pf.pre_allocate(size, columns, categories, index)\n start = 0\n\n # Get a map of file names -> row-groups\n fn_rg_map = defaultdict(list)\n for rg in rgs:\n fn = pf.row_group_filename(rg)\n fn_rg_map[fn].append(rg)\n\n # Define file-opening options\n precache_options, open_file_options = _process_open_file_options(\n open_file_options,\n **(\n {\n \"allow_precache\": False,\n \"default_cache\": \"readahead\",\n }\n if _is_local_fs(fs)\n else {\n \"metadata\": pf,\n \"columns\": list(set(columns).intersection(pf.columns)),\n \"row_groups\": [rgs for rgs in fn_rg_map.values()],\n \"default_engine\": \"fastparquet\",\n \"default_cache\": \"readahead\",\n }\n ),\n )\n\n with ExitStack() as stack:\n\n for fn, infile in zip(\n fn_rg_map.keys(),\n _open_input_files(\n list(fn_rg_map.keys()),\n fs=fs,\n context_stack=stack,\n precache_options=precache_options,\n **open_file_options,\n ),\n ):\n for rg in fn_rg_map[fn]:\n thislen = rg.num_rows\n parts = {\n name: (\n v\n if name.endswith(\"-catdef\")\n else v[start : start + thislen]\n )\n for (name, v) in views.items()\n }\n\n # Add row-group data to df\n pf.read_row_group_file(\n rg,\n columns,\n categories,\n index,\n assign=parts,\n partition_meta=pf.partition_meta,\n infile=infile,\n **kwargs,\n )\n start += thislen\n return df\n\n @classmethod\n def initialize_write(\n cls,\n df,\n fs,\n path,\n append=False,\n partition_on=None,\n ignore_divisions=False,\n division_info=None,\n schema=None,\n object_encoding=\"utf8\",\n index_cols=None,\n custom_metadata=None,\n **kwargs,\n ):\n if index_cols is None:\n index_cols = []\n if append and division_info is None:\n ignore_divisions = True\n fs.mkdirs(path, exist_ok=True)\n if object_encoding == \"infer\" or (\n isinstance(object_encoding, dict) and \"infer\" in object_encoding.values()\n ):\n raise ValueError(\n '\"infer\" not allowed as object encoding, '\n \"because this required data in memory.\"\n )\n\n metadata_file_exists = False\n if append:\n try:\n # to append to a dataset without _metadata, need to load\n # _common_metadata or any data file here\n pf = fastparquet.api.ParquetFile(path, open_with=fs.open)\n metadata_file_exists = fs.exists(fs.sep.join([path, \"_metadata\"]))\n except (OSError, ValueError):\n # append for create\n append = False\n if append:\n if pf.file_scheme not in [\"hive\", \"empty\", \"flat\"]:\n raise ValueError(\n \"Requested file scheme is hive, but existing file scheme is not.\"\n )\n elif (set(pf.columns) != set(df.columns) - set(partition_on)) or (\n set(partition_on) != set(pf.cats)\n ):\n raise ValueError(\n \"Appended columns not the same.\\n\"\n \"Previous: {} | New: {}\".format(pf.columns, list(df.columns))\n )\n elif (pd.Series(pf.dtypes).loc[pf.columns] != df[pf.columns].dtypes).any():\n raise ValueError(\n \"Appended dtypes differ.\\n{}\".format(\n set(pf.dtypes.items()) ^ set(df.dtypes.items())\n )\n )\n else:\n df = df[pf.columns + partition_on]\n\n fmd = pf.fmd\n i_offset = fastparquet.writer.find_max_part(fmd.row_groups)\n if not ignore_divisions:\n if not set(index_cols).intersection([division_info[\"name\"]]):\n ignore_divisions = True\n if not 
ignore_divisions:\n minmax = fastparquet.api.sorted_partitioned_columns(pf)\n # If fastparquet detects that a partitioned column isn't sorted, it won't\n # appear in the resulting min/max dictionary\n old_end = (\n minmax[index_cols[0]][\"max\"][-1]\n if index_cols[0] in minmax\n else None\n )\n divisions = division_info[\"divisions\"]\n if old_end is None or divisions[0] <= old_end:\n raise ValueError(\n \"Appended divisions overlapping with previous ones.\"\n \"\\n\"\n \"Previous: {} | New: {}\".format(old_end, divisions[0])\n )\n else:\n fmd = fastparquet.writer.make_metadata(\n df._meta,\n object_encoding=object_encoding,\n index_cols=index_cols,\n ignore_columns=partition_on,\n **kwargs,\n )\n i_offset = 0\n if custom_metadata is not None:\n kvm = fmd.key_value_metadata or []\n kvm.extend(\n [\n fastparquet.parquet_thrift.KeyValue(key=key, value=value)\n for key, value in custom_metadata.items()\n ]\n )\n fmd.key_value_metadata = kvm\n\n extra_write_kwargs = {\"fmd\": fmd}\n return i_offset, fmd, metadata_file_exists, extra_write_kwargs\n\n @classmethod\n def write_partition(\n cls,\n df,\n path,\n fs,\n filename,\n partition_on,\n return_metadata,\n fmd=None,\n compression=None,\n custom_metadata=None,\n **kwargs,\n ):\n # Update key/value metadata if necessary\n fmd = copy.copy(fmd)\n for s in fmd.schema:\n if isinstance(s.name, bytes):\n # can be coerced to bytes on copy\n s.name = s.name.decode()\n if custom_metadata and fmd is not None:\n fmd.key_value_metadata = fmd.key_value_metadata + (\n [\n fastparquet.parquet_thrift.KeyValue(key=key, value=value)\n for key, value in custom_metadata.items()\n ]\n )\n\n if not len(df):\n # Write nothing for empty partitions\n rgs = []\n elif partition_on:\n mkdirs = lambda x: fs.mkdirs(x, exist_ok=True)\n if parse_version(fastparquet.__version__) >= parse_version(\"0.1.4\"):\n rgs = partition_on_columns(\n df, partition_on, path, filename, fmd, compression, fs.open, mkdirs\n )\n else:\n rgs = partition_on_columns(\n df,\n partition_on,\n path,\n filename,\n fmd,\n fs.sep,\n compression,\n fs.open,\n mkdirs,\n )\n else:\n with fs.open(fs.sep.join([path, filename]), \"wb\") as fil:\n fmd.num_rows = len(df)\n rg = make_part_file(\n fil, df, fmd.schema, compression=compression, fmd=fmd\n )\n for chunk in rg.columns:\n chunk.file_path = filename\n rgs = [rg]\n if return_metadata:\n return rgs\n else:\n return []\n\n @classmethod\n def write_metadata(cls, parts, meta, fs, path, append=False, **kwargs):\n _meta = copy.copy(meta)\n rgs = meta.row_groups\n if parts:\n for rg in parts:\n if rg is not None:\n if isinstance(rg, list):\n for r in rg:\n rgs.append(r)\n else:\n rgs.append(rg)\n _meta.row_groups = rgs\n fn = fs.sep.join([path, \"_metadata\"])\n fastparquet.writer.write_common_metadata(\n fn, _meta, open_with=fs.open, no_row_groups=False\n )\n\n # if appending, could skip this, but would need to check existence\n fn = fs.sep.join([path, \"_common_metadata\"])\n fastparquet.writer.write_common_metadata(fn, _meta, open_with=fs.open)\n" ]
[ [ "pandas.Timestamp", "pandas.CategoricalDtype", "pandas.Series" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "0.24", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] } ]
facebookresearch/pythia
[ "079740bee4b357a7b1b866d35e2f1fad6edba8a4" ]
[ "mmf/modules/encoders.py" ]
[ "# Copyright (c) Facebook, Inc. and its affiliates.\nimport importlib\nimport logging\nimport os\nimport pickle\nimport re\nfrom collections import OrderedDict\nfrom copy import deepcopy\nfrom dataclasses import asdict, dataclass\nfrom enum import Enum\nfrom typing import Any\n\nimport torch\nimport torchvision\nfrom mmf.common.registry import registry\nfrom mmf.models.frcnn import GeneralizedRCNN\nfrom mmf.modules.embeddings import ProjectionEmbedding, TextEmbedding\nfrom mmf.modules.hf_layers import BertModelJit\nfrom mmf.modules.layers import Identity\nfrom mmf.utils.build import build_image_encoder, build_text_encoder\nfrom mmf.utils.download import download_pretrained_model\nfrom mmf.utils.file_io import PathManager\nfrom mmf.utils.general import get_absolute_path\nfrom mmf.utils.logger import log_class_usage\nfrom omegaconf import MISSING, OmegaConf\nfrom torch import nn, Tensor\nfrom transformers.configuration_auto import AutoConfig\nfrom transformers.modeling_auto import AutoModel\n\ntry:\n from detectron2.modeling import build_resnet_backbone, ShapeSpec\nexcept ImportError:\n pass\n\n\nlogger = logging.getLogger()\n\n\nclass Encoder(nn.Module):\n @dataclass\n class Config:\n name: str = MISSING\n\n def __init__(self):\n super().__init__()\n log_class_usage(\"Encoder\", self.__class__)\n\n @classmethod\n def from_params(cls, **kwargs):\n config = OmegaConf.structured(cls.Config(**kwargs))\n return cls(config)\n\n\nclass EncoderFactory(nn.Module):\n @dataclass\n class Config:\n type: str = MISSING\n params: Encoder.Config = MISSING\n\n\nclass ImageFeatureEncoderTypes(Enum):\n default = \"default\"\n identity = \"identity\"\n projection = \"projection\"\n frcnn_fc7 = \"finetune_faster_rcnn_fpn_fc7\"\n\n\nclass ImageFeatureEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n in_dim: int = MISSING\n\n\nclass ImageFeatureEncoderFactory(EncoderFactory):\n @dataclass\n class Config(EncoderFactory.Config):\n type: ImageFeatureEncoderTypes = MISSING\n params: ImageFeatureEncoder.Config = MISSING\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n encoder_type = config.type\n if isinstance(encoder_type, ImageFeatureEncoderTypes):\n encoder_type = encoder_type.value\n\n assert (\n \"in_dim\" in config.params\n ), \"ImageFeatureEncoder require 'in_dim' param in config\"\n params = config.params\n\n if encoder_type == \"default\" or encoder_type == \"identity\":\n self.module = Identity()\n self.module.in_dim = params.in_dim\n self.module.out_dim = params.in_dim\n elif encoder_type == \"projection\":\n if \"module\" not in params:\n params = deepcopy(params)\n params.module = \"linear\"\n self.module = ProjectionEmbedding(**params)\n elif encoder_type == \"finetune_faster_rcnn_fpn_fc7\":\n self.module = FinetuneFasterRcnnFpnFc7(params)\n else:\n raise NotImplementedError(\"Unknown Image Encoder: %s\" % encoder_type)\n\n self.out_dim = self.module.out_dim\n\n def forward(self, *args, **kwargs):\n return self.module(*args, **kwargs)\n\n\[email protected]_encoder(\"finetune_faster_rcnn_fpn_fc7\")\nclass FinetuneFasterRcnnFpnFc7(ImageFeatureEncoder):\n @dataclass\n class Config(ImageFeatureEncoder.Config):\n name: str = \"finetune_faster_rcnn_fpn_fc7\"\n in_dim: int = MISSING\n weights_file: str = \"fc7_w.pkl\"\n bias_file: str = \"fc7_b.pkl\"\n model_data_dir: str = MISSING\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n model_data_dir = get_absolute_path(config.model_data_dir)\n\n if not 
os.path.isabs(config.weights_file):\n weights_file = os.path.join(model_data_dir, config.weights_file)\n if not os.path.isabs(config.bias_file):\n bias_file = os.path.join(model_data_dir, config.bias_file)\n\n if not PathManager.exists(bias_file) or not PathManager.exists(weights_file):\n download_path = download_pretrained_model(\"detectron.vmb_weights\")\n weights_file = get_absolute_path(os.path.join(download_path, \"fc7_w.pkl\"))\n bias_file = get_absolute_path(os.path.join(download_path, \"fc7_b.pkl\"))\n\n with PathManager.open(weights_file, \"rb\") as w:\n weights = pickle.load(w)\n with PathManager.open(bias_file, \"rb\") as b:\n bias = pickle.load(b)\n out_dim = bias.shape[0]\n\n self.lc = nn.Linear(config.in_dim, out_dim)\n self.lc.weight.data.copy_(torch.from_numpy(weights))\n self.lc.bias.data.copy_(torch.from_numpy(bias))\n self.out_dim = out_dim\n\n def _load_from_state_dict(\n self,\n state_dict,\n prefix,\n local_metadata,\n strict,\n missing_keys,\n unexpected_keys,\n error_msgs,\n ):\n old_prefix = prefix + \"module.\"\n for k in list(state_dict.keys()):\n if k.startswith(old_prefix):\n new_k = k.replace(old_prefix, prefix)\n state_dict[new_k] = state_dict.pop(k)\n\n super()._load_from_state_dict(\n state_dict,\n prefix,\n local_metadata,\n strict,\n missing_keys,\n unexpected_keys,\n error_msgs,\n )\n\n def forward(self, image):\n i2 = self.lc(image)\n i3 = nn.functional.relu(i2)\n return i3\n\n\[email protected]_encoder(\"identity\")\nclass IdentityEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"identity\"\n # Random in_dim if not specified\n in_dim: int = 100\n\n def __init__(self, config: Config):\n super().__init__()\n self.module = nn.Identity()\n self.in_dim = config.get(\"in_dim\", 100)\n self.out_dim = self.in_dim\n\n def forward(self, x):\n return self.module(x)\n\n\nclass ImageEncoderTypes(Enum):\n default = \"default\"\n identity = \"identity\"\n torchvision_resnet = \"torchvision_resnet\"\n resnet152 = \"resnet152\"\n detectron2_resnet = \"detectron2_resnet\"\n\n\nclass ImageEncoderFactory(EncoderFactory):\n @dataclass\n class Config(EncoderFactory.Config):\n type: ImageEncoderTypes = MISSING\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self._type = config.type\n\n if isinstance(self._type, ImageEncoderTypes):\n self._type = self._type.value\n\n params = config.params\n\n if self._type == \"default\" or self._type == \"identity\":\n self.module = nn.Identity()\n self.module.out_dim = params.in_dim\n elif self._type == \"resnet152\":\n self.module = ResNet152ImageEncoder(params)\n elif self._type == \"torchvision_resnet\":\n self.module = TorchvisionResNetImageEncoder(params)\n elif self._type == \"detectron2_resnet\":\n self.module = Detectron2ResnetImageEncoder(params)\n elif self._type == \"frcnn\":\n self.module = FRCNNImageEncoder(params)\n else:\n raise NotImplementedError(\"Unknown Image Encoder: %s\" % self._type)\n\n @property\n def out_dim(self):\n return self.module.out_dim\n\n def forward(self, image):\n return self.module(image)\n\n\n# Taken from facebookresearch/mmbt with some modifications\[email protected]_encoder(\"resnet152\")\nclass ResNet152ImageEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"resnet152\"\n pretrained: bool = True\n # \"avg\" or \"adaptive\"\n pool_type: str = \"avg\"\n num_output_features: int = 1\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n model = 
torchvision.models.resnet152(pretrained=config.get(\"pretrained\", True))\n modules = list(model.children())[:-2]\n self.model = nn.Sequential(*modules)\n\n pool_func = (\n nn.AdaptiveAvgPool2d if config.pool_type == \"avg\" else nn.AdaptiveMaxPool2d\n )\n\n # -1 will keep the original feature size\n if config.num_output_features == -1:\n self.pool = nn.Identity()\n elif config.num_output_features in [1, 2, 3, 5, 7]:\n self.pool = pool_func((config.num_output_features, 1))\n elif config.num_output_features == 4:\n self.pool = pool_func((2, 2))\n elif config.num_output_features == 6:\n self.pool = pool_func((3, 2))\n elif config.num_output_features == 8:\n self.pool = pool_func((4, 2))\n elif config.num_output_features == 9:\n self.pool = pool_func((3, 3))\n\n self.out_dim = 2048\n\n def forward(self, x):\n # Bx3x224x224 -> Bx2048x7x7 -> Bx2048xN -> BxNx2048\n out = self.pool(self.model(x))\n out = torch.flatten(out, start_dim=2)\n out = out.transpose(1, 2).contiguous()\n return out # BxNx2048\n\n\[email protected]_encoder(\"torchvision_resnet\")\nclass TorchvisionResNetImageEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"resnet50\"\n pretrained: bool = False\n zero_init_residual: bool = True\n num_output_features: int = -1\n pool_type: str = \"avg\"\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n\n model = getattr(torchvision.models, config.name)(\n pretrained=config.pretrained, zero_init_residual=config.zero_init_residual\n )\n\n # checks if use_avgpool exists to maintain the old logic\n self.use_avgpool = config.get(\"use_avgpool\", None)\n if self.use_avgpool: # use_avgpool is True\n config.num_output_features = 1\n config.pool_type = \"avg\"\n elif self.use_avgpool is False: # use_avgpool is False\n config.num_output_features = -1\n\n if config.pretrained:\n model = self._load_pretrained(model, config)\n\n modules = list(model.children())[:-2]\n self.model = nn.Sequential(*modules)\n self.pool = self._pool_func(config)\n self.out_dim = config.get(\"out_dim\", 2048)\n\n def _load_pretrained(self, model, config: Config):\n pretrained_model = config.get(\"pretrained_model\", \"supervised\")\n if pretrained_model == \"supervised\":\n pass # this is already loaded via torchvision using pretrained=True\n elif os.path.exists(pretrained_model):\n model.load_state_dict(torch.load(pretrained_model))\n else:\n try:\n with PathManager.open(pretrained_model, \"rb\") as f:\n model.load_state_dict(\n torch.load(f, map_location=lambda storage, loc: storage),\n strict=False,\n )\n except Exception:\n raise Exception(f\"unknown pretrained ResNet model: {pretrained_model}\")\n return model\n\n def _pool_func(self, config: Config):\n pool_func = (\n nn.AdaptiveAvgPool2d if config.pool_type == \"avg\" else nn.AdaptiveMaxPool2d\n )\n # -1 will keep the original feature size\n if config.num_output_features == -1:\n pool = nn.Identity()\n elif config.num_output_features in [1, 2, 3, 5, 7]:\n pool = pool_func((config.num_output_features, 1))\n elif config.num_output_features == 4:\n pool = pool_func((2, 2))\n elif config.num_output_features == 6:\n pool = pool_func((3, 2))\n elif config.num_output_features == 8:\n pool = pool_func((4, 2))\n elif config.num_output_features == 9:\n pool = pool_func((3, 3))\n\n return pool\n\n def forward(self, x):\n # B x 3 x 224 x 224 -> B x out_dim x 7 x 7\n out = self.pool(self.model(x))\n if self.use_avgpool is None:\n out = torch.flatten(out, start_dim=2)\n out = out.transpose(1, 
2).contiguous() # BxNxout_dim\n else:\n out = torch.flatten(out, start_dim=1) # BxN*out_dim\n return out\n\n\[email protected]_encoder(\"detectron2_resnet\")\nclass Detectron2ResnetImageEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"detectron2_resnet\"\n pretrained: bool = True\n pretrained_path: str = None\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n pretrained = config.get(\"pretrained\", False)\n pretrained_path = config.get(\"pretrained_path\", None)\n\n self.resnet = build_resnet_backbone(config, ShapeSpec(channels=3))\n\n if pretrained:\n state_dict = torch.hub.load_state_dict_from_url(\n pretrained_path, progress=False\n )\n new_state_dict = OrderedDict()\n replace_layer = {\"backbone.\": \"\"}\n\n for key, value in state_dict[\"model\"].items():\n new_key = re.sub(\n r\"(backbone\\.)\", lambda x: replace_layer[x.groups()[0]], key\n )\n new_state_dict[new_key] = value\n self.resnet.load_state_dict(new_state_dict, strict=False)\n\n self.out_dim = 2048\n\n def forward(self, x):\n x = self.resnet(x)\n return x[\"res5\"]\n\n\[email protected]_encoder(\"frcnn\")\nclass FRCNNImageEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"frcnn\"\n pretrained: bool = True\n pretrained_path: str = None\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n pretrained = config.get(\"pretrained\", False)\n pretrained_path = config.get(\"pretrained_path\", None)\n self.frcnn = GeneralizedRCNN(config)\n if pretrained:\n state_dict = torch.load(pretrained_path)\n self.frcnn.load_state_dict(state_dict)\n self.frcnn.eval()\n\n def forward(\n self,\n x: torch.Tensor,\n sizes: torch.Tensor = None,\n scales_yx: torch.Tensor = None,\n padding: torch.Tensor = None,\n max_detections: int = 0,\n return_tensors: str = \"pt\",\n ):\n x = self.frcnn(\n x,\n sizes,\n scales_yx=scales_yx,\n padding=padding,\n max_detections=max_detections,\n return_tensors=return_tensors,\n )\n return x\n\n\nclass TextEncoderTypes(Enum):\n identity = \"identity\"\n transformer = \"transformer\"\n embedding = \"embedding\"\n\n\nclass TextEncoderFactory(EncoderFactory):\n @dataclass\n class Config(EncoderFactory.Config):\n # identity, transformer or embedding as of now\n type: TextEncoderTypes = MISSING\n params: Encoder.Config = MISSING\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self._type = config.type\n if isinstance(self._type, TextEncoderTypes):\n self._type = self._type.value\n\n if self._type == \"identity\":\n self.module = nn.Identity()\n elif self._type == \"transformer\":\n self._module = TransformerEncoder(config.params)\n self.module = self._module.module\n elif self._type == \"embedding\":\n self.module = TextEmbeddingEncoder(config.params)\n else:\n raise NotImplementedError(f\"Unknown Text Encoder {self._type}\")\n\n def forward(self, *args, **kwargs):\n return self.module(*args, **kwargs)\n\n\[email protected]_encoder(\"text_embedding\")\nclass TextEmbeddingEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"text_embedding\"\n operator: str = MISSING\n # Keeping this Any for now as this\n # needs a separate refactor PR.\n embedding_params: Any = MISSING\n\n def __init__(self, config: Config):\n super().__init__()\n self._operator = config.operator\n self._embedding_params = config.embedding_params\n\n self.module = TextEmbedding(\n self._embedding_params.type, **self._embedding_params.params\n 
)\n\n def forward(self, x):\n x = self.module(x)\n if self._operator == \"sum\":\n x = x.sum(dim=1)\n elif self._operator == \"concat\":\n x = torch.cat(x, dim=1)\n elif self._operator == \"mul\":\n x = torch.prod(x, dim=1)\n\n return x.squeeze()\n\n\[email protected]_encoder(\"transformer\")\nclass TransformerEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"transformer\"\n num_segments: int = 2\n bert_model_name: str = \"bert-base-uncased\"\n # Options below can be overridden to update the bert configuration used\n # to initialize the bert encoder. If some option is missing or\n # if you are using an encoder different then BERT, add extra parameters\n # by inheriting and extending this config\n # Those options will automatically override the options for your transformer\n # encoder's configuration. For e.g. vocab_size is missing here, just add\n # vocab_size: x to update the size of the vocabulary with which encoder is\n # initialized. If you update the default values, the transformer you\n # will get will be initialized from scratch.\n hidden_size: int = 768\n num_hidden_layers: int = 12\n num_attention_heads: int = 12\n output_attentions: bool = False\n output_hidden_states: bool = False\n random_init: bool = False\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n hf_params = {\"config\": self._build_encoder_config(config)}\n should_random_init = self.config.get(\"random_init\", False)\n\n # For BERT models, initialize using Jit version\n if self.config.bert_model_name.startswith(\"bert-\"):\n if should_random_init:\n self.module = BertModelJit(**hf_params)\n else:\n self.module = BertModelJit.from_pretrained(\n self.config.bert_model_name, **hf_params\n )\n else:\n if should_random_init:\n self.module = AutoModel.from_config(**hf_params)\n else:\n self.module = AutoModel.from_pretrained(\n self.config.bert_model_name, **hf_params\n )\n\n self.embeddings = self.module.embeddings\n self.original_config = self.config\n self.config = self.module.config\n self._init_segment_embeddings()\n\n def _init_segment_embeddings(self):\n if self.original_config.get(\"num_segments\", None):\n num_segments = self.original_config.num_segments\n if hasattr(self.embeddings, \"token_type_embeddings\"):\n new_embeds = nn.Embedding(num_segments, self.config.hidden_size)\n new_embeds.weight.data[:2].copy_(\n self.embeddings.token_type_embeddings.weight\n )\n for idx in range(2, num_segments - 1):\n new_embeds.weight.data[idx].copy_(\n self.embeddings.token_type_embeddings.weight.data.mean(dim=0)\n )\n self.embeddings.token_type_embeddings = new_embeds\n\n def _build_encoder_config(self, config: Config):\n return AutoConfig.from_pretrained(\n config.bert_model_name, **OmegaConf.to_container(config)\n )\n\n def forward(self, *args, return_sequence=False, **kwargs) -> Tensor:\n # Only return pooled output\n output = self.module(*args, **kwargs)\n return output[0] if return_sequence else output[1]\n\n\nclass MultiModalEncoderBase(Encoder):\n __jit_unused_properties__ = [\"encoder_config\"]\n\n @dataclass\n class Config(Encoder.Config):\n # This actually is Union[ImageEncoderConfig, ImageFeatureEncoderConfig]\n modal_encoder: EncoderFactory.Config = ImageEncoderFactory.Config(\n type=ImageEncoderTypes.resnet152, params=ResNet152ImageEncoder.Config()\n )\n text_encoder: EncoderFactory.Config = TextEncoderFactory.Config(\n type=TextEncoderTypes.transformer, params=TransformerEncoder.Config()\n )\n direct_features_input: bool = False\n 
modal_hidden_size: int = 2048\n text_hidden_size: int = 768\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n\n self._modal_encoder_config = self.config.get(\"modal_encoder\", None)\n\n self._is_direct_features_input = self.config.get(\"direct_features_input\", False)\n\n self.build()\n self.modal_hidden_size = self.config.get(\"modal_hidden_size\", None)\n self.text_hidden_size = self.config.get(\"text_hidden_size\", None)\n\n def build(self):\n encoders = self._build_encoders(self.config)\n self.text_encoder, self.modal_encoder = encoders[0], encoders[1]\n\n self._encoder_config = None\n if self.text_encoder:\n self._encoder_config = self.text_encoder.config\n\n @property\n def encoder_config(self):\n return self._encoder_config\n\n def _build_encoders(self, config):\n text_encoder = None\n if config.get(\"text_encoder\", None):\n text_encoder = build_text_encoder(config.text_encoder)\n\n modal_encoder = None\n if config.get(\"modal_encoder\", None):\n modal_encoder = self._build_modal_encoder(config.modal_encoder)\n\n return (text_encoder, modal_encoder)\n\n def _build_modal_encoder(self, config):\n return build_image_encoder(\n config, direct_features=self._is_direct_features_input\n )\n\n\nclass PooledEncoder(Encoder):\n \"\"\"\n Standard pooled encoder class which takes in an input, encodes it with an encoder\n implemented and returned from `self.build_encoder` function, pools it based\n `pool_type` and `num_output_features` specified, flattens it and returns it\n back as a tensor.\n \"\"\"\n\n @dataclass\n class Config(Encoder.Config):\n num_output_features: int = 1 # How many output features need to be returned.\n pool_type: str = \"avg\" # type of pooling to apply \"avg\" | \"adaptive\"\n out_dim: int = MISSING # size of out dim expected\n three_d: bool = False # if input requires 3D pooling (for video)\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.encoder = self.build_encoder(config)\n pool_func = (\n nn.AdaptiveAvgPool2d if config.pool_type == \"avg\" else nn.AdaptiveMaxPool2d\n )\n params = (config.num_output_features, 1)\n if config.three_d:\n pool_func = (\n nn.AdaptiveAvgPool3d\n if config.pool_type == \"avg\"\n else nn.AdaptiveMaxPool3d\n )\n params = (config.num_output_features, 1, 1)\n # -1 will keep the original feature size\n if config.num_output_features == -1:\n self.pool = nn.Identity()\n else:\n self.pool = pool_func(params)\n self.out_dim = config.out_dim\n\n def build_encoder(self, config: Config, *args, **kwargs):\n \"\"\"Build an encoder on whose output the pooling will be applied.\n\n Args:\n config (Config): Config parameter required to build the encoder.\n\n Raises:\n NotImplementedError: Not implemented by default.\n \"\"\"\n raise NotImplementedError()\n\n def forward(self, x: Tensor) -> Tensor:\n out = self.encoder(x)\n out = self.pool(out)\n out = torch.flatten(out, start_dim=2)\n out = out.transpose(1, 2).contiguous()\n return out\n\n\[email protected]_encoder(\"pytorchvideo\")\nclass PytorchVideoEncoder(Encoder):\n \"\"\"A thin wrapper around pytorchvideo models.\n This class is responsible for integrating pytorchvideo models as encoders.\n THis class attempts to construct a pytorchvideo model from torch hub.\n If this fails for a random weight model, and pytorchvideo package is available,\n build the model with random weights from pytorchvideo.models.\n\n Config:\n name (str): Always 'pytorchvideo' Used for builder_encoder()\n random_init (bool): Flag to load 
pretrained weights\n model_name (str): Name of the pytorchvideo model to use\n drop_last_n_layers (int):\n <=0 value for the number of layers to drop off the end\n pooler_name (str): Name of pooler used on model output\n\n Raises:\n ImportError:\n The constructor raises an ImportError if pytorchvideo is not installed.\n \"\"\"\n\n @dataclass\n class Config(Encoder.Config):\n name: str = \"pytorchvideo\"\n random_init: bool = False\n model_name: str = \"slowfast_r50\"\n drop_last_n_layers: int = -1\n pooler_name: str = \"identity\"\n\n PYTORCHVIDEO_REPO = \"facebookresearch/pytorchvideo:main\"\n\n def __init__(self, config: Config):\n super().__init__()\n config = OmegaConf.create({**asdict(self.Config()), **config})\n if config.random_init:\n params = dict(**OmegaConf.to_container(config))\n params = {\n k: v\n for k, v in params.items()\n if k not in PytorchVideoEncoder.Config().__dict__\n }\n try:\n model = torch.hub.load(\n PytorchVideoEncoder.PYTORCHVIDEO_REPO,\n model=config.model_name,\n pretrained=False,\n **params,\n )\n except BaseException as err:\n pytorchvideo_spec = importlib.util.find_spec(\"pytorchvideo\")\n if pytorchvideo_spec is None:\n raise err\n import pytorchvideo.models.hub as hub\n\n model_create_fn = getattr(hub, config.model_name)\n model = model_create_fn(pretrained=False, **params)\n else:\n # load weights from TorchHub\n model = torch.hub.load(\n PytorchVideoEncoder.PYTORCHVIDEO_REPO,\n model=config.model_name,\n pretrained=True,\n )\n encoder_list = []\n if config.drop_last_n_layers == 0:\n encoder_list += [model]\n else:\n modules_list = list(model.children())\n if len(modules_list) == 1:\n modules_list = list(modules_list[0].children())\n modules = modules_list[: config.drop_last_n_layers]\n encoder_list += modules\n\n pooler = registry.get_pool_class(config.pooler_name)()\n encoder_list += [pooler]\n self.encoder = nn.Sequential(*encoder_list)\n\n def forward(self, *args, **kwargs):\n # pass along input to model\n # assumes caller obeys the dynamic model signature\n return self.encoder(*args, **kwargs)\n\n\[email protected]_encoder(\"r2plus1d_18\")\nclass R2Plus1D18VideoEncoder(PooledEncoder):\n \"\"\"\n R2Plus1D based video encoder. Returns back a tensor of dim 2048.\n By default, pretrained version is used.\n See https://arxiv.org/abs/1711.11248.\n \"\"\"\n\n @dataclass\n class Config(PooledEncoder.Config):\n name: str = \"r2plus1d_18\"\n out_dim: int = 512 # out dim\n pretrained: bool = True # if should use pretrained version or not\n three_d: bool = True\n\n def build_encoder(self, config: Config, *args, **kwargs):\n model = torchvision.models.video.r2plus1d_18(\n pretrained=config.get(\"pretrained\", True)\n )\n modules = list(model.children())[:-2]\n return nn.Sequential(*modules)\n\n\[email protected]_encoder(\"resnet18_audio\")\nclass ResNet18AudioEncoder(PooledEncoder):\n \"\"\"\n Audio encoder based on ResNet18 used in various audio classification paper\n as a baseline. 
By default, not pretrained version is used.\n \"\"\"\n\n @dataclass\n class Config(PooledEncoder.Config):\n name: str = \"resnet18_audio\"\n out_dim: int = 512\n pretrained: bool = False\n\n def build_encoder(self, config: Config, *args, **kwargs):\n model = torchvision.models.resnet18(pretrained=config.get(\"pretrained\", False))\n model.conv1 = nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3, bias=False)\n modules = list(model.children())[:-2]\n return nn.Sequential(*modules)\n\n\[email protected]_encoder(\"vit\")\nclass ViTEncoder(Encoder):\n @dataclass\n class Config(Encoder.Config):\n name: str = \"vit\"\n # See https://huggingface.co/models?filter=vit for available options\n pretrained_model_name: str = \"google/vit-base-patch16-224\"\n random_init: bool = False\n gradient_checkpointing: bool = False\n\n def __init__(self, config: Config, *args, **kwargs):\n super().__init__()\n self.config = config\n self.module, self.hf_config = self._model_class.from_config(config)\n self.embeddings = self.module.embeddings\n self.out_dim = self.hf_config.hidden_size\n\n @property\n def _model_class(self):\n from mmf.modules.vit import ViTModel\n\n return ViTModel\n\n def forward(self, *args, **kwargs):\n if \"output_hidden_states\" not in kwargs:\n kwargs[\"output_hidden_states\"] = False\n output = self.module(*args, **kwargs)\n return output[\"last_hidden_state\"], output.get(\"hidden_states\", None)\n" ]
[ [ "torch.nn.Sequential", "torch.load", "torch.cat", "torch.nn.Conv2d", "torch.from_numpy", "torch.nn.Embedding", "torch.nn.Linear", "torch.nn.Identity", "torch.nn.functional.relu", "torch.prod", "torch.flatten", "torch.hub.load", "torch.hub.load_state_dict_from_url" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
ZPAVelocity/DataStructureExercise
[ "39b1cce859e5c46599b3a6e69ac80ade5920aa34" ]
[ "DynamicProgramming/matrixChainMultiplication.py" ]
[ "import sys\nimport numpy as np\n\n\ndef main():\n p = [30, 35, 15, 5, 10, 20, 25]\n m, s = matrixChainOrder(p)\n \n print('m')\n for i in m:\n print(i)\n print('s')\n for i in s:\n print(i)\n\n\ndef matrixMultiply(A, B):\n if A.shape[1] != B.shape[0]:\n print('incompatible dimensions')\n return np.array([[]])\n C = np.array([[0 for i in range(A.shape[0])] for i in range(B.shape[1])])\n for i in range(A.shape[0]):\n for j in range(B.shape[1]):\n C[i][j] = 0\n for k in range(A.shape[1]):\n C[i][j] += + A[i][k] * B[k][j]\n return C\n\n\ndef matrixChainOrder(p):\n n = len(p) - 1\n m = [[0 for i in range(n)] for j in range(n)]\n s = [[0 for i in range(n)] for j in range(n)]\n\n for i in range(0, n):\n m[i][i] = 0\n\n for l in range(2, n + 1): # l is the chain length\n for i in range(0, n - l + 1):\n j = i + l - 1\n m[i][j] = sys.maxsize\n for k in range(i, j):\n q = m[i][k] + m[k + 1][j] + p[i] * p[k + 1] * p[j + 1]\n if q < m[i][j]:\n m[i][j] = q\n s[i][j] = k + 1\n return m, s\n\n\nif __name__ == \"__main__\":\n main()\n" ]
[ [ "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
IngrojShrestha/language
[ "674a3d016b1e17658e301e8d9bdfa63e3d3f5d15" ]
[ "language/bert_extraction/steal_bert_qa/data_generation/preprocess_thief_dev_squad.py" ]
[ "# coding=utf-8\n# Copyright 2018 The Google AI Language Team Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Construct a held-out / validation set from a large pool of WIKI / RANDOM queries ensuring there is no overlap with the train set.\"\"\"\nimport json\nimport random\n\nimport numpy as np\n\nimport tensorflow.compat.v1 as tf\n\napp = tf.compat.v1.app\nflags = tf.flags\ngfile = tf.gfile\nlogging = tf.logging\n\nflags.DEFINE_string(\"pool_dataset\", None,\n \"Large pool of queries having training set distribution.\")\nflags.DEFINE_string(\"train_dataset\", None,\n \"Training set of queries used for model extraction.\")\nflags.DEFINE_integer(\"dev_dataset_size\", 10570,\n \"Number of QAs in held-out set. (default: SQuAD 1.1 size\")\nflags.DEFINE_string(\"output_path\", None, \"Output path for the held-out set.\")\nflags.DEFINE_integer(\"random_seed\", 42, \"Random seed for determinism.\")\n\nFLAGS = flags.FLAGS\n\n\ndef main(_):\n random.seed(FLAGS.random_seed)\n np.random.seed(FLAGS.random_seed)\n\n with gfile.Open(FLAGS.pool_dataset, \"r\") as f:\n pool_data = json.loads(f.read())[\"data\"]\n\n with gfile.Open(FLAGS.train_dataset, \"r\") as f:\n train_data = json.loads(f.read())[\"data\"]\n\n all_train_paras = {}\n\n for inst in train_data:\n for para in inst[\"paragraphs\"]:\n all_train_paras[para[\"context\"]] = 1\n\n num_dev_questions = FLAGS.dev_dataset_size\n\n # sanity check to verify all pool dataset question IDs are unique\n num_pool_questions = 0\n pool_qids = {}\n\n for inst in pool_data:\n for para in inst[\"paragraphs\"]:\n for qa in para[\"qas\"]:\n num_pool_questions += 1\n pool_qids[qa[\"id\"]] = 1\n\n assert len(pool_qids) == num_pool_questions\n\n random.shuffle(pool_data)\n\n output_data = {\"data\": [], \"version\": FLAGS.version}\n\n for instance in pool_data:\n curr_instance = {\"title\": \"Random dev data\", \"paragraphs\": []}\n for para in instance[\"paragraphs\"]:\n # Even if there is a paragraph overlap, do not consider it for the\n # held-out set since we want to minimize overlap\n if para[\"context\"] in all_train_paras:\n continue\n # Assume different paragraphs have different questions\n curr_instance[\"paragraphs\"].append(para)\n num_dev_questions = num_dev_questions - len(para[\"qas\"])\n if num_dev_questions <= 0:\n break\n if curr_instance[\"paragraphs\"]:\n output_data[\"data\"].append(curr_instance)\n if num_dev_questions <= 0:\n break\n\n total_questions = 0\n for instance in output_data[\"data\"]:\n for para in instance[\"paragraphs\"]:\n for qa in para[\"qas\"]:\n total_questions += 1\n\n logging.info(\"Final dataset size = %d\", total_questions)\n\n with gfile.Open(FLAGS.output_path, \"w\") as f:\n f.write(json.dumps(output_data))\n\n\nif __name__ == \"__main__\":\n app.run(main)\n" ]
[ [ "numpy.random.seed" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
tangzhiyi11/Paddle
[ "790cadd1f06fabeadc4b9aeca5622ea50985b990", "790cadd1f06fabeadc4b9aeca5622ea50985b990" ]
[ "python/paddle/fluid/tests/unittests/test_egr_python_api.py", "python/paddle/fluid/tests/unittests/dygraph_to_static/test_declarative.py" ]
[ "# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n# http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport paddle.fluid.core as core\nimport paddle.fluid.eager.eager_tensor_patch_methods as eager_tensor_patch_methods\nimport paddle\nimport numpy as np\nfrom paddle.fluid.framework import _test_eager_guard, EagerParamBase, _in_eager_mode\nfrom paddle.fluid.data_feeder import convert_dtype\nimport unittest\nimport copy\n\n\nclass EagerScaleTestCase(unittest.TestCase):\n def test_scale_base(self):\n with _test_eager_guard():\n paddle.set_device(\"cpu\")\n arr = np.ones([4, 16, 16, 32]).astype('float32')\n tensor = paddle.to_tensor(arr, 'float32', core.CPUPlace())\n print(tensor)\n tensor = core.eager.scale(tensor, 2.0, 0.9, True, False)\n for i in range(0, 100):\n tensor = core.eager.scale(tensor, 2.0, 0.9, True, False)\n print(tensor)\n self.assertEqual(tensor.shape, [4, 16, 16, 32])\n self.assertEqual(tensor.stop_gradient, True)\n\n def test_retain_grad_and_run_backward(self):\n with _test_eager_guard():\n paddle.set_device(\"cpu\")\n\n input_data = np.ones([4, 16, 16, 32]).astype('float32')\n data_eager = paddle.to_tensor(input_data, 'float32',\n core.CPUPlace(), False)\n\n grad_data = np.ones([4, 16, 16, 32]).astype('float32')\n grad_eager = paddle.to_tensor(grad_data, 'float32', core.CPUPlace())\n\n data_eager.retain_grads()\n\n out_eager = core.eager.scale(data_eager, 1.0, 0.9, True, True)\n self.assertFalse(data_eager.grad._is_initialized())\n out_eager.backward(grad_eager, False)\n self.assertTrue(data_eager.grad._is_initialized())\n self.assertTrue(np.array_equal(data_eager.grad.numpy(), input_data))\n\n def test_retain_grad_and_run_backward_raises(self):\n with _test_eager_guard():\n paddle.set_device(\"cpu\")\n\n input_data = np.ones([4, 16, 16, 32]).astype('float32')\n data_eager = paddle.to_tensor(input_data, 'float32',\n core.CPUPlace(), False)\n\n grad_data = np.ones([4, 16, 16, 32]).astype('float32')\n grad_data2 = np.ones([4, 16]).astype('float32')\n grad_eager = paddle.to_tensor(grad_data, 'float32', core.CPUPlace())\n grad_eager2 = paddle.to_tensor(grad_data2, 'float32',\n core.CPUPlace())\n\n data_eager.retain_grads()\n\n out_eager = core.eager.scale(data_eager, 1.0, 0.9, True, True)\n self.assertFalse(data_eager.grad._is_initialized())\n with self.assertRaisesRegexp(\n AssertionError,\n \"The type of grad_tensor must be paddle.Tensor\"):\n out_eager.backward(grad_data, False)\n\n with self.assertRaisesRegexp(\n AssertionError,\n \"Tensor shape not match, Tensor of grad_tensor /*\"):\n out_eager.backward(grad_eager2, False)\n\n\nclass EagerDtypeTestCase(unittest.TestCase):\n def check_to_tesnsor_and_numpy(self, dtype, proto_dtype):\n with _test_eager_guard():\n arr = np.random.random([4, 16, 16, 32]).astype(dtype)\n tensor = paddle.to_tensor(arr, dtype)\n self.assertEqual(tensor.dtype, proto_dtype)\n self.assertTrue(np.array_equal(arr, tensor.numpy()))\n\n def test_dtype_base(self):\n print(\"Test_dtype\")\n self.check_to_tesnsor_and_numpy('bool', 
core.VarDesc.VarType.BOOL)\n self.check_to_tesnsor_and_numpy('int8', core.VarDesc.VarType.INT8)\n self.check_to_tesnsor_and_numpy('uint8', core.VarDesc.VarType.UINT8)\n self.check_to_tesnsor_and_numpy('int16', core.VarDesc.VarType.INT16)\n self.check_to_tesnsor_and_numpy('int32', core.VarDesc.VarType.INT32)\n self.check_to_tesnsor_and_numpy('int64', core.VarDesc.VarType.INT64)\n self.check_to_tesnsor_and_numpy('float16', core.VarDesc.VarType.FP16)\n self.check_to_tesnsor_and_numpy('float32', core.VarDesc.VarType.FP32)\n self.check_to_tesnsor_and_numpy('float64', core.VarDesc.VarType.FP64)\n self.check_to_tesnsor_and_numpy('complex64',\n core.VarDesc.VarType.COMPLEX64)\n self.check_to_tesnsor_and_numpy('complex128',\n core.VarDesc.VarType.COMPLEX128)\n\n\nclass EagerTensorPropertiesTestCase(unittest.TestCase):\n def constructor(self, place):\n egr_tensor = core.eager.EagerTensor()\n self.assertEqual(egr_tensor.persistable, False)\n self.assertTrue(\"generated\" in egr_tensor.name)\n self.assertEqual(egr_tensor.shape, [])\n self.assertEqual(egr_tensor.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor.stop_gradient, True)\n\n egr_tensor0 = core.eager.EagerTensor(\n core.VarDesc.VarType.FP32, [4, 16, 16, 32], \"test_eager_tensor\",\n core.VarDesc.VarType.LOD_TENSOR, True)\n self.assertEqual(egr_tensor0.persistable, True)\n self.assertEqual(egr_tensor0.name, \"test_eager_tensor\")\n self.assertEqual(egr_tensor0.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor0.dtype, core.VarDesc.VarType.FP32)\n\n arr0 = np.random.rand(4, 16, 16, 32).astype('float32')\n egr_tensor1 = core.eager.EagerTensor(arr0, place, True, False,\n \"numpy_tensor1\", False)\n self.assertEqual(egr_tensor1.persistable, True)\n self.assertEqual(egr_tensor1.name, \"numpy_tensor1\")\n self.assertEqual(egr_tensor1.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor1.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor1.stop_gradient, False)\n self.assertTrue(egr_tensor1.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor1.numpy(), arr0))\n\n arr1 = np.random.randint(100, size=(4, 16, 16, 32), dtype=np.int64)\n egr_tensor2 = core.eager.EagerTensor(arr1, place, False, True,\n \"numpy_tensor2\", True)\n self.assertEqual(egr_tensor2.persistable, False)\n self.assertEqual(egr_tensor2.name, \"numpy_tensor2\")\n self.assertEqual(egr_tensor2.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor2.dtype, core.VarDesc.VarType.INT64)\n self.assertEqual(egr_tensor2.stop_gradient, True)\n self.assertTrue(egr_tensor2.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor2.numpy(), arr1))\n\n arr2 = np.random.rand(4, 16, 16, 32, 64).astype('float32')\n egr_tensor3 = core.eager.EagerTensor(arr2)\n self.assertEqual(egr_tensor3.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor3.name)\n self.assertEqual(egr_tensor3.shape, [4, 16, 16, 32, 64])\n self.assertEqual(egr_tensor3.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor3.stop_gradient, True)\n self.assertTrue(\n egr_tensor3.place._equals(\n paddle.fluid.framework._current_expected_place()))\n self.assertTrue(np.array_equal(egr_tensor3.numpy(), arr2))\n\n egr_tensor3.stop_gradient = False\n egr_tensor4 = core.eager.EagerTensor(egr_tensor3)\n self.assertEqual(egr_tensor4.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor4.name)\n self.assertEqual(egr_tensor4.shape, egr_tensor3.shape)\n self.assertEqual(egr_tensor4.dtype, egr_tensor3.dtype)\n self.assertEqual(egr_tensor4.stop_gradient, 
True)\n self.assertTrue(\n egr_tensor4.place._equals(\n paddle.fluid.framework._current_expected_place()))\n self.assertTrue(\n np.array_equal(egr_tensor4.numpy(), egr_tensor3.numpy()))\n\n arr4 = np.random.rand(4, 16, 16, 32).astype('float32')\n egr_tensor5 = core.eager.EagerTensor(arr4, place)\n self.assertEqual(egr_tensor5.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor5.name)\n self.assertEqual(egr_tensor5.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor5.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor5.stop_gradient, True)\n self.assertTrue(egr_tensor5.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor5.numpy(), arr4))\n\n egr_tensor6 = core.eager.EagerTensor(egr_tensor5, core.CPUPlace())\n self.assertEqual(egr_tensor6.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor6.name)\n self.assertEqual(egr_tensor6.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor6.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor6.stop_gradient, True)\n self.assertEqual(egr_tensor6.place.is_cpu_place(), True)\n self.assertTrue(\n np.array_equal(egr_tensor6.numpy(), egr_tensor5.numpy()))\n\n egr_tensor7 = core.eager.EagerTensor(arr4, place, True)\n self.assertEqual(egr_tensor7.persistable, True)\n self.assertTrue(\"generated_tensor\" in egr_tensor7.name)\n self.assertEqual(egr_tensor7.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor7.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor7.stop_gradient, True)\n self.assertTrue(egr_tensor7.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor7.numpy(), arr4))\n\n egr_tensor8 = core.eager.EagerTensor(egr_tensor6, place, \"egr_tensor8\")\n self.assertEqual(egr_tensor8.persistable, False)\n self.assertEqual(egr_tensor8.name, \"egr_tensor8\")\n self.assertEqual(egr_tensor8.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor8.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor8.stop_gradient, True)\n self.assertTrue(egr_tensor8.place._equals(place))\n self.assertTrue(\n np.array_equal(egr_tensor8.numpy(), egr_tensor5.numpy()))\n\n egr_tensor9 = core.eager.EagerTensor(arr4, place, True, True)\n self.assertEqual(egr_tensor9.persistable, True)\n self.assertTrue(\"generated_tensor\" in egr_tensor9.name)\n self.assertEqual(egr_tensor9.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor9.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor9.stop_gradient, True)\n self.assertTrue(egr_tensor9.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor9.numpy(), arr4))\n\n x = np.random.rand(3, 3).astype('float32')\n t = paddle.fluid.Tensor()\n t.set(x, paddle.fluid.CPUPlace())\n egr_tensor10 = core.eager.EagerTensor(t, place)\n self.assertEqual(egr_tensor10.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor10.name)\n self.assertEqual(egr_tensor10.shape, [3, 3])\n self.assertEqual(egr_tensor10.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor10.stop_gradient, True)\n self.assertTrue(egr_tensor10.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor10.numpy(), x))\n\n egr_tensor11 = core.eager.EagerTensor(t, place, \"framework_constructed\")\n self.assertEqual(egr_tensor11.persistable, False)\n self.assertTrue(\"framework_constructed\" in egr_tensor11.name)\n self.assertEqual(egr_tensor11.shape, [3, 3])\n self.assertEqual(egr_tensor11.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor11.stop_gradient, True)\n 
self.assertTrue(egr_tensor11.place._equals(place))\n self.assertTrue(np.array_equal(egr_tensor11.numpy(), x))\n\n egr_tensor12 = core.eager.EagerTensor(t)\n self.assertEqual(egr_tensor12.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor12.name)\n self.assertEqual(egr_tensor12.shape, [3, 3])\n self.assertEqual(egr_tensor12.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor12.stop_gradient, True)\n self.assertTrue(egr_tensor12.place._equals(paddle.fluid.CPUPlace()))\n self.assertTrue(np.array_equal(egr_tensor12.numpy(), x))\n\n with self.assertRaisesRegexp(\n ValueError, \"The shape of Parameter should not be None\"):\n eager_param = EagerParamBase(shape=None, dtype=\"float32\")\n\n with self.assertRaisesRegexp(\n ValueError, \"The dtype of Parameter should not be None\"):\n eager_param = EagerParamBase(shape=[1, 1], dtype=None)\n\n with self.assertRaisesRegexp(\n ValueError,\n \"The dimensions of shape for Parameter must be greater than 0\"):\n eager_param = EagerParamBase(shape=[], dtype=\"float32\")\n\n with self.assertRaisesRegexp(\n ValueError,\n \"Each dimension of shape for Parameter must be greater than 0, but received /*\"\n ):\n eager_param = EagerParamBase(shape=[-1], dtype=\"float32\")\n\n eager_param = EagerParamBase(shape=[1, 1], dtype=\"float32\")\n self.assertTrue(eager_param.trainable)\n eager_param.trainable = False\n self.assertFalse(eager_param.trainable)\n with self.assertRaisesRegexp(\n ValueError,\n \"The type of trainable MUST be bool, but the type is /*\"):\n eager_param.trainable = \"False\"\n\n def test_constructor(self):\n print(\"Test_constructor\")\n paddle.set_device(\"cpu\")\n place_list = [core.CPUPlace()]\n if core.is_compiled_with_cuda():\n place_list.append(core.CUDAPlace(0))\n with _test_eager_guard():\n for p in place_list:\n self.constructor(p)\n\n def test_copy_and_copy_to(self):\n print(\"Test_copy_and_copy_to\")\n with _test_eager_guard():\n paddle.set_device(\"cpu\")\n arr = np.ones([4, 16, 16, 32]).astype('float32')\n arr1 = np.zeros([4, 16]).astype('float32')\n arr2 = np.ones([4, 16, 16, 32]).astype('float32') + np.ones(\n [4, 16, 16, 32]).astype('float32')\n tensor = paddle.to_tensor(arr, core.VarDesc.VarType.FP32,\n core.CPUPlace())\n self.assertEqual(tensor.stop_gradient, True)\n tensor.stop_gradient = False\n print(\"Set persistable\")\n tensor.persistable = False\n tensor1 = paddle.to_tensor(arr1, core.VarDesc.VarType.FP32,\n core.CPUPlace())\n tensor1.persistable = True\n self.assertEqual(tensor1.stop_gradient, True)\n self.assertTrue(np.array_equal(tensor.numpy(), arr))\n print(\"Test copy_\")\n tensor.copy_(tensor1, True)\n self.assertEqual(tensor.persistable, True)\n self.assertEqual(tensor.shape, [4, 16])\n self.assertEqual(tensor.dtype, core.VarDesc.VarType.FP32)\n self.assertTrue(np.array_equal(tensor.numpy(), arr1))\n\n print(\"Test _copy_to\")\n tensor2 = paddle.to_tensor(arr2, core.VarDesc.VarType.FP32,\n core.CPUPlace())\n self.assertTrue(np.array_equal(tensor2.numpy(), arr2))\n self.assertTrue(tensor2.place.is_cpu_place())\n tensor2.persistable = True\n tensor2.stop_gradient = False\n if core.is_compiled_with_cuda():\n tensor3 = tensor2._copy_to(True, core.CUDAPlace(0))\n self.assertTrue(np.array_equal(tensor3.numpy(), arr2))\n # NB: assertTrue's second argument is the failure message, so the\n # two checks below only assert truthiness, not equality with True\n self.assertTrue(tensor3.persistable, True)\n self.assertTrue(tensor3.stop_gradient, True)\n self.assertTrue(tensor3.place.is_gpu_place())\n else:\n tensor3 = tensor2._copy_to(True, core.CPUPlace())\n self.assertTrue(np.array_equal(tensor3.numpy(), arr2))\n # NB: as above, the second argument is only the assertion message\n self.assertTrue(tensor3.persistable, True)\n self.assertTrue(tensor3.stop_gradient, True)\n self.assertTrue(tensor3.place.is_cpu_place())\n\n def test_properties(self):\n print(\"Test_properties\")\n with _test_eager_guard():\n paddle.set_device(\"cpu\")\n arr = np.ones([4, 16, 16, 32]).astype('float32')\n tensor = paddle.to_tensor(arr, core.VarDesc.VarType.FP32,\n core.CPUPlace())\n self.assertEqual(tensor.shape, [4, 16, 16, 32])\n tensor.name = 'tensor_name_test'\n self.assertEqual(tensor.name, 'tensor_name_test')\n self.assertEqual(tensor.persistable, False)\n tensor.persistable = True\n self.assertEqual(tensor.persistable, True)\n tensor.persistable = False\n self.assertEqual(tensor.persistable, False)\n self.assertTrue(tensor.place.is_cpu_place())\n self.assertEqual(tensor._place_str, 'CPUPlace')\n self.assertEqual(tensor.stop_gradient, True)\n tensor.stop_gradient = False\n self.assertEqual(tensor.stop_gradient, False)\n tensor.stop_gradient = True\n self.assertEqual(tensor.stop_gradient, True)\n\n def test_global_properties(self):\n print(\"Test_global_properties\")\n self.assertFalse(core._in_eager_mode())\n with _test_eager_guard():\n self.assertTrue(core._in_eager_mode())\n self.assertFalse(core._in_eager_mode())\n\n def test_place_guard(self):\n core._enable_eager_mode()\n if core.is_compiled_with_cuda():\n paddle.set_device(\"gpu:0\")\n with paddle.fluid.framework._dygraph_place_guard(core.CPUPlace()):\n self.assertTrue(core.eager._get_expected_place().is_cpu_place())\n else:\n paddle.set_device(\"cpu\")\n with paddle.fluid.framework._dygraph_place_guard(core.CPUPlace()):\n self.assertTrue(core.eager._get_expected_place().is_cpu_place())\n core._disable_eager_mode()\n\n\nclass EagerParamBaseUsageTestCase(unittest.TestCase):\n def test_print(self):\n with _test_eager_guard():\n linear = paddle.nn.Linear(3, 3, bias_attr=False)\n print(linear.weight)\n\n def test_copy(self):\n with _test_eager_guard():\n linear = paddle.nn.Linear(1, 3)\n linear_copy = copy.deepcopy(linear)\n linear_copy2 = linear.weight._copy_to(core.CPUPlace(), True)\n self.assertTrue(\n np.array_equal(linear.weight.numpy(),\n linear_copy.weight.numpy()))\n self.assertTrue(\n np.array_equal(linear.weight.numpy(), linear_copy2.numpy()))\n\n def func_fp16_initializer(self):\n paddle.set_default_dtype(\"float16\")\n linear1 = paddle.nn.Linear(1, 3, bias_attr=False)\n linear2 = paddle.nn.Linear(\n 1,\n 3,\n bias_attr=False,\n weight_attr=paddle.fluid.initializer.Uniform())\n linear3 = paddle.nn.Linear(\n 1,\n 3,\n bias_attr=False,\n weight_attr=paddle.fluid.initializer.TruncatedNormalInitializer())\n linear4 = paddle.nn.Linear(\n 1,\n 3,\n bias_attr=False,\n weight_attr=paddle.fluid.initializer.MSRAInitializer())\n res = [\n linear1.weight.numpy(), linear2.weight.numpy(),\n linear3.weight.numpy(), linear4.weight.numpy()\n ]\n paddle.set_default_dtype(\"float32\")\n return res\n\n def test_fp16_initializer(self):\n res1 = list()\n res2 = list()\n paddle.seed(102)\n paddle.framework.random._manual_program_seed(102)\n with _test_eager_guard():\n res1 = self.func_fp16_initializer()\n res2 = self.func_fp16_initializer()\n\n for i in range(len(res1)):\n self.assertTrue(np.array_equal(res1[i], res2[i]))\n\n def func_layer_helper_base(self, value):\n base = paddle.fluid.layer_helper_base.LayerHelperBase(\"test_layer\",\n \"test_layer\")\n return base.to_variable(value).numpy()\n\n def func_base_to_variable(self, value):\n paddle.fluid.dygraph.base.to_variable(value)\n\n def test_to_variable(self):\n value = np.random.rand(4, 
16, 16, 32).astype('float32')\n res1 = None\n res3 = None\n with _test_eager_guard():\n res1 = self.func_layer_helper_base(value)\n res3 = self.func_base_to_variable(value)\n res2 = self.func_layer_helper_base(value)\n res4 = self.func_base_to_variable(value)\n self.assertTrue(np.array_equal(res1, res2))\n self.assertTrue(np.array_equal(res3, res4))\n\n def test_backward_with_single_tensor(self):\n arr4 = np.random.rand(4, 16, 16, 32).astype('float32')\n egr_tensor12 = core.eager.EagerTensor(arr4, core.CPUPlace())\n egr_tensor12.retain_grads()\n arr = np.ones([4, 16, 16, 32]).astype('float32')\n self.assertEqual(egr_tensor12.persistable, False)\n self.assertTrue(\"generated_tensor\" in egr_tensor12.name)\n self.assertEqual(egr_tensor12.shape, [4, 16, 16, 32])\n self.assertEqual(egr_tensor12.dtype, core.VarDesc.VarType.FP32)\n self.assertEqual(egr_tensor12.stop_gradient, True)\n self.assertTrue(egr_tensor12.place._equals(paddle.fluid.CPUPlace()))\n self.assertTrue(np.array_equal(egr_tensor12.numpy(), arr4))\n self.assertTrue(np.array_equal(egr_tensor12.gradient(), None))\n egr_tensor12.backward()\n self.assertTrue(np.array_equal(egr_tensor12.gradient(), arr))\n\n\nclass EagerGuardTestCase(unittest.TestCase):\n def test__test_eager_guard(self):\n tracer = paddle.fluid.dygraph.tracer.Tracer()\n with _test_eager_guard(tracer):\n self.assertTrue(_in_eager_mode())\n\n\nif __name__ == \"__main__\":\n unittest.main()\n", "# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport unittest\n\nimport paddle\nimport paddle.fluid as fluid\nfrom paddle.static import InputSpec\nfrom paddle.fluid.dygraph import to_variable, declarative, ProgramTranslator, Layer, jit\nfrom paddle.fluid.dygraph.dygraph_to_static.program_translator import ConcreteProgram, StaticFunction\n\nfrom test_basic_api_transformation import dyfunc_to_variable\n\nprogram_trans = ProgramTranslator()\n\n\nclass SimpleNet(Layer):\n def __init__(self):\n super(SimpleNet, self).__init__()\n self.linear = fluid.dygraph.Linear(10, 3)\n\n @declarative(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])\n def forward(self, x, a=1, b=2):\n y = self.inner_function(x)\n return y\n\n # `declarative` is not essential, add it to test for robustness.\n @declarative\n def inner_function(self, x):\n y = self.linear(x)\n return y\n\n def add_func(self, x, y):\n z = x + y\n return z\n\n @declarative(input_spec=[[InputSpec([None, 10]), InputSpec([None, 10])]])\n def func_with_list(self, l, int_val=1):\n x, y = l\n z = x + y\n z = z + int_val\n return z\n\n @declarative(input_spec=[{\n 'x': InputSpec([None, 10]),\n 'y': InputSpec([None, 10])\n }])\n def func_with_dict(self, d):\n x = d['x']\n y = d['y']\n z = x + y\n\n return z\n\n @declarative(input_spec=[[\n InputSpec([None]), {\n 'x': InputSpec([None, 10]),\n 'y': InputSpec([None, 10])\n }\n ]])\n def func_with_list_dict(self, dl):\n bias = dl[0]\n x = dl[1]['x']\n y = dl[1]['y']\n\n z = x + y\n z = z + bias\n\n 
return z\n\n\nclass TestStaticFunctionInstance(unittest.TestCase):\n def test_instance_same_class(self):\n with fluid.dygraph.guard(fluid.CPUPlace()):\n net_1 = SimpleNet()\n net_2 = SimpleNet()\n\n self.assertTrue(isinstance(net_1.forward, StaticFunction))\n self.assertTrue(isinstance(net_2.forward, StaticFunction))\n self.assertNotEqual(net_1.forward, net_2.forward)\n\n # convert layer into static program of net_1\n net_1.forward.concrete_program\n self.assertTrue(len(net_1.forward.program_cache) == 1)\n # check no conversion applied to net_2\n self.assertTrue(len(net_2.forward.program_cache) == 0)\n\n\nclass TestInputSpec(unittest.TestCase):\n def setUp(self):\n pass\n\n def test_with_input_spec(self):\n with fluid.dygraph.guard(fluid.CPUPlace()):\n x = to_variable(np.ones([4, 10]).astype('float32'))\n y = to_variable(np.ones([4, 10]).astype('float32') * 2)\n int_val = 4.\n\n net = SimpleNet()\n\n # 1. each method holds an independent program cache\n out = net(x)\n self.assertTrue(len(net.forward.program_cache) == 1)\n\n # 2. test save load\n net.inner_function(x)\n jit.save(net, './simple_net')\n infer_net = fluid.dygraph.jit.load('./simple_net')\n pred = infer_net(x)\n self.assertTrue(np.allclose(out.numpy(), pred.numpy()))\n\n # 3. we can decorate any method\n x_2 = to_variable(np.ones([4, 20]).astype('float32'))\n # uses `declarative(func)` instead of `@declarative`\n net.add_func = declarative(net.add_func)\n out = net.add_func(x_2, np.ones([20]).astype('float32'))\n self.assertTrue(len(net.add_func.program_cache) == 1)\n\n # 5. test input with list\n out = net.func_with_list([x, y], int_val)\n\n # 6. test input with dict\n out = net.func_with_dict({'x': x, 'y': y})\n\n # 7. test input with a list containing a dict\n int_np = np.ones([1]).astype('float32')\n out = net.func_with_list_dict([int_np, {'x': x, 'y': y}])\n\n def test_with_error(self):\n with fluid.dygraph.guard(fluid.CPUPlace()):\n x = to_variable(np.ones([4, 10]).astype('float32'))\n y = to_variable(np.ones([4, 10]).astype('float32') * 2)\n int_val = 4.\n\n net = SimpleNet()\n\n # 1. kwargs and input_spec should not be specified at the same time\n with self.assertRaises(ValueError):\n net(x, a=1, other_kwarg=2)\n\n # 2. requires len(input_spec) <= len(args)\n with self.assertRaises(ValueError):\n net.add_func = declarative(\n net.add_func,\n input_spec=[\n InputSpec([-1, 10]), InputSpec([-1, 10]),\n InputSpec([10])\n ])\n net.add_func(x, y)\n\n def test_concrete_program(self):\n with fluid.dygraph.guard(fluid.CPUPlace()):\n x = to_variable(np.ones([4, 10]).astype('float32'))\n y = to_variable(np.ones([4, 10]).astype('float32') * 2)\n int_val = 4.\n\n net = SimpleNet()\n # We can get concrete_program by specifying InputSpec information. 
There is no need to fake the input.\n net.add_func = declarative(\n net.add_func,\n input_spec=[\n InputSpec([-1, 10]), InputSpec(\n [-1, 10], name='y')\n ])\n cp1 = net.add_func.concrete_program\n self.assertTrue(cp1.inputs[-1].shape == (-1, 10))\n self.assertTrue(cp1.inputs[-1].name == 'y')\n\n # generate another program\n net.add_func = declarative(\n net.add_func,\n input_spec=[InputSpec([10]), InputSpec(\n [10], name='label')])\n cp2 = net.add_func.concrete_program\n self.assertTrue(cp2.inputs[-1].shape == (10, ))\n self.assertTrue(cp2.inputs[-1].name == 'label')\n # Note(Aurelius84): A new instance will be returned if we use `declarative(foo)` every time.\n # So the number of cached programs is 1.\n self.assertTrue(len(net.add_func.program_cache) == 1)\n self.assertTrue(cp1 != cp2)\n\n\ndef foo_func(a, b, c=1, d=2):\n z = a + b\n return z\n\n\nclass TestDifferentInputSpecCacheProgram(unittest.TestCase):\n def setUp(self):\n program_trans.enable(True)\n\n def test_with_different_input(self):\n with fluid.dygraph.guard(fluid.CPUPlace()):\n x_data = np.ones([16, 10]).astype('float32')\n y_data = np.ones([10]).astype('float32') * 2\n z_data = np.ones([10]).astype('float32') * 2.2\n\n foo = declarative(foo_func)\n\n # [16, 10] + [10] (varbase)\n out_1 = foo(to_variable(x_data), to_variable(y_data))\n self.assertTrue(np.allclose(x_data + y_data, out_1.numpy()))\n self.assertTrue(len(foo.program_cache) == 1)\n self.assertTrue(len(foo.program_cache.concrete_programs()) == 1)\n\n # [16, 10] + [10] (numpy)\n out_2 = foo(to_variable(x_data), y_data)\n self.assertTrue(np.allclose(x_data + y_data, out_2.numpy()))\n self.assertTrue(len(foo.program_cache) == 1)\n\n # [16, 10] + [10] (numpy)\n out_3 = foo(to_variable(x_data), z_data)\n self.assertTrue(np.allclose(x_data + z_data, out_3.numpy()))\n # hits the cached program\n self.assertTrue(len(foo.program_cache) == 1)\n\n # [16, 10] + [10] (numpy) with a different argument (c=3)\n out_4 = foo(to_variable(x_data), z_data, 3)\n self.assertTrue(np.allclose(x_data + z_data, out_4.numpy()))\n # creates a new program\n self.assertTrue(len(foo.program_cache) == 2)\n\n def test_get_concrete_program(self):\n\n foo = declarative(foo_func)\n\n # 1. specify InputSpec for `x`/`y`\n concrete_program_1 = foo.get_concrete_program(\n InputSpec([None, 10]), InputSpec([10]))\n self.assertTrue(len(foo.program_cache) == 1)\n\n # 2. specify `c`/`d` explicitly with the same default value\n concrete_program_2 = foo.get_concrete_program(\n InputSpec([None, 10]), InputSpec([10]), 1, 2)\n self.assertTrue(concrete_program_2 == concrete_program_1)\n self.assertTrue(len(foo.program_cache) == 1)\n\n # 3. specify `c` = 2\n concrete_program_3 = foo.get_concrete_program(\n InputSpec([None, 10]), InputSpec([10]), c=2)\n self.assertTrue(concrete_program_3 != concrete_program_1)\n self.assertTrue(len(foo.program_cache) == 2)\n\n # 4. specify x.shape = [10]\n concrete_program_4 = foo.get_concrete_program(\n InputSpec([10]), InputSpec([10]))\n self.assertTrue(concrete_program_4 != concrete_program_1)\n self.assertTrue(len(foo.program_cache) == 3)\n\n # 5. only specify the InputSpec of x\n with self.assertRaises(ValueError):\n concrete_program_5 = foo.get_concrete_program(InputSpec([10]))\n\n # 6. 
specify unknown kwargs `e`=4\n with self.assertRaises(TypeError):\n concrete_program_5 = foo.get_concrete_program(\n InputSpec([10]), InputSpec([10]), e=4)\n\n def test_concrete_program(self):\n with fluid.dygraph.guard(fluid.CPUPlace()):\n\n # usage 1\n foo_1 = paddle.jit.to_static(\n foo_func,\n input_spec=[\n InputSpec(\n [10], name='x'), InputSpec(\n [10], name='y')\n ])\n self.assertTrue(isinstance(foo_1.concrete_program, ConcreteProgram))\n\n # usage 2\n foo_2 = paddle.jit.to_static(foo_func)\n out = foo_2(paddle.rand([10]), paddle.rand([10]))\n self.assertTrue(isinstance(foo_2.concrete_program, ConcreteProgram))\n\n # raise error\n foo_3 = paddle.jit.to_static(foo_func)\n with self.assertRaises(ValueError):\n foo_3.concrete_program\n\n\nclass TestInputDefaultName(unittest.TestCase):\n def setUp(self):\n paddle.disable_static()\n self.net = SimpleNet()\n\n def assert_default_name(self, func_name, input_names):\n decorated_func = getattr(self.net, func_name)\n\n spec_names = [x.name for x in decorated_func.inputs]\n self.assertListEqual(spec_names, input_names)\n\n def test_common_input(self):\n self.assert_default_name('forward', ['x'])\n\n def test_list_input(self):\n self.assert_default_name('func_with_list', ['l_0', 'l_1'])\n\n def test_dict_input(self):\n self.assert_default_name('func_with_dict', ['x', 'y'])\n\n def test_nest_input(self):\n self.assert_default_name('func_with_list_dict', ['dl_0', 'x', 'y'])\n\n\nclass TestDeclarativeAPI(unittest.TestCase):\n def test_error(self):\n func = declarative(dyfunc_to_variable)\n\n paddle.enable_static()\n\n # Running the callable object decorated by '@paddle.jit.to_static'\n # fails if it is NOT in dynamic mode.\n with self.assertRaises(RuntimeError):\n func(np.ones(5).astype(\"int32\"))\n\n program_trans.enable(False)\n with self.assertRaises(AssertionError):\n # AssertionError: We Only support to_variable in imperative mode,\n # please use fluid.dygraph.guard() as context to run it in imperative Mode\n func(np.ones(5).astype(\"int32\"))\n\n\nclass TestDecorateModelDirectly(unittest.TestCase):\n def setUp(self):\n paddle.disable_static()\n program_trans.enable(True)\n self.x = to_variable(np.ones([4, 10]).astype('float32'))\n\n def test_fake_input(self):\n net = SimpleNet()\n net = declarative(net)\n y = net(self.x)\n self.assertTrue(len(net.forward.program_cache) == 1)\n\n def test_input_spec(self):\n net = SimpleNet()\n net = declarative(net, input_spec=[InputSpec([None, 8, 10])])\n self.assertTrue(len(net.forward.inputs) == 1)\n self.assertTrue(len(net.forward.program_cache) == 1)\n input_shape = net.forward.inputs[0].shape\n self.assertListEqual(list(input_shape), [-1, 8, 10])\n\n # redecorate\n net = declarative(net, input_spec=[InputSpec([None, 16, 10])])\n input_shape = net.forward.inputs[0].shape\n self.assertListEqual(list(input_shape), [-1, 16, 10])\n\n\nclass TestErrorWithInitFromStaticMode(unittest.TestCase):\n def test_raise_error(self):\n # disable imperative\n paddle.enable_static()\n\n net = SimpleNet()\n with self.assertRaisesRegexp(RuntimeError,\n \"only available in dynamic mode\"):\n net.forward.concrete_program\n\n with self.assertRaisesRegexp(RuntimeError,\n \"only available in dynamic mode\"):\n net.forward.inputs\n\n with self.assertRaisesRegexp(RuntimeError,\n \"only available in dynamic mode\"):\n net.forward.outputs\n\n\nclass CallNonForwardFuncNet(paddle.nn.Layer):\n def __init__(self):\n super(CallNonForwardFuncNet, self).__init__()\n self.sub = CallNonForwardFuncSubNet()\n\n @paddle.jit.to_static\n def 
forward(self):\n return self.sub.func()\n\n\nclass CallNonForwardFuncSubNet(paddle.nn.Layer):\n def __init__(self):\n super(CallNonForwardFuncSubNet, self).__init__()\n self.a = paddle.to_tensor([1, 2])\n\n def func(self):\n x = self.a * 2\n return x\n\n\nclass TestCallNonForwardFunc(unittest.TestCase):\n def test_call_non_forward(self):\n paddle.disable_static()\n net = CallNonForwardFuncNet()\n out = net()\n self.assertEqual(out.numpy().tolist(), [2, 4])\n paddle.enable_static()\n\n\nif __name__ == '__main__':\n unittest.main()\n" ]
[ [ "numpy.random.random", "numpy.array_equal", "numpy.ones", "numpy.random.rand", "numpy.zeros", "numpy.random.randint" ], [ "numpy.ones" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
Efreeto/face-alignment
[ "d496866ac3d66c8353ba3e0305f16ac8a2ccc017" ]
[ "face_alignment/FaceLandmarksDataset.py" ]
[ "import torch\nfrom torch.utils.data import Dataset\nfrom skimage import io, color, transform\nimport torchvision\nimport os, glob\nimport numpy as np\nimport random\nfrom scipy import ndimage\nfrom PIL import Image\nimport torch.nn.functional as F\n\nfrom . import utils\n\n######################################################################\n# Transforms\n# ----------\n#\n# One issue we can see from the above is that the samples are not of the\n# same size. Most neural networks expect images of a fixed size.\n# Therefore, we will need to write some preprocessing code.\n# Let's create three transforms:\n#\n# - ``Rescale``: to scale the image\n# - ``RandomCrop``: to crop from the image randomly. This is data\n# augmentation.\n# - ``ToTensor``: to convert the numpy images to torch images (we need to\n# swap axes).\n#\n# We will write them as callable classes instead of simple functions so\n# that parameters of the transform need not be passed every time it's\n# called. For this, we just need to implement the ``__call__`` method and,\n# if required, the ``__init__`` method. We can then use a transform like this:\n#\n# ::\n#\n# tsfm = Transform(params)\n# transformed_sample = tsfm(sample)\n#\n# Observe below how these transforms had to be applied both on the image and\n# landmarks.\n#\n\n\nclass Rescale(object):\n \"\"\"Rescale the image in a sample to a given size.\n\n Args:\n output_size (int or tuple): Desired output size. If tuple, output is\n matched to output_size. If int, smaller of image edges is matched\n to output_size keeping aspect ratio the same.\n \"\"\"\n\n def __init__(self, output_size):\n assert isinstance(output_size, (int, tuple))\n self.output_size = output_size\n\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks']\n\n h, w = image.shape[:2]\n if isinstance(self.output_size, int):\n if h > w:\n new_h, new_w = self.output_size * h / w, self.output_size\n else:\n new_h, new_w = self.output_size, self.output_size * w / h\n else:\n new_h, new_w = self.output_size\n\n new_h, new_w = int(new_h), int(new_w)\n\n img = transform.resize(image, (new_h, new_w))\n\n # h and w are swapped for landmarks because for images,\n # x and y axes are axis 1 and 0 respectively\n landmarks = landmarks * [new_w / w, new_h / h]\n\n img = img.astype('float32')\n landmarks = landmarks.astype('float32')\n\n return {'image': img, 'landmarks': landmarks}\n\nclass RandomHorizFlip(object):\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks']\n if random.random() < 0.5:\n image = np.fliplr(image).copy()\n landmarks = landmarks.transpose()\n landmarks[0] = image.shape[1] - landmarks[0]\n landmarks = landmarks.transpose()\n landmarks = utils.shuffle_lr(landmarks)\n\n return {'image': image, 'landmarks': landmarks}\n\n\n__imagenet_stats = {'mean': [0.485, 0.456, 0.406],\n 'std': [0.229, 0.224, 0.225]}\n\nimagenet_pca = {\n 'eigval': torch.Tensor([0.2175, 0.0188, 0.0045]),\n 'eigvec': torch.Tensor([\n [-0.5675, 0.7192, 0.4009],\n [-0.5808, -0.0045, -0.8140],\n [-0.5836, -0.6948, 0.4203],\n ])\n}\n\nclass Lighting(object):\n \"\"\"Lighting noise (AlexNet-style PCA-based noise)\"\"\"\n\n def __init__(self, alphastd=0.1, eigval=imagenet_pca['eigval'], eigvec=imagenet_pca['eigvec']):\n self.alphastd = alphastd\n self.eigval = eigval\n self.eigvec = eigvec\n\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks']\n if self.alphastd == 0:\n # no noise to add; return the sample dict (not the bare image)\n # so the transform chain stays composable\n return {'image': image, 'landmarks': landmarks}\n\n alpha = image.new().resize_(3).normal_(0, self.alphastd)\n 
rgb = self.eigvec.type_as(image).clone()\\\n .mul(alpha.view(1, 3).expand(3, 3))\\\n .mul(self.eigval.view(1, 3).expand(3, 3))\\\n .sum(1).squeeze()\n\n return {'image': image.add(rgb.view(3, 1, 1).expand_as(image)), 'landmarks': landmarks}\n\n\nclass FaceColorJitter(object):\n\n def __init__(self, brightness=0.4, contrast=0.4, saturation=0.4):\n self.color_jitter = torchvision.transforms.ColorJitter(brightness, contrast, saturation)\n\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks'].copy()\n\n to_pil = torchvision.transforms.ToPILImage()\n img = to_pil(image)\n img = self.color_jitter(img)\n to_tensor = torchvision.transforms.ToTensor()\n image = to_tensor(img).numpy().transpose(1,2,0)\n return {'image': image, 'landmarks': landmarks}\n\n\nclass RandomRotation(object):\n def __init__(self, maximum_angle=50., minimum_angle=5.):\n self.maximum_angle = maximum_angle - minimum_angle\n self.minimum_angle = minimum_angle\n\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks']\n rotation_angle = (random.random() - 0.5) * 2 * self.maximum_angle\n if rotation_angle > 0:\n rotation_angle += self.minimum_angle\n else:\n rotation_angle -= self.minimum_angle\n manual_theta = utils.transformation_matrix(-rotation_angle)\n manual_theta_inv = utils.transformation_matrix(rotation_angle)\n\n image_rot = ndimage.rotate(image, rotation_angle, reshape=True)\n origin_org = ((image.shape[1] / 2.0, image.shape[0] / 2.0))\n origin_rot = ((image_rot.shape[1] / 2.0, image_rot.shape[0] / 2.0))\n\n landmarks_rot = landmarks - origin_org\n landmarks_rot = np.asarray(np.dot(landmarks_rot, manual_theta_inv)[:, :2])\n landmarks_rot = landmarks_rot + origin_rot\n\n sample['image_rot'] = image_rot\n sample['landmarks_rot'] = landmarks_rot\n sample['theta'] = manual_theta\n sample['angle'] = rotation_angle\n\n return sample\n\n\nclass LandmarkCrop(object):\n def __init__(self, resolution):\n self.resolution = resolution\n\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks']\n bbox = utils.bounding_box(landmarks)\n center, scale = utils.center_scale_from_bbox(bbox)\n image = utils.crop(image, center, scale, self.resolution)\n # landmarks = landmarks - (bbox[0], bbox[1])\n sample['image'] = image\n sample['landmarks'] = landmarks\n\n if 'image_rot' in sample: # if RandomRotation, crop around the rotated image\n image, landmarks = sample['image_rot'], sample['landmarks_rot']\n bbox = utils.bounding_box(landmarks)\n center, scale = utils.center_scale_from_bbox(bbox)\n image = utils.crop(image, center, scale, self.resolution)\n # landmarks = landmarks - (bbox[0], bbox[1])\n sample['image_rot'] = image\n sample['landmarks_rot'] = landmarks\n\n return sample\n\n\nclass CreateHeatmaps(object):\n def __init__(self, output_size=64, n_features=68):\n self.output_size = output_size\n self.n_features = n_features\n\n def __call__(self, sample):\n landmarks = sample['landmarks']\n center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))\n heatmap = np.zeros((self.n_features, self.output_size, self.output_size))\n for i in range(self.n_features):\n new_pts = utils.transform(landmarks[i], center, scale, self.output_size)\n heatmap[i] = utils.draw_gaussian(heatmap[i], new_pts, 1)\n sample['heatmaps'] = torch.from_numpy(heatmap).view(self.n_features, self.output_size, self.output_size).float()\n\n if 'image_rot' in sample: # if RandomRotation, crop around the rotated image\n landmarks = sample['landmarks_rot']\n 
center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))\n heatmap = np.zeros((self.n_features, self.output_size, self.output_size))\n for i in range(self.n_features):\n new_pts = utils.transform(landmarks[i], center, scale, self.output_size)\n heatmap[i] = utils.draw_gaussian(heatmap[i], new_pts, 1)\n sample['heatmaps_rot'] = torch.from_numpy(heatmap).view(self.n_features, self.output_size, self.output_size).float()\n\n return sample\n\nclass CreateHeatmaps2(object):\n def __init__(self, output_size=64, n_features=68):\n self.output_size = output_size\n self.n_features = n_features\n if self.n_features==68:\n self.neigbor_list = [[2],[1,3],[2,4],[3,5],[4,6],[5,7],[6,8],[7,9],[8,10],\n [9,11],[10,12],[11,13],[12,14],[13,15],[14,16],[15,17],\n [16], [19], [18,20], [19,21], [20,22], [21],[24],[23,25],\n [24,26],[25,27],[26],[29],[28,30],[29,31],[30,34],[33],\n [32,34],[33,35],[34,36],[35],[],[37,39],[38,40],[],[40,42],\n [37,41],[],[43,45],[44,46],[],[46,48],[43,47],[],[49,51],\n [50,52],[51,53],[52,54],[53,55],[],[55,57],[56,58],[57,59],\n [58,60],[59,49],[49],[61,63],[62,64],[63,65],[55],[65,67],\n [66,68],[61,67]]\n elif self.n_features==108:\n # note: a comma was missing after the [] following [70,72],\n # which made this list literal a TypeError at runtime\n self.neigbor_list = [[2],[1,3],[2,4],[3,5],[4,6],[5,7],[6,8],[7,9],[8,10],\n [9,11],[10,12],[11,13],[12,14],[13,15],[14,16],[15,17],\n [16,18],[17,19],[18,20],[19,21],[20,22],[21,23],[22,24],\n [23,25],[24,26],[25,27],[26,28],[27,29],[28,30],[29,31],\n [30,32],[31,33],[32],[],[34,36],[35,37],[36,38],[], [39,41],\n [40,42],[41,43], [],[45],[44,46], [45,47], [46], [49],[48,50],\n [],[50,52],[51],[],[53,55],[54,56],[],[56,58], [],[],[59,61],\n [60,62],[],[62,64],[],[],[65,67],[66,68],[],[],[69,71],[70,72],[],\n [54,55],[58,57],[],[60,61],[63,64],[],[81],[82],[79,83],[80,84],\n [81,85],[82,86],[83,87],[84,88],[48],[52],[],[89,91],[90,92],\n [91,93],[92,94],[93,95],[],[95,97],[96,98],[97,99],[98,100],[89,99],\n [],[101,103],[102,104],[103,105],[],[105,107],[106,108],[101,107]]\n\n def __call__(self, sample):\n landmarks = sample['landmarks']\n # the original called an undefined center_scale_from_landmark();\n # use the same helper chain as CreateHeatmaps above\n center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))\n heatmap = np.zeros((self.n_features, self.output_size, self.output_size))\n foo = np.zeros((self.output_size, self.output_size))\n\n for i in range(self.n_features):\n neighbors = self.get_neighbors(i)\n num_neighbors = len(neighbors)\n if num_neighbors == 0:\n heatmap[i] = utils.draw_gaussian(heatmap[i], utils.transform(landmarks[i], center, scale, self.output_size), 1)\n foo = utils.draw_gaussian(foo, utils.transform(landmarks[i], center, scale, self.output_size), 1)\n else:\n if num_neighbors == 2:\n points = np.zeros((3,2))\n points[0] = utils.transform(landmarks[neighbors[0]-1], center, scale, self.output_size).numpy()\n points[1] = utils.transform(landmarks[i], center, scale, self.output_size).numpy()\n points[2] = utils.transform(landmarks[neighbors[1]-1], center, scale, self.output_size).numpy()\n else:\n points = np.zeros((2,2))\n points[0] = utils.transform(landmarks[neighbors[0]-1], center, scale, self.output_size).numpy()\n points[1] = utils.transform(landmarks[i], center, scale, self.output_size).numpy()\n\n heatmap[i] = utils.draw_gaussian2(heatmap[i], points, 1)\n # foo = utils.draw_gaussian(foo, utils.transform(landmarks[i], center, scale, self.output_size), 1)\n foo = utils.draw_gaussian2(foo, points, 1)\n \"\"\"\n from PIL import Image\n im = Image.fromarray(foo*255)\n im.show()\n \"\"\"\n\n heatmaps = torch.from_numpy(heatmap).view(1, self.n_features, self.output_size, self.output_size).float()\n\n return {'image': 
sample['image'], 'landmarks': heatmaps}\n\n def get_neighbors(self, landmark):\n return self.neigbor_list[landmark]\n\n\nclass RandomCrop(object):\n \"\"\"Crop randomly the image in a sample.\n\n Args:\n output_size (tuple or int): Desired output size. If int, square crop\n is made.\n \"\"\"\n\n def __init__(self, output_size):\n assert isinstance(output_size, (int, tuple))\n if isinstance(output_size, int):\n self.output_size = (output_size, output_size)\n else:\n assert len(output_size) == 2\n self.output_size = output_size\n\n def __call__(self, sample):\n image, landmarks = sample['image'], sample['landmarks']\n\n h, w = image.shape[:2]\n new_h, new_w = self.output_size\n\n top = np.random.randint(0, h - new_h)\n left = np.random.randint(0, w - new_w)\n\n image = image[top: top + new_h,\n left: left + new_w]\n\n landmarks = landmarks - [left, top]\n\n return {'image': image, 'landmarks': landmarks}\n\nclass ToTensor(object):\n \"\"\"Convert ndarrays in sample to Tensors.\"\"\"\n\n def __call__(self, sample):\n for key in sample:\n if key in ['image', 'image_rot']:\n sample[key] = torchvision.transforms.ToTensor()(sample[key])\n elif key in ['filename', 'angle', 'heatmaps', 'heatmaps_rot']:\n continue\n else:\n sample[key] = torch.from_numpy(sample[key]).float()\n return sample\n\nclass FaceLandmarksDataset(Dataset):\n \"\"\"Face Landmarks dataset.\"\"\"\n\n def __init__(self, path, type=1, transforms=None):\n \"\"\"\n Args:\n path (string): Directory with all the images and landmarks.\n transforms (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.type = type\n self.transforms = transforms\n\n image_exts = ('*.jpg', '*.png')\n self.images_list = []\n for ext in image_exts:\n self.images_list.extend(sorted(glob.glob(os.path.join(path, ext))))\n assert self.images_list, \"path does not contain images\"\n\n def __len__(self):\n return len(self.images_list)\n\n def __getitem__(self, idx):\n image = io.imread(self.images_list[idx])\n image = color.grey2rgb(image) # For some gray scale images\n\n filename = self.images_list[idx]\n basename = os.path.splitext(filename)[0]\n if self.type == 1: # 300W, lfpw\n landmarks = np.loadtxt(basename + '.pts', skiprows=3, comments='}')\n elif self.type == 2: # land110\n landmarks = np.loadtxt(basename + '.land', skiprows=1)\n # landmarks = np.vstack((landmarks[0:32:2], landmarks[32:64], landmarks[88:108]))\n elif self.type == 3: # FEI\n landmarks = np.ones((68,2))\n elif self.type == 4: # 8W\n landmarks = np.loadtxt(basename + '.pts')\n\n sample = {'image': image, 'landmarks': landmarks, 'filename': filename}\n if self.transforms:\n sample = self.transforms(sample)\n\n return sample\n" ]
[ [ "numpy.dot", "torch.Tensor", "numpy.fliplr", "torch.from_numpy", "scipy.ndimage.rotate", "numpy.ones", "numpy.zeros", "numpy.loadtxt", "numpy.random.randint" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "1.7", "1.0", "0.10", "1.2", "0.14", "0.19", "1.5", "0.12", "0.17", "0.13", "1.6", "1.4", "1.9", "1.3", "1.10", "0.15", "0.18", "0.16", "1.8" ], "tensorflow": [] } ]
PNNL-Comp-Mass-Spec/CRNT4SBML
[ "20406f452863f35f766b504fe2b3f3ab034b62fe", "20406f452863f35f766b504fe2b3f3ab034b62fe" ]
[ "crnt4sbml/safety_wrap.py", "tests/nuts_script.py" ]
[ "import os\nimport pickle\nimport numpy\nimport antimony\nimport roadrunner\nimport rrplugins\nimport sys\n\nroadrunner.Logger.setLevel(roadrunner.Logger.LOG_ERROR)\nroadrunner.Logger.disableLogging()\nroadrunner.Logger.disableConsoleLogging()\nroadrunner.Logger.disableFileLogging()\nrrplugins.setLogLevel('error')\n\nstderr_fileno = sys.stderr.fileno()\nstderr_save = os.dup(stderr_fileno)\nstderr_pipe = os.pipe()\nos.dup2(stderr_pipe[1], stderr_fileno)\nos.close(stderr_pipe[1])\n\n\n# functions taken from Tellurium!! Give them\n# credit, they deserve it!\n#################################################\ndef __check_antimony_return_code(code):\n if code < 0:\n raise Exception('Antimony: {}'.format(antimony.getLastError()))\n\n\ndef __antimony_to_sbml(ant):\n try:\n isfile = os.path.isfile(ant)\n except ValueError:\n isfile = False\n if isfile:\n code = antimony.loadAntimonyFile(ant)\n else:\n code = antimony.loadAntimonyString(ant)\n __check_antimony_return_code(code)\n mid = antimony.getMainModuleName()\n return antimony.getSBMLString(mid)\n\n\ndef __loada(ant):\n return __load_antimony_model(ant)\n\n\ndef __load_antimony_model(ant):\n sbml = __antimony_to_sbml(ant)\n return roadrunner.RoadRunner(sbml)\n\n\nwith open('input_arguments.pickle', 'rb') as pickle_file:\n input_arguments = pickle.loads(pickle_file.read())\n\nant_str = input_arguments[0]\ndirection = input_arguments[1]\nauto = rrplugins.Plugin(\"tel_auto2000\")\nauto_parameters = input_arguments[2]\n\nantimony_r = __loada(ant_str)\n\n# # making the directory auto_fort_files if it does not exist\n# if not os.path.isdir(\"./auto_fort_files\"):\n# os.mkdir(\"./auto_fort_files\")\n\nauto.setProperty(\"SBML\", antimony_r.getCurrentSBML())\nauto.setProperty(\"ScanDirection\", direction)\nauto.setProperty(\"PreSimulation\", \"True\")\nauto.setProperty(\"PreSimulationDuration\", 1.0)\nauto.setProperty('KeepTempFiles', True)\nauto.setProperty(\"TempFolder\", \"auto_fort_files\")\n\n# assigning values provided by the user\nfor i in auto_parameters.keys():\n auto.setProperty(i, auto_parameters[i])\n\ntry:\n auto.execute()\n # indices where special points are\n pts = auto.BifurcationPoints\n # labeling of special points\n lbls = auto.BifurcationLabels\n # all data for parameters and species found by continuation\n bi_data = auto.BifurcationData\n\n # converts bi_data to a numpy array, where the first\n # column is the principal continuation parameter and\n # the rest of the columns are the species\n bi_data_np = bi_data.toNumpy\n flag = True\n\nexcept Exception:\n flag = False\n pts = []\n lbls = []\n bi_data_np = numpy.zeros(2)\n\nant_float_ids = antimony_r.model.getFloatingSpeciesIds()\nnumpy.save('bi_data_np.npy', bi_data_np)\n\noutput_arguments = [pts, lbls, ant_float_ids, flag]\n\n# overwrite any results from a previous run\nif os.path.exists(\"output_arguments.pickle\"):\n os.remove(\"output_arguments.pickle\")\nwith open('output_arguments.pickle', 'wb') as outf:\n outf.write(pickle.dumps(output_arguments))\n\nos.close(stderr_pipe[0])\nos.dup2(stderr_save, stderr_fileno)\nos.close(stderr_save)\nos.close(stderr_fileno)\n", "import sys\nsys.path.insert(0, \"..\")\nimport crnt4sbml\nimport numpy\nimport sympy\nimport pandas\nimport scipy.integrate as itg\nimport dill\nfrom plotnine import ggplot, aes, geom_line, ylim, scale_color_distiller, facet_wrap, theme_bw, geom_path, geom_point, labs, annotate\nfrom matplotlib import rc\nrc('text', usetex=True)\n\n# network = 
crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/simple_biterminal.xml\")\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/simple_biterminal_v2.xml\")\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts.xml\") # No, but zero value found\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_1.xml\") # Yes\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_2.xml\") # No, bifurcation and limit points found and zero value found\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_3.xml\")\n\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_4c.xml\")\nnetwork = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_1.xml\")\n# network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_4d.xml\")\n\n#network = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/subspace_strange.xml\")\n\nsignal = \"C1\"\nresponse = \"s11\"\n\n\n# network = crnt4sbml.CRNT(\"../sbml_files/two_dim_tk.xml\")\n# signal = \"C1\"\n# response = \"s1\"\n\nnetwork.basic_report()\nnetwork.print_c_graph()\n\nGA = network.get_general_approach(signal=signal, response=response, fix_reactions=True)\n\nprint(GA.get_conservation_laws())\n# sys.exit()\n# print(GA.get_fixed_reactions())\n# print(GA.get_solutions_to_fixed_reactions())\n#\n# sympy.pprint(GA.get_independent_odes_subs())\n\nbnds = [(0.0, 100.0)]*len(network.get_c_graph().get_reactions()) + [(0.0, 100.0)]*len(network.get_c_graph().get_species())\n\n\n# bnds = [(1e-2, 100.0)]*len(network.get_c_graph().get_reactions()) + [(1e-2, 100.0)]*(len(network.get_c_graph().get_species())-1) + [(0.0, 1e-5)]\n\n# bnds = [(1e-2, 100.0)]*len(network.get_c_graph().get_reactions()) + [(1e-2, 100.0)]*(len(network.get_c_graph().get_species())-1) + [0.0]\n\n# bnds = [(1,2), (3,4), (5,6), (7,8), (9,10), (11, 12), (13,14), (15,16), (17,18), (19, 20), (21,22), (23,24), (25,26), (27,28), (29,30), (31,32), 0.0]\n\n# bnds = [(1e-2, 100.0)]*len(network.get_c_graph().get_reactions()) + [(1e-2, 100.0)]*(len(network.get_c_graph().get_species())-1) + [(0.0, 1e-12)]\n\n\nprint(network.get_c_graph().get_reactions() + network.get_c_graph().get_species())\n\n\n# print(GA.get_input_vector())\n\n# [re5f, re5d, re5c, re6, re7f, re7d, re7c, re8, re16, re18, s9, s10, s2, s2s9, s11, s2s10, s1] # nuts submodel 4c\n# [re5f, re5d, re5c, re6, re7f, re7d, re7c, re8, s9, s10, s2, s2s9, s11, s2s10] # nuts submodel 1\n\n\n# nuts submodel 1\n# bnds = [(16.5, 17.5), (92.0, 92.5), (0.01, 0.025), (0.2, 0.25), (0.78, 0.79), (3.6, 3.7), (0.15, 0.25), (0.06, 0.065)] + \\\n# [(25.85, 25.95), (8.1, 8.15), (18.6, 18.67), (88.85, 88.95), (99.7, 99.8), (30.6, 30.7)]\n\n# bnds = [(16.5, 17.5), (92.0, 92.5), (0.01, 0.025), (0.2, 0.25), (0.78, 0.79), (3.6, 3.7), (0.15, 0.25), (0.06, 0.065), (0.0, 100.0), (0.0, 100.0)] + \\\n# [(25.85, 25.95), (8.1, 8.15), (18.6, 18.67), (88.85, 88.95), (99.7, 99.8), (30.6, 30.7), (0.0, 1.0)]\n\n# bnds = [(16.5, 17.5), (92.0, 92.5), (0.01, 0.025), (0.2, 0.25), (0.78, 0.79), (3.6, 3.7), (0.15, 0.25), (0.06, 0.065), (0.0, 100.0), (0.0, 100.0)] + \\\n# [(25.85, 25.95), (8.1, 8.15), (18.6, 18.67), (88.85, 88.95), (99.7, 99.8), (30.6, 30.7), (0.0, 100.0)]\n\n# bnds = [(10.0, 20.0), (90.0, 100.0), (0.0, 1.0), (0.0, 1.0), (0.0, 1.0), (2.0, 4.0), (0.0, 1.0), (0.0, 1.0), (0.0, 100.0), (0.0, 100.0)] + \\\n# [(20.0, 30.0), (5.0, 10.0), (15.0, 20.0), (80.0, 90.0), (90.0, 100.0), (30.0, 31.0), (0.0, 
100.0)]\n\n\n# bnds = [(1.6e+01, 1.7e+01), (9.15e+01, 9.3e+01), (2.0e-02, 2.1e-02), (2.1e-01, 2.3e-01), (7.8e-01, 7.9e-01), (3.6e+00, 3.7e+00),\n# (1.9e-01, 1.99e-01), (6.0e-02, 6.1e-02), (2.9e+01, 2.91e+01), (2.9e+01, 2.91e+01), (2.5e+01, 2.6e+01),\n# (8.1e+00, 8.11e+00), (1.8e+01, 1.9e+01), (8.88e+01, 8.9e+01),\n# (9.95e+01, 9.99e+01), (3.0e+01, 3.1e+01), (9.95e-05, 9.99e-05)]\n\n# print(GA.get_input_vector())\n# sys.exit()\n\n# bnds = [16.976763, 92.360763, 0.019929, 0.218282, 0.786835, 3.685272, 0.203731, 0.062582]\n\n# print(network.get_c_graph().get_species())\n# sys.exit()\n\n# [re5f, re5d, re5c, re6, re7f, re7d, re7c, re8]\n\n# s10 8.116689\n# s2s9 88.901835\n# s11 99.785741\n# s2s10 30.651950\n\n# s2 = 138.219047 - 30.651950 - 88.901835\n# s9 = 253.374303 - 8.116689 - 99.785741 - 30.651950 - 88.901835\n\n\nprint(GA.get_decision_vector())\n\nparams_for_global_min, obj_fun_vals = GA.run_optimization(bounds=bnds, iterations=5, seed=0, print_flag=True,\n dual_annealing_iters=1000, confidence_level_flag=True)\n\n\n# params_for_global_min, obj_fun_vals, my_rank = GA.run_mpi_optimization(bounds=bnds, iterations=20, seed=0, print_flag=True,\n# dual_annealing_iters=1000, confidence_level_flag=True)\n#\n# GA.generate_report()\n#\n# if my_rank == 0:\nnumpy.save('./num_cont_nuts_model_3/params3.npy', params_for_global_min)\n\n# params_for_global_min = numpy.load('./num_cont_nuts_model_3/params2.npy')\n\n# params_for_global_min = numpy.load('./num_cont_nuts_model_3/params2.npy')\n\n# print(params_for_global_min)\n#\nmultistable_param_ind, plot_specifications = GA.run_greedy_continuity_analysis(species=response, parameters=params_for_global_min, print_lbls_flag=True,\n auto_parameters={'PrincipalContinuationParameter': signal},\n dir_path='./num_cont_nuts_model_3')\n\nsys.exit()\n\n# sympy.pprint(GA.get_independent_odes_subs())\n# print(params_for_global_min[5])\n#\n# print(GA.get_variables_for_lambda_functions())\n\n\nsys.exit()\nmultistable_param_ind, plot_specifications = GA.run_continuity_analysis(species=response, parameters=[params_for_global_min[0]], print_lbls_flag=True,\n auto_parameters={'PrincipalContinuationParameter': signal,\n 'RL0':0.0, 'RL1':100.0, 'NMX': 1000000,\n 'ITMX': 100, 'DSMAX': 1e-2,\n 'A1': 1e10, 'ITNW': 10, 'NTST': 10, 'NCOL': 10},\n dir_path='./num_cont_nuts_model_3')\n\nsys.exit()\n\n# if my_rank == 0:\n# print(params_for_global_min)\n# print(params_for_global_min)\n# print(obj_fun_vals)\n\n#\n# GA.get_full_set_of_values(params_for_global_min)\n\n# [re5f, re5c, re7f, re7c] [re5d, re6, re7d, re8, s9, s10, s2, s2s9, s11, s2s10]\n# sys.exit()\n\n# params_for_global_min = numpy.load('./num_cont_nuts_model_3/params.npy')\n# if my_rank == 0:\n# numpy.save('./num_cont_nuts_model_3/params.npy', params_for_global_min)\n\nprint(\"original\")\nsympy.pprint(network.get_c_graph().get_ode_system())\n\nprint(\"\")\n\nsympy.pprint(GA.get_independent_odes_subs())\n\njac = GA.get_independent_odes_subs().jacobian(sympy.Matrix(GA.get_independent_species()))\n\nprint(\"jac of submodel 4c\")\nsympy.pprint(jac)\n\nprint(\"\")\n\n#sympy.pprint(jac.nullspace())\n\n\n\n# det_jac = jac.det(method='lu')\n# print(det_jac)\n# print(\"\")\n#\n# sympy_species = [sympy.Symbol(i, positive=True) for i in network.get_c_graph().get_species()]\n#\n# # print(sympy_species)\n# #\n# #\n# # sys.exit()\n#\n# zero_det_jac = det_jac.subs(GA.get_independent_species()[4], sympy.S.Zero)\n#\n# zero_det_jac = zero_det_jac.subs(GA.get_independent_species()[4], sympy.S.Zero)\n#\n# print(zero_det_jac)\n# 
print(\"\")\n\n\n#\n#\n# sympy_reactions = [sympy.Symbol(i, positive=True) for i in network.get_c_graph().get_reactions()]\n# print(sympy_reactions)\n# print(\"\")\n# zero_det_jac = det_jac.subs(sympy_reactions[8], sympy_reactions[9])\n# print(zero_det_jac)\n\n# sys.exit()\n#\n\n\nnetwork = crnt4sbml.CRNT(\"../sbml_files/insulin_signaling_motifs/Nuts_submodel_1.xml\")\n\nsignal = \"C1\"\nresponse = \"s11\"\n# network.basic_report()\n# network.print_c_graph()\n\nGA = network.get_general_approach(signal=signal, response=response, fix_reactions=False)\njac = GA.get_independent_odes_subs().jacobian(sympy.Matrix(GA.get_independent_species()))\n\nprint(\"nuts submodel 1\")\nsympy.pprint(jac)\n\nsys.exit()\n\nprint(\"\")\n\ndet_jac = jac.det(method='lu')\n\nprint(\"nuts submodel 1\")\nprint(det_jac)\n\n\n\n\n\n\n\n\n\n\n\nsys.exit()\n\nprint(GA.get_independent_species())\n#\n\n# sympy.pprint(GA.get_independent_odes())\n#\n# print(GA.get_conservation_laws())\n#\n# sympy_species = [sympy.Symbol(i, positive=True) for i in network.get_c_graph().get_species()]\n#\n# snd_eq = sympy_species[0] + sympy_species[1] - sympy.Symbol('C1', positive=True)\n#\n# print(snd_eq)\n#\n# diff_system = sympy.Matrix([[GA.get_independent_odes()[0]],[snd_eq]])\n# print(\"\")\n# print(\"Diff system\")\n# sympy.pprint(diff_system)\n#\n# jac = diff_system.jacobian(sympy.Matrix(sympy_species))\n#\n# sympy.pprint(jac)\n#\n# print(jac.det(method='lu'))\n#\n# sys.exit()\n\nmultistable_param_ind, plot_specifications = GA.run_greedy_continuity_analysis(species=response, parameters=params_for_global_min, print_lbls_flag=True,\n auto_parameters={'PrincipalContinuationParameter': signal},\n dir_path='./num_cont_nuts_model_3')\n\n\n# for i in multistable_param_ind:\n# print(params_for_global_min[i])\n# print(obj_fun_vals[i])\n\n# multistable_param_ind, plot_specifications = GA.run_continuity_analysis(species=response, parameters=params_for_global_min, print_lbls_flag=True,\n# auto_parameters={'PrincipalContinuationParameter': signal},\n# dir_path='./num_cont_nuts_model_3')\n\n# multistable_param_ind, plot_specifications = GA.run_continuity_analysis(species=response, parameters=[params_for_global_min[0]], print_lbls_flag=True,\n# auto_parameters={'PrincipalContinuationParameter': signal,\n# 'RL0': 100.0, 'RL1': 300.0, 'NMX': 1000000,\n# 'ITMX': 100, 'DSMAX': 100,\n# 'A1': 1e10, 'ITNW': 100, 'NTST': 100, 'NCOL': 100},\n# dir_path='./num_cont_nuts_model')\n\n#multistable_param_ind\n\n# print(GA.get_input_vector())\n\nGA.generate_report()\nsys.exit()\n\n# numpy.save('./num_cont_nuts_model/params.npy', params_for_global_min)\n# numpy.save('./num_cont_graphs/params.npy', params_for_global_min)\n# params_for_global_min = numpy.load('./num_cont_nuts_model/params.npy')\n\n# cont_return_vals = [multistable_param_ind, plot_specifications]\n#\n# with open(\"./num_cont_nuts_model/cont_vals.dill\", 'wb') as f:\n# dill.dump(cont_return_vals, f)\n\n# with open(\"./num_cont_nuts_sub_1_model/cont_vals.dill\", 'rb') as f:\n# out = dill.load(f)\n\nprint(params_for_global_min)\n# sys.exit()\n\nmultistable_param_ind = [0] #out[0]\nplot_specifications = [[[138.017, 138.32], [85.92064, 127.13776], [[201.095103, 99.761304, 'LP'], [138.118, 114.452, 'LP']]]]\n# [[[138.017, 138.32], [85.92064, 127.13776], [[138.219, 99.7857, 'LP'], [138.118, 114.452, 'LP']]]] # out[1]\n\nprint(plot_specifications)\n# sys.exit()\n\n# Parameters that produced bistability.\n# re* are kinetic constants. 
Units can be found here help(network.get_physiological_range).\ndf = pandas.DataFrame(numpy.vstack([params_for_global_min[i] for i in multistable_param_ind]).T,\n columns=[\"set\" + str(i + 1) for i in multistable_param_ind],\n index=[str(i) for i in GA.get_variables_for_lambda_functions()])\n\nprint(df)\n\nodes = network.get_c_graph().get_ode_system()\nsympy.pprint(odes)\n\n################## selected parameter set #########################\ndecision_vector_values = numpy.array(df['set1'])\nplot_specifications = plot_specifications[0] # warning, overwriting variable!!!\n\n\n################ ODEs ###################################\nprint(\"Original ODEs\")\nodes = network.get_c_graph().get_ode_system()\nsympy.pprint(odes)\n\n# why we need this? String -> Sympy objects\n# construct sympy form of reactions and species\nsympy_reactions = [sympy.Symbol(i, positive=True) for i in network.get_c_graph().get_reactions()]\nsympy_species = [sympy.Symbol(i, positive=True) for i in network.get_c_graph().get_species()]\n# joining together\nlambda_inputs = sympy_reactions + sympy_species\n# creating a lambda function for each ODE to\node_lambda_functions = [sympy.utilities.lambdify(lambda_inputs, odes[i]) for i in range(len(odes))]\n\n############################### kinetic constants ########################################################\nkinetic_constants = numpy.array([decision_vector_values[i] for i in range(len(sympy_reactions))])\n\nprint(kinetic_constants)\n\n################################# Computing material conservation values ############################\n# combine equilibrium species' concentrations according to conservation relationships\nconservation_values = numpy.array(decision_vector_values[len(sympy_reactions) + len(GA.get_independent_species()):])\n\nprint(conservation_values)\n\nprint(sympy_species)\n\n################################# starting concentrations ############################################\n# this assumes that a chemical moiety in one state (species) and other species containing this moiety are zero\n# assignment of conservation values to species requires exploring the model in CellDesigner\ny_fwd = [0.0, 0.0, conservation_values[0], 0.0, conservation_values[1], 0.0, 0.0]\ny_rev = [conservation_values[1], 0.0, conservation_values[0], 0.0, 0.0, 0.0, 0.0]\n\n# 62.898048\n\n# 201.0951028941771 - 62.898048\n\n# Note, the continuation parameter C3 (first position) will be varied during simulations\n\n############ simulation ###################\n# computing dy/dt increments\ndef f(cs, t, ks, ode_lambda_func):\n return [i(*tuple(ks), *tuple(cs)) for i in ode_lambda_func] # dy/dt\n\ndef sim_fun_fwd(x):\n y_fwd[2] = x # updating s1 concentration or C3\n return itg.odeint(f, y_fwd, t, args=(kinetic_constants, ode_lambda_functions))\n\ndef sim_fun_rev(x):\n y_rev[2] = x # updating s2 concentration\n return itg.odeint(f, y_rev, t, args=(kinetic_constants, ode_lambda_functions))\n\n# starting and ending time in seconds, number of data points\nt = numpy.linspace(0.0, 7000.0, 100)\n# signal parameter scanning range and data points. Forward scan.\nC3_scan = numpy.linspace(*plot_specifications[0], 60)\nsim_res_fwd = [sim_fun_fwd(i) for i in C3_scan] # occupies sys.getsizeof(sim_res_rev[0])*len(sim_res_rev)/2**20 Mb\n# Reverse C3_scan. 
Reverse means that s2 is already high and signal is decreasing.\nsim_res_rev = [sim_fun_rev(i) for i in numpy.flip(C3_scan)]\n\nout = pandas.DataFrame(columns=['dir', 'signal', 'time'] + network.get_c_graph().get_species())\nfor i in range(len(sim_res_fwd)):\n out_i = pandas.DataFrame(sim_res_fwd[i], columns=out.columns[3:])\n out_i['time'] = t\n out_i['signal'] = C3_scan[i]\n out_i['dir'] = 'Low $[S^{**}]$'\n out = pandas.concat([out, out_i[out.columns]])\nfor i in range(len(sim_res_rev)):\n out_i = pandas.DataFrame(sim_res_rev[i], columns=out.columns[3:])\n out_i['time'] = t\n out_i['signal'] = numpy.flip(C3_scan)[i]\n out_i['dir'] = 'High $[S^{**}]$'\n out = pandas.concat([out, out_i[out.columns]])\nout.to_csv(\"./num_cont_graphs/sim2.txt\", sep=\"\\t\", index=False)\n\n###################### plotting ##################################\ng = (ggplot(out, aes('time', response, group='signal', color='signal'))\n + geom_line(size=0.5)\n + ylim(0, 202)\n + labs(x=\"time\", y=\"$[S^{**}]$\")\n + scale_color_distiller(palette='RdYlBu', type=\"diverging\", name=\"$B_{tot}$\")\n + facet_wrap('~dir')\n + theme_bw())\ng.save(filename=\"./num_cont_graphs/sim_fwd_rev2.png\", format=\"png\", width=8, height=4, units='in', verbose=False)\n\neq = out[out.time == max(out.time)]\n\ng = (ggplot(eq)\n + aes(x='signal', y=response, color='dir')\n + labs(x=\"$B_{tot}$\", y=\"$[S^{**}]$\", color=\"\")\n + geom_path(size=2, alpha=0.5)\n + geom_point(color=\"black\")\n + theme_bw()\n + geom_point(color=\"black\")\n + annotate(\"point\", x=plot_specifications[2][0][0], y=plot_specifications[2][0][1], colour=\"red\", shape=\"*\",\n size=3.5)\n + annotate(\"text\", x=plot_specifications[2][0][0], y=plot_specifications[2][0][1],\n label=plot_specifications[2][0][2]))\n # + annotate(\"point\", x=plot_specifications[2][1][0], y=plot_specifications[2][1][1], colour=\"red\", shape=\"*\",\n # size=3.5)\n # + annotate(\"text\", x=plot_specifications[2][1][0], y=plot_specifications[2][1][1],\n # label=plot_specifications[2][1][2]))\ng.save(filename=\"./num_cont_graphs/sim_bif_diag2.png\", format=\"png\", width=6, height=4, units='in', verbose=False)\n" ]
[ [ "numpy.zeros", "numpy.save" ], [ "pandas.concat", "numpy.linspace", "numpy.vstack", "numpy.save", "scipy.integrate.odeint", "pandas.DataFrame", "numpy.array", "numpy.flip", "matplotlib.rc" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] } ]
TimeTraveller-San/FairGAN
[ "526c2937714fc322714db54dc6a3f392f2c88e18" ]
[ "we.py" ]
[ "from __future__ import print_function, division\nimport re\nimport sys\nimport numpy as np\nimport scipy.sparse\nimport codecs\nfrom sklearn.decomposition import PCA\nif sys.version_info[0] < 3:\n import io\n open = io.open\nelse:\n unicode = str\n\"\"\"\nTools for debiasing word embeddings\n\nMan is to Computer Programmer as Woman is to Homemaker? Debiasing Word Embeddings\nTolga Bolukbasi, Kai-Wei Chang, James Zou, Venkatesh Saligrama, and Adam Kalai\n2016\n\"\"\"\n\nDEFAULT_NUM_WORDS = 27000\nFILENAMES = {\"g_wiki\": \"glove.6B.300d.small.txt\",\n \"g_twitter\": \"glove.twitter.27B.200d.small.txt\",\n \"g_crawl\": \"glove.840B.300d.small.txt\",\n \"w2v\": \"GoogleNews-word2vec.small.txt\",\n \"w2v_large\": \"GoogleNews-word2vec.txt\"}\n\n\ndef dedup(seq):\n seen = set()\n return [x for x in seq if not (x in seen or seen.add(x))]\n\n\ndef safe_word(w):\n # ignore words with numbers, etc.\n # [a-zA-Z\\.'_\\- :;\\(\\)\\]] for emoticons\n return (re.match(r\"^[a-z_]*$\", w) and len(w) < 20 and not re.match(r\"^_*$\", w))\n\n\ndef to_utf8(text, errors='strict', encoding='utf8'):\n \"\"\"Convert a string (unicode or bytestring in `encoding`), to bytestring in utf8.\"\"\"\n if isinstance(text, unicode):\n return text.encode('utf8')\n # do bytestring -> unicode -> utf8 full circle, to ensure valid utf8\n return unicode(text, encoding, errors=errors).encode('utf8')\n\n\ndef load_embeddings_from_np(filename):\n print('loading ...')\n with codecs.open(filename + '.vocab', 'r', 'utf-8') as f_embed:\n vocab = [line.strip() for line in f_embed]\n wv = np.load(filename + '.wv.npy')\n return vocab, wv\n\nclass WordEmbedding:\n def __init__(self, fname):\n self.thresh = None\n self.max_words = None\n self.desc = fname\n print(\"*** Reading data from \" + fname)\n if fname.endswith(\".bin\"):\n import gensim.models\n model = gensim.models.KeyedVectors.load_word2vec_format(fname, binary=True)\n words = sorted([w for w in model.vocab], key=lambda w: model.vocab[w].index)\n vecs = [model[w] for w in words]\n elif fname.endswith(\".txt\"):\n print(\"Loading w2vec format\")\n vecs = []\n words = []\n with open(fname, \"r\") as f:\n lines = f.readlines()\n for line in lines:\n tokens = line.split()\n v = np.array([float(x) for x in tokens[-300:]])\n w = \"_\".join([str(x) for x in tokens[:-300]])\n if len(v) != 300:\n print(f\"Weird line: {tokens} | {len(v)}\")\n continue\n words.append(w)\n vecs.append(v)\n else:\n print(\"Loading numpy format\")\n words, vecs = load_embeddings_from_np(fname)\n\n self.vecs = np.array(vecs, dtype='float32')\n print(self.vecs.shape)\n self.words = words\n self.reindex()\n norms = np.linalg.norm(self.vecs, axis=1)\n if max(norms)-min(norms) > 0.0001:\n self.normalize()\n\n def reindex(self):\n self.index = {w: i for i, w in enumerate(self.words)}\n self.rindex = {i: w for i, w in enumerate(self.words)}\n self.n, self.d = self.vecs.shape\n assert self.n == len(self.words) == len(self.index)\n self._neighbors = None\n print(self.n, \"words of dimension\", self.d, \":\", \", \".join(self.words[:4] + [\"...\"] + self.words[-4:]))\n\n def v(self, word):\n return self.vecs[self.index[word]]\n\n def diff(self, word1, word2):\n v = self.vecs[self.index[word1]] - self.vecs[self.index[word2]]\n return v/np.linalg.norm(v)\n\n def normalize(self):\n self.desc += \", normalize\"\n self.vecs /= np.linalg.norm(self.vecs, axis=1)[:, np.newaxis]\n self.reindex()\n\n def shrink(self, numwords):\n self.desc += \", shrink \" + str(numwords)\n self.filter_words(lambda w: 
self.index[w]<numwords)\n\n def filter_words(self, test):\n \"\"\"\n Keep some words based on test, e.g. lambda x: x.lower()==x\n \"\"\"\n self.desc += \", filter\"\n kept_indices, words = zip(*[[i, w] for i, w in enumerate(self.words) if test(w)])\n self.words = list(words)\n self.vecs = self.vecs[kept_indices, :]\n self.reindex()\n\n def save(self, filename):\n with open(filename, \"w\") as f:\n f.write(\"\\n\".join([w+\" \" + \" \".join([str(x) for x in v]) for w, v in zip(self.words, self.vecs)]))\n print(\"Wrote\", self.n, \"words to\", filename)\n\n def save_w2v(self, filename, binary=True):\n with open(filename, 'wb') as fout:\n fout.write(to_utf8(\"%s %s\\n\" % self.vecs.shape))\n # store in sorted order: most frequent words at the top\n for i, word in enumerate(self.words):\n row = self.vecs[i]\n if binary:\n fout.write(to_utf8(word) + b\" \" + row.tostring())\n else:\n fout.write(to_utf8(\"%s %s\\n\" % (word, ' '.join(\"%f\" % val for val in row))))\n\n def remove_directions(self, directions): #directions better be orthogonal\n self.desc += \", removed\"\n for direction in directions:\n self.desc += \" \"\n if type(direction) is np.ndarray:\n v = direction / np.linalg.norm(direction)\n self.desc += \"vector \"\n else:\n w1, w2 = direction\n v = self.diff(w1, w2)\n self.desc += w1 + \"-\" + w2\n self.vecs = self.vecs - self.vecs.dot(v)[:, np.newaxis].dot(v[np.newaxis, :])\n self.normalize()\n\n def compute_neighbors_if_necessary(self, thresh, max_words):\n thresh = float(thresh) # dang python 2.7!\n if self._neighbors is not None and self.thresh == thresh and self.max_words == max_words:\n return\n print(\"Computing neighbors\")\n self.thresh = thresh\n self.max_words = max_words\n vecs = self.vecs[:max_words]\n dots = vecs.dot(vecs.T)\n dots = scipy.sparse.csr_matrix(dots * (dots >= 1-thresh/2))\n from collections import Counter\n rows, cols = dots.nonzero()\n nums = list(Counter(rows).values())\n print(\"Mean:\", np.mean(nums)-1)\n print(\"Median:\", np.median(nums)-1)\n rows, cols, vecs = zip(*[(i, j, vecs[i]-vecs[j]) for i, j, x in zip(rows, cols, dots.data) if i<j])\n self._neighbors = rows, cols, np.array([v/np.linalg.norm(v) for v in vecs])\n\n def neighbors(self, word, thresh=1):\n dots = self.vecs.dot(self.v(word))\n dd = dict(zip([abs(dot) for dot in dots], [i for i in range(len(dots))]))\n ns=[]\n for dot in sorted(dd, reverse=True):\n if dot>1-thresh/2:\n ns.append(self.words[int(dd[dot])])\n return ns[1:] #Since first word is the word itself\n\n def neighborsNoSort(self, word, thresh=1):\n dots = self.vecs.dot(self.v(word))\n dd = dict(zip([abs(dot) for dot in dots], [i for i in range(len(dots))]))\n ns=[]\n for dot in sorted(dd, reverse=True):\n if dot>1-thresh/2:\n ns.append(self.words[int(dd[dot])])\n return ns[1:] #Since first word is the word itself\n\n def more_words_like_these(self, words, topn=50, max_freq=100000):\n v = sum(self.v(w) for w in words)\n dots = self.vecs[:max_freq].dot(v)\n thresh = sorted(dots)[-topn]\n words = [w for w, dot in zip(self.words, dots) if dot>=thresh]\n return sorted(words, key=lambda w: self.v(w).dot(v))[-topn:][::-1]\n\n def best_analogies_dist_thresh(self, v, thresh=1, topn=500, max_words=50000):\n \"\"\"Metric is cos(a-c, b-d) if |b-d|^2 < thresh, otherwise 0\n \"\"\"\n vecs, vocab = self.vecs[:max_words], self.words[:max_words]\n self.compute_neighbors_if_necessary(thresh, max_words)\n rows, cols, vecs = self._neighbors\n scores = vecs.dot(v/np.linalg.norm(v))\n pi = np.argsort(-abs(scores))\n\n ans = []\n usedL = set()\n 
usedR = set()\n for i in pi:\n if abs(scores[i])<0.001:\n break\n row = rows[i] if scores[i] > 0 else cols[i]\n col = cols[i] if scores[i] > 0 else rows[i]\n if row in usedL or col in usedR:\n continue\n usedL.add(row)\n usedR.add(col)\n ans.append((vocab[row], vocab[col], abs(scores[i])))\n if len(ans)==topn:\n break\n\n return ans\n\n\ndef viz(analogies):\n print(\"\\n\".join(str(i).rjust(4)+a[0].rjust(29) + \" | \" + a[1].ljust(29) + (str(a[2]))[:4] for i, a in enumerate(analogies)))\n\n\ndef text_plot_words(xs, ys, words, width = 90, height = 40, filename=None):\n PADDING = 10 # num chars on left and right in case words spill over\n res = [[' ' for i in range(width)] for j in range(height)]\n def rescale(nums):\n a = min(nums)\n b = max(nums)\n return [(x-a)/(b-a) for x in nums]\n print(\"x:\", (min(xs), max(xs)), \"y:\",(min(ys),max(ys)))\n xs = rescale(xs)\n ys = rescale(ys)\n for (x, y, word) in zip(xs, ys, words):\n i = int(x*(width - 1 - PADDING))\n j = int(y*(height-1))\n row = res[j]\n z = list(row[i2] != ' ' for i2 in range(max(i-1, 0), min(width, i + len(word) + 1)))\n if any(z):\n continue\n for k in range(len(word)):\n if i+k>=width:\n break\n row[i+k] = word[k]\n string = \"\\n\".join(\"\".join(r) for r in res)\n# return string\n if filename:\n with open(filename, \"w\", encoding=\"utf8\") as f:\n f.write(string)\n print(\"Wrote to\", filename)\n else:\n print(string)\n\n\ndef doPCA(pairs, embedding, num_components = 10):\n matrix = []\n for a, b in pairs:\n center = (embedding.v(a) + embedding.v(b))/2\n matrix.append(embedding.v(a) - center)\n matrix.append(embedding.v(b) - center)\n matrix = np.array(matrix)\n pca = PCA(n_components = num_components)\n pca.fit(matrix)\n # bar(range(num_components), pca.explained_variance_ratio_)\n return pca\n\n\ndef drop(u, v):\n return u - v * u.dot(v) / v.dot(v)\n" ]
[ [ "numpy.median", "numpy.linalg.norm", "numpy.mean", "numpy.load", "numpy.array", "sklearn.decomposition.PCA" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
seanlam97/PDK_Generator
[ "15c1f4f56575f8e21ea874443d06ef740ccb5aa5", "15c1f4f56575f8e21ea874443d06ef740ccb5aa5", "15c1f4f56575f8e21ea874443d06ef740ccb5aa5", "15c1f4f56575f8e21ea874443d06ef740ccb5aa5" ]
[ "PDK_Generator/inverse_design_y_branch/lumopt/geometries/parameterized_geometry.py", "PDK_Generator/inverse_design_y_branch/lumopt/geometries/geometry.py", "PDK_Generator/inverse_design_y_branch/lumopt/optimizers/generic_optimizers.py", "PDK_Generator/design_automation/polarization_splitter_rotator/psr_bitaper/neff_taper_width_sweep.py" ]
[ "import numpy as np\nimport inspect\n\nfrom lumopt.geometries.geometry import Geometry\n\nclass ParameterizedGeometry(Geometry):\n \"\"\" \n Defines a parametrized geometry using any of the built-in geometric structures available in the FDTD CAD.\n Users must provide a Python function with the signature ('params', 'fdtd', 'only_update'). The function\n must take the optimization parameters and a handle to the FDTD CAD to build the geometry under optimization\n (material assignments included). The flag 'only_update' is used to avoid frequent recreations of the parameterized\n geometry: when the flag is true, it is assumed that the geometry was already added at least once to the CAD.\n\n Parameters\n ----------\n :param func: function with the signature ('params', 'fdtd', 'only_update', **kwargs).\n :param initial_params: flat array with the initial optimization parameter values.\n :param bounds: bounding ranges (min/max pairs) for each optimization parameter.\n :param dx: step size for computing the figure of merit gradient using permittivity perturbations.\n \"\"\"\n \n def __init__(self, func, initial_params, bounds, dx, deps_num_threads=1):\n self.deps_num_threads=deps_num_threads\n self.func = func\n self.current_params = np.array(initial_params).flatten()\n self.bounds = bounds\n self.dx = float(dx)\n\n if inspect.isfunction(self.func):\n bound_args = inspect.signature(self.func).bind('params', 'fdtd', 'only_update')\n if bound_args.args != ('params', 'fdtd', 'only_update'):\n raise UserWarning(\"user defined function does not take three positional arguments.\")\n else:\n raise UserWarning(\"argument 'func' must be a Python function.\")\n if self.dx <= 0.0:\n raise UserWarning(\"step size must be positive.\")\n\n self.params_hist = list(self.current_params)\n\n def update_geometry(self, params, sim):\n self.current_params = params\n self.params_hist.append(params)\n\n def get_current_params(self):\n return self.current_params\n\n def calculate_gradients(self, gradient_fields):\n raise UserWarning(\"unsupported gradient calculation method.\")\n\n def add_geo(self, sim, params, only_update):\n sim.fdtd.switchtolayout()\n if params is None:\n return self.func(self.current_params, sim.fdtd, only_update)\n else:\n return self.func(params, sim.fdtd, only_update)\n", "\"\"\" Copyright chriskeraly\n Copyright (c) 2019 Lumerical Inc. 
\"\"\"\n\nimport sys\nimport numpy as np\nimport lumapi\n\nclass Geometry(object):\n\n self_update=False\n unfold_symmetry = True #< By default, we do want monitors to unfold symmetry\n use_central_differences=False\n deps_num_threads = 1\n\n def use_interpolation(self):\n return False\n \n def check_license_requirements(self, sim):\n return True\n\n def __init__(self,geometries,operation):\n self.geometries=geometries\n self.operation=operation\n if self.operation=='mul':\n self.bounds=geometries[0].bounds\n if self.operation=='add':\n self.bounds=np.concatenate((np.array(geometries[0].bounds),np.array(geometries[1].bounds)))\n self.dx=max([geo.dx for geo in self.geometries])\n\n return\n\n def __add__(self,other):\n '''Two geometries with independent parameters'''\n geometries=[self,other]\n return Geometry(geometries,'add')\n\n def __mul__(self,other):\n '''Two geometries with common parameters'''\n geometries = [self, other]\n return Geometry(geometries, 'mul')\n\n def add_geo(self, sim, params, only_update):\n for geometry in self.geometries:\n geometry.add_geo(sim, params, only_update)\n\n def initialize(self,wavelengths,opt):\n for geometry in self.geometries:\n geometry.initialize(wavelengths,opt)\n self.opt=opt\n\n def update_geometry(self, params, sim = None):\n if self.operation=='mul':\n for geometry in self.geometries:\n geometry.update_geometry(params,sim)\n\n if self.operation=='add':\n n1=len(self.geometries[0].get_current_params())\n self.geometries[0].update_geometry(params[:n1],sim)\n self.geometries[1].update_geometry(params[n1:],sim)\n\n def calculate_gradients(self, gradient_fields):\n derivs1 = np.array(self.geometries[0].calculate_gradients(gradient_fields))\n derivs2 = np.array(self.geometries[1].calculate_gradients(gradient_fields))\n\n if self.operation=='mul':\n return derivs1+derivs2\n if self.operation=='add':\n np.concatenate(derivs1,derivs2)\n\n def get_current_params(self):\n params1=np.array(self.geometries[0].get_current_params())\n if self.operation=='mul':\n return params1\n if self.operation=='add':\n return params1+np.array(self.geometries[1].get_current_params())\n\n def plot(self,*args):\n return False\n\n def add_geo(self, sim, params, only_update):\n for geometry in self.geometries:\n geometry.add_geo(sim, params, only_update)\n\n @staticmethod\n def get_eps_from_index_monitor(fdtd, eps_result_name, monitor_name = 'opt_fields'):\n index_monitor_name = monitor_name + '_index'\n fdtd.eval(\"{0}_data_set = getresult('{0}','index');\".format(index_monitor_name) +\n \"{0} = matrix(length({1}_data_set.x), length({1}_data_set.y), length({1}_data_set.z), length({1}_data_set.f), 3);\".format(eps_result_name, index_monitor_name) +\n \"{0}(:, :, :, :, 1) = {1}_data_set.index_x^2;\".format(eps_result_name, index_monitor_name) +\n \"{0}(:, :, :, :, 2) = {1}_data_set.index_y^2;\".format(eps_result_name, index_monitor_name) +\n \"{0}(:, :, :, :, 3) = {1}_data_set.index_z^2;\".format(eps_result_name, index_monitor_name) +\n \"clear({0}_data_set);\".format(index_monitor_name))\n\n def d_eps_on_cad_parallel(self, sim):\n ## Since meshing is single-threaded for now, we need to add as many processes as there are cores.\n sim.fdtd.eval( (\"num_originally_active_resource_config=getresource('FDTD');\"+\n \"originally_active_resource_config = zeros(1,num_originally_active_resource_config);\"+\n \"for(i=1:num_originally_active_resource_config) {\"+\n \" cur_act = getresource('FDTD',i,'active');\"+\n \" originally_active_resource_config(i) = str2num(cur_act);\"+\n \" 
setresource('FDTD',i,'active',false); \"+\n \"}\"+\n \"for(i=1:{0}) \".format(self.deps_num_threads)+\n \"{ addresource('FDTD'); \"+\n \" setresource('FDTD',num_originally_active_resource_config+i,'mpi bypass on localhost',true);\"+\n \"}\"))\n\n current_params = self.get_current_params()\n cur_dx = self.dx/2 if self.use_central_differences else self.dx\n lumapi.putDouble(sim.fdtd.handle, \"dx\", cur_dx)\n\n ## If we don't use central differences, we need to calculate the current mesh\n if not self.use_central_differences:\n Geometry.get_eps_from_index_monitor(sim.fdtd, 'original_eps_data')\n\n ## Generate the various files and add them to the queue\n for i,param in enumerate(current_params):\n \n d_params = current_params.copy()\n d_params[i] = param + cur_dx\n self.add_geo(sim, d_params, only_update = True)\n\n filename = 'TempFileMesh_p{}'.format(i)\n sim.fdtd.save(filename)\n sim.fdtd.addjob(filename,'FDTD','-mesh-only')\n\n if self.use_central_differences:\n d_params[i] = param - cur_dx\n self.add_geo(sim, d_params, only_update = True)\n filename = 'TempFileMesh_m{}'.format(i)\n sim.fdtd.save(filename)\n sim.fdtd.addjob(filename,'FDTD','-mesh-only')\n\n ## Run the queue\n sim.fdtd.runjobs()\n\n sim.fdtd.eval(\"d_epses = cell({});\".format(current_params.size))\n \n ## Load the various files, extract the mesh data \n for i,param in enumerate(current_params):\n filename = 'TempFileMesh_p{}'.format(i)\n sim.fdtd.load(filename)\n Geometry.get_eps_from_index_monitor(sim.fdtd, 'eps_data1')\n\n if self.use_central_differences:\n filename = 'TempFileMesh_m{}'.format(i)\n sim.fdtd.load(filename)\n Geometry.get_eps_from_index_monitor(sim.fdtd, 'eps_data2')\n\n sim.fdtd.eval(\"d_epses{\"+str(i+1)+\"} = (eps_data1 - eps_data2) / (2*dx);\")\n else:\n sim.fdtd.eval(\"d_epses{\"+str(i+1)+\"} = (eps_data1 - original_eps_data) / dx;\")\n \n sys.stdout.write('.'), sys.stdout.flush()\n\n ## Restore the original resource configuration\n sim.fdtd.eval( (\"for(i=1:num_originally_active_resource_config) {\"+\n \" setresource('FDTD',i,'active',originally_active_resource_config(i));\"+\n \"}\"+\n \"for(i={0}:-1:1) \".format(self.deps_num_threads) +\n \"{ deleteresource('FDTD',num_originally_active_resource_config+i);\"+\n \"}\"+\n \"clear(num_originally_active_resource_config,originally_active_resource_config);\"))\n\n\n sim.fdtd.eval(\"clear(eps_data1, dx);\")\n print('')\n if self.use_central_differences:\n sim.fdtd.eval(\"clear(eps_data2);\")\n else:\n sim.fdtd.eval(\"clear(original_eps_data);\")\n sim.fdtd.redrawon()\n\n\n\n def d_eps_on_cad_serial(self, sim):\n sim.fdtd.redrawoff()\n\n Geometry.get_eps_from_index_monitor(sim.fdtd, 'original_eps_data')\n current_params = self.get_current_params()\n sim.fdtd.eval(\"d_epses = cell({});\".format(current_params.size))\n \n cur_dx = self.dx/2 if self.use_central_differences else self.dx\n \n lumapi.putDouble(sim.fdtd.handle, \"dx\", cur_dx)\n print('Getting d eps: dx = ' + str(cur_dx))\n\n for i,param in enumerate(current_params):\n d_params = current_params.copy()\n d_params[i] = param + cur_dx\n self.add_geo(sim, d_params, only_update = True)\n Geometry.get_eps_from_index_monitor(sim.fdtd, 'current_eps_data')\n\n if self.use_central_differences:\n d_params[i] = param - cur_dx\n self.add_geo(sim, d_params, only_update = True)\n Geometry.get_eps_from_index_monitor(sim.fdtd, 'eps_data2')\n sim.fdtd.eval(\"d_epses{\"+str(i+1)+\"} = (current_eps_data - eps_data2) / (2*dx);\")\n else:\n sim.fdtd.eval(\"d_epses{\"+str(i+1)+\"} = (current_eps_data - 
original_eps_data) / dx;\")\n \n sys.stdout.write('.'), sys.stdout.flush()\n\n sim.fdtd.eval(\"clear(original_eps_data, current_eps_data, dx);\")\n print('')\n if self.use_central_differences:\n sim.fdtd.eval(\"clear(eps_data2);\")\n sim.fdtd.redrawon()\n\n def d_eps_on_cad(self,sim):\n if self.deps_num_threads>1:\n self.d_eps_on_cad_parallel(sim)\n else:\n self.d_eps_on_cad_serial(sim)\n\n", "\"\"\" Copyright chriskeraly\n Copyright (c) 2019 Lumerical Inc. \"\"\"\n\nimport numpy as np\nimport scipy as sp\nimport scipy.optimize as spo\n\nfrom lumopt.optimizers.minimizer import Minimizer\n\nclass ScipyOptimizers(Minimizer):\n \"\"\" Wrapper for the optimizers in SciPy's optimize package: \n\n https://docs.scipy.org/doc/scipy/reference/optimize.html#module-scipy.optimize\n\n Some of the optimization algorithms available in the optimize package ('L-BFGS-G' in particular) can approximate the Hessian from the \n different optimization steps (also called Quasi-Newton Optimization). While this is very powerfull, the figure of merit gradient calculated \n from a simulation using a continuous adjoint method can be noisy. This can point Quasi-Newton methods in the wrong direction, so use them \n with caution.\n\n Parameters\n ----------\n :param max_iter: maximum number of iterations; each iteration can make multiple figure of merit and gradient evaluations.\n :param method: string with the chosen minimization algorithm.\n :param scaling_factor: scalar or a vector of the same length as the optimization parameters; typically used to scale the optimization\n parameters so that they have magnitudes in the range zero to one.\n :param pgtol: projected gradient tolerance paramter 'gtol' (see 'BFGS' or 'L-BFGS-G' documentation).\n :param ftol: tolerance paramter 'ftol' which allows to stop optimization when changes in the FOM are less than this\n :param scale_initial_gradient_to: enforces a rescaling of the gradient to change the optimization parameters by at least this much;\n the default value of zero disables automatic scaling.\n :param: penalty_fun: penalty function to be added to the figure of merit; it must be a function that takes a vector with the\n optimization parameters and returns a single value.\n :param: penalty_jac: gradient of the penalty function; must be a function that takes a vector with the optimization parameters\n and returns a vector of the same length.\n \"\"\"\n\n def __init__(self, max_iter, method = 'L-BFGS-B', scaling_factor = 1.0, pgtol = 1.0e-5, ftol = 1.0e-12, scale_initial_gradient_to = 0, penalty_fun = None, penalty_jac = None):\n super(ScipyOptimizers,self).__init__(max_iter = max_iter,\n scaling_factor = scaling_factor,\n scale_initial_gradient_to = scale_initial_gradient_to,\n penalty_fun = penalty_fun,\n penalty_jac = penalty_jac)\n self.method = str(method)\n self.pgtol = float(pgtol)\n self.ftol=float(ftol)\n \n def run(self):\n print('Running scipy optimizer')\n print('bounds = {}'.format(self.bounds))\n print('start = {}'.format(self.start_point))\n res = spo.minimize(fun = self.callable_fom,\n x0 = self.start_point,\n jac = self.callable_jac,\n bounds = self.bounds,\n callback = self.callback,\n options = {'maxiter':self.max_iter, 'disp':True, 'gtol':self.pgtol,'ftol':self.ftol},\n method = self.method)\n res.x /= self.scaling_factor\n res.fun = -res.fun\n if hasattr(res, 'jac'):\n res.jac = -res.jac*self.scaling_factor\n print('Number of FOM evaluations: {}'.format(res.nit))\n print('FINAL FOM = {}'.format(res.fun))\n print('FINAL PARAMETERS = 
{}'.format(res.x))\n return res\n\n def concurrent_adjoint_solves(self):\n return self.method in ['L-BFGS-B','BFGS']\n", "#Function that performs PSR Bitaper Neff - Waveguide Width Sweep\n\n#General Purpose Libaries\ntry:\n import matplotlib.pyplot as plt\nexcept:\n import pip\n pip.main(['install', 'matplotlib'])\n import matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport sys\nimport platform \n\n#Import LUMAPI\nfrom lumerical_lumapi import lumapi\n\n#Import libraries for sweep and material setup\nfrom neff_taper_width_sweep_setup import width_sweep_setup, material_setup\n\n#Output Modes\nmodes=3\n\n#Sweep range of widths (ridge waveguide)\nwidth_ridge_list=np.linspace(0.4,0.9,100)*1e-6 \n\n#Sweep range of widths (slab waveguide)\nwidth_slab_list=np.linspace(0.4,1.9,100)*1e-6\n\n#Class that performs width sweep \nclass width_sweep:\n \n @staticmethod\n def main():\n\n with lumapi.MODE(hide = False) as mode:\n \n #Adding materials, drawing photonic components and simulation recipe setup\n material = material_setup.add_material(mode)\n draw_wg = width_sweep_setup.wg_2D_draw(mode)\n sweep = width_sweep_setup.wg_2D_func(mode)\n \n mode.set(\"number of trial modes\",modes+1);\n neff = []\n TE00 = []\n TM00 = []\n TE01 = []\n\n #Finding the modes for each specified waveguide width\n for i in range (0,len(width_ridge_list)):\n mode.switchtolayout()\n mode.setnamed(\"waveguide\",\"y span\", width_ridge_list[i])\n mode.setnamed(\"mesh1\",\"y span\", width_ridge_list[i])\n mode.setnamed(\"slab\",\"y span\", width_slab_list[i])\n mode.setnamed(\"mesh2\",\"y span\", width_slab_list[i])\n n = mode.findmodes()\n mode.save(\"bitaper_mode_calculations\")\n \n #For each mode, simulate/extract the effective index for corresponding width\n for m in range(1,4):\n if m == 1:\n data = abs(mode.getdata(\"FDE::data::mode\"+str(m),\"neff\"))\n data = data[0][0]\n TE00.append(data)\n mode.selectmode(\"mode1\")\n #mode.setanalysis(\"track selected mode\",1);\n #mode.setanalysis(\"detailed dispersion calculation\",1);\n #mode.frequencysweep()\n #loss_data = mode.getdata(\"frequencysweep\",\"loss\")\n \n elif m == 2:\n data = abs(mode.getdata(\"FDE::data::mode\"+str(m),\"neff\"))\n data = data[0][0]\n TM00.append(data)\n elif m == 3:\n data = abs(mode.getdata(\"FDE::data::mode\"+str(m),\"neff\"))\n data = data[0][0]\n TE01.append(data)\n\n #Append to arrays for data visualization\n neff.append(TE00)\n neff.append(TM00)\n neff.append(TE01)\n \n neff_plot = plt.plot(width_ridge_list, TE00, label = \"TE00\")\n neff_plot = plt.plot(width_ridge_list, TM00, label = \"TM00\")\n neff_plot = plt.plot(width_ridge_list, TE01, label = \"TE01\")\n neff_plot = plt.title('Neff vs Waveguide Width')\n neff_plot = plt.xlabel('Width (10e-7 m)')\n neff_plot = plt.ylabel(\"Neff\") \n neff_plot = plt.legend()\n neff_plot = plt.show()\n \n #Find starting width: Find the width that is closest to the neff cutoff of the fundamental mode (1.465)\n width_begin = 0\n for x, y in zip(width_ridge_list, TE01):\n if x < 5e-07 and x > 4e-07:\n if y<1.467 and y >1.463:\n width_begin = x\n \n #Find hybrid point to determine hybrid region \n hybrid_point = 0\n max_differ = sys.maxsize\n for x, y, z in zip(width_ridge_list, TE01, TM00):\n if z - y < max_differ:\n max_differ = z - y\n hybrid_point = x\n \n #Find middle width: Scans a range between (+-50nm) of the hybrid region to find the point that has the most gentle slope\n maxslope = 1\n difference = 1\n width_middle = 0\n for x, y in zip(width_ridge_list, TE01):\n if x < hybrid_point + 
50e-9 and x> hybrid_point - 50e-9:\n if y - difference <maxslope:\n maxslope = y - difference\n width_middle = x\n difference = y \n \n #Find end width: find largest discrepancy between TM00, TE01 \n #Ensures most efficient mode conversion\n width_end = 0\n max_diff = 0\n for x, y, z in zip(width_ridge_list, TE01, TM00):\n if x < 9e-07 and x> 6.5e-07:\n if z - y > max_diff:\n max_diff = z - y\n width_end = x\n \n #Returns widths as an array\n widths = [width_begin, width_middle, width_end]\n mode.save(\"bitaper_mode_calculations\")\n return widths\n \n#plot = width_sweep.main()\n\n\n" ]
[ [ "numpy.array" ], [ "numpy.concatenate", "numpy.array" ], [ "scipy.optimize.minimize" ], [ "matplotlib.pyplot.legend", "matplotlib.pyplot.title", "numpy.linspace", "matplotlib.pyplot.plot", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.show", "matplotlib.pyplot.ylabel" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
szhaofelicia/sgan
[ "ead42d4bb3b1278c4c9ffcae8fa9c2dc036a52ff", "ead42d4bb3b1278c4c9ffcae8fa9c2dc036a52ff" ]
[ "vis/visualize_court.py", "sgan/data/trajectories_basketball_0427.py" ]
[ "import numpy as np\n# import plotly\nimport plotly.graph_objects as go\n\n\n\ndef draw_plotly_half_court(fig, fig_width=600, margins=10):\n # From: https://community.plot.ly/t/arc-shape-with-path/7205/5\n def ellipse_arc(x_center=0.0, y_center=0.0, a=10.5, b=10.5, start_angle=0.0, end_angle=2 * np.pi, N=200, closed=False):\n t = np.linspace(start_angle, end_angle, N)\n x = x_center + a * np.cos(t)\n y = y_center + b * np.sin(t)\n path = f'M {x[0]}, {y[0]}'\n for k in range(1, len(t)):\n path += f'L{x[k]}, {y[k]}'\n if closed:\n path += ' Z'\n return path\n\n fig_height = fig_width * (470 + 2 * margins) / (500 + 2 * margins)\n fig.update_layout(width=fig_width, height=fig_height)\n\n # Set axes ranges\n fig.update_xaxes(range=[-250 - margins, 250 + margins])\n fig.update_yaxes(range=[-52.5 - margins, 417.5 + margins])\n\n threept_break_y = 89.47765084\n three_line_col = \"#777777\"\n main_line_col = \"#777777\"\n\n fig.update_layout(\n # Line Horizontal\n margin=dict(l=20, r=20, t=20, b=20),\n paper_bgcolor=\"white\",\n plot_bgcolor=\"white\",\n yaxis=dict(\n scaleanchor=\"x\",\n scaleratio=1,\n showgrid=False,\n zeroline=False,\n showline=False,\n ticks='',\n showticklabels=False,\n fixedrange=True,\n ),\n xaxis=dict(\n showgrid=False,\n zeroline=False,\n showline=False,\n ticks='',\n showticklabels=False,\n fixedrange=True,\n ),\n\n shapes=[\n # half_layout=[\n dict(\n type=\"rect\", x0=-250, y0=-52.5, x1=250, y1=417.5,\n line=dict(color=main_line_col, width=1),\n # fillcolor='#333333',\n layer='below'\n ), ## sideline rect\n dict(\n type=\"rect\", x0=-80, y0=-52.5, x1=80, y1=137.5,\n line=dict(color=main_line_col, width=1),\n # fillcolor='#333333',\n layer='below'\n ),# lane line rect\n dict(\n type=\"rect\", x0=-60, y0=-52.5, x1=60, y1=137.5,\n line=dict(color=main_line_col, width=1),\n # fillcolor='#333333',\n layer='below'\n ), # foul line rect\n dict(\n type=\"circle\", x0=-60, y0=77.5, x1=60, y1=197.5, xref=\"x\", yref=\"y\",\n line=dict(color=main_line_col, width=1),\n # fillcolor='#dddddd',\n layer='below'\n ), # free-throw circle\n dict(\n type=\"line\", x0=-60, y0=137.5, x1=60, y1=137.5,\n line=dict(color=main_line_col, width=1),\n layer='below'\n ), # foul line\n\n dict(\n type=\"rect\", x0=-2, y0=-7.25, x1=2, y1=-12.5,\n line=dict(color=\"#ec7607\", width=1),\n fillcolor='#ec7607',\n ), # hoop rect\n dict(\n type=\"circle\", x0=-7.5, y0=-7.5, x1=7.5, y1=7.5, xref=\"x\", yref=\"y\",\n line=dict(color=\"#ec7607\", width=1),\n ), # hoop circle\n dict(\n type=\"line\", x0=-30, y0=-12.5, x1=30, y1=-12.5,\n line=dict(color=\"#ec7607\", width=1),\n ), # backboard\n\n dict(type=\"path\",\n path=ellipse_arc(a=40, b=40, start_angle=0, end_angle=np.pi),\n line=dict(color=main_line_col, width=1), layer='below'), # no-change semi-circle\n dict(type=\"path\",\n path=ellipse_arc(a=237.5, b=237.5, start_angle=0.386283101, end_angle=np.pi - 0.386283101),\n line=dict(color=main_line_col, width=1), layer='below'), # three-point line:arc\n dict(\n type=\"line\", x0=-220, y0=-52.5, x1=-220, y1=threept_break_y,\n line=dict(color=three_line_col, width=1), layer='below'\n ), # three-point line:left edge\n # dict(\n # type=\"line\", x0=-220, y0=-52.5, x1=-220, y1=threept_break_y,\n # line=dict(color=three_line_col, width=1), layer='below'\n # ),\n dict(\n type=\"line\", x0=220, y0=-52.5, x1=220, y1=threept_break_y,\n line=dict(color=three_line_col, width=1), layer='below'\n ), # three-point line:right edge\n\n dict(\n type=\"line\", x0=-250, y0=227.5, x1=-220, y1=227.5,\n 
line=dict(color=main_line_col, width=1), layer='below'\n ), # midcourt area marker:left\n dict(\n type=\"line\", x0=250, y0=227.5, x1=220, y1=227.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # midcourt area marker:right\n dict(\n type=\"line\", x0=-90, y0=17.5, x1=-80, y1=17.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=-90, y0=27.5, x1=-80, y1=27.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=-90, y0=57.5, x1=-80, y1=57.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=-90, y0=87.5, x1=-80, y1=87.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=17.5, x1=80, y1=17.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=27.5, x1=80, y1=27.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=57.5, x1=80, y1=57.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=87.5, x1=80, y1=87.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n\n dict(type=\"path\",\n path=ellipse_arc(y_center=417.5, a=60, b=60, start_angle=-0, end_angle=-np.pi),\n line=dict(color=main_line_col, width=1), layer='below'), # center circle: half\n\n ]\n )\n return True\n\ndef draw_plotly_whole_court(fig, fig_width=600, margins=10):\n # From: https://community.plot.ly/t/arc-shape-with-path/7205/5\n def ellipse_arc(x_center=0.0, y_center=0.0, a=10.5, b=10.5, start_angle=0.0, end_angle=2 * np.pi, N=200, closed=False):\n t = np.linspace(start_angle, end_angle, N)\n x = x_center + a * np.cos(t)\n y = y_center + b * np.sin(t)\n path = f'M {x[0]}, {y[0]}'\n for k in range(1, len(t)):\n path += f'L{x[k]}, {y[k]}'\n if closed:\n path += ' Z'\n return path\n\n fig_height = fig_width * (470*2 + 2 * margins) / (500 + 2 * margins)\n fig.update_layout(width=fig_width, height=fig_height)\n\n # Set axes ranges\n fig.update_xaxes(range=[-250 - margins, 250 + margins])\n fig.update_yaxes(range=[-52.5 - margins, 417.5+470 + margins])\n\n # fig.update_xaxes(range=[ margins, 500 + margins])\n # fig.update_yaxes(range=[margins, 470*2 + margins])\n\n threept_break_y = 89.47765084\n three_line_col = \"#777777\"\n main_line_col = \"#777777\"\n\n fig.update_layout(\n # Line Horizontal\n margin=dict(l=20, r=20, t=20, b=20),\n paper_bgcolor=\"white\",\n plot_bgcolor=\"white\",\n yaxis=dict(\n scaleanchor=\"x\",\n scaleratio=1,\n showgrid=False,\n zeroline=False,\n showline=False,\n ticks='',\n showticklabels=False,\n fixedrange=True,\n ),\n xaxis=dict(\n showgrid=False,\n zeroline=False,\n showline=False,\n ticks='',\n showticklabels=False,\n fixedrange=True,\n ),\n\n # width:500, height: 470\n shapes=[\n dict(\n type=\"rect\", x0=-250, y0=-52.5, x1=250, y1=417.5+470,\n line=dict(color=main_line_col, width=1),\n # fillcolor='#333333',\n layer='below'\n ), ## sideline rect\n # dict(\n # type=\"rect\", x0=-250, y0=-52.5, x1=250, y1=417.5,\n # line=dict(color=main_line_col, width=1),\n # # fillcolor='#333333',\n # layer='below'\n # ), ## sideline rect\n dict(\n type=\"rect\", x0=-80, y0=-52.5, x1=80, y1=137.5,\n line=dict(color=main_line_col, width=1),\n # fillcolor='#333333',\n layer='below'\n ),# lane line rect\n dict(\n type=\"rect\", x0=-60, 
y0=-52.5, x1=60, y1=137.5,\n line=dict(color=main_line_col, width=1),\n # fillcolor='#333333',\n layer='below'\n ), # foul line rect\n dict(\n type=\"circle\", x0=-60, y0=77.5, x1=60, y1=197.5, xref=\"x\", yref=\"y\",\n line=dict(color=main_line_col, width=1),\n # fillcolor='#dddddd',\n layer='below'\n ), # free-throw circle\n dict(\n type=\"line\", x0=-60, y0=137.5, x1=60, y1=137.5,\n line=dict(color=main_line_col, width=1),\n layer='below'\n ), # foul line\n\n dict(\n type=\"rect\", x0=-2, y0=-7.25, x1=2, y1=-12.5,\n line=dict(color=\"#ec7607\", width=1),\n fillcolor='#ec7607',\n ), # hoop rect\n dict(\n type=\"circle\", x0=-7.5, y0=-7.5, x1=7.5, y1=7.5, xref=\"x\", yref=\"y\",\n line=dict(color=\"#ec7607\", width=1),\n ), # hoop circle\n dict(\n type=\"line\", x0=-30, y0=-12.5, x1=30, y1=-12.5,\n line=dict(color=\"#ec7607\", width=1),\n ), # backboard\n\n dict(type=\"path\",\n path=ellipse_arc(a=40, b=40, start_angle=0, end_angle=np.pi),\n line=dict(color=main_line_col, width=1), layer='below'), # no-change semi-circle\n dict(type=\"path\",\n path=ellipse_arc(a=237.5, b=237.5, start_angle=0.386283101, end_angle=np.pi - 0.386283101),\n line=dict(color=main_line_col, width=1), layer='below'), # three-point line:arc\n dict(\n type=\"line\", x0=-220, y0=-52.5, x1=-220, y1=threept_break_y,\n line=dict(color=three_line_col, width=1), layer='below'\n ), # three-point line:left edge\n # dict(\n # type=\"line\", x0=-220, y0=-52.5, x1=-220, y1=threept_break_y,\n # line=dict(color=three_line_col, width=1), layer='below'\n # ),\n dict(\n type=\"line\", x0=220, y0=-52.5, x1=220, y1=threept_break_y,\n line=dict(color=three_line_col, width=1), layer='below'\n ), # three-point line:right edge\n\n dict(\n type=\"line\", x0=-250, y0=227.5, x1=-220, y1=227.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # midcourt area marker:left\n dict(\n type=\"line\", x0=250, y0=227.5, x1=220, y1=227.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # midcourt area marker:right\n dict(\n type=\"line\", x0=-90, y0=17.5, x1=-80, y1=17.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=-90, y0=27.5, x1=-80, y1=27.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=-90, y0=57.5, x1=-80, y1=57.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=-90, y0=87.5, x1=-80, y1=87.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=17.5, x1=80, y1=17.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=27.5, x1=80, y1=27.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=57.5, x1=80, y1=57.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n dict(\n type=\"line\", x0=90, y0=87.5, x1=80, y1=87.5,\n line=dict(color=main_line_col, width=1), layer='below'\n ), # lane line marker\n\n dict(type=\"path\",\n path=ellipse_arc(y_center=417.5, a=60, b=60, start_angle=-0, end_angle=-np.pi),\n line=dict(color=main_line_col, width=1), layer='below'), # center circle: half\n\n\n ## upper\n # dict(\n # type=\"rect\", x0=-250, y0=-52.5, x1=250, y1=417.5,\n # line=dict(color=main_line_col, width=1),\n # # fillcolor='#333333',\n # layer='below'\n # ), ## sideline rect\n # dict(\n # type=\"rect\", x0=-80, 
y0=-52.5, x1=80, y1=137.5,\n # line=dict(color=main_line_col, width=1),\n # # fillcolor='#333333',\n # layer='below'\n # ), # lane line rect\n # dict(\n # type=\"rect\", x0=-60, y0=-52.5, x1=60, y1=137.5,\n # line=dict(color=main_line_col, width=1),\n # # fillcolor='#333333',\n # layer='below'\n # ), # foul line rect\n # dict(\n # type=\"circle\", x0=-60, y0=77.5, x1=60, y1=197.5, xref=\"x\", yref=\"y\",\n # line=dict(color=main_line_col, width=1),\n # # fillcolor='#dddddd',\n # layer='below'\n # ), # free-throw circle\n # dict(\n # type=\"line\", x0=-60, y0=137.5, x1=60, y1=137.5,\n # line=dict(color=main_line_col, width=1),\n # layer='below'\n # ), # foul line\n #\n # dict(\n # type=\"rect\", x0=-2, y0=-7.25, x1=2, y1=-12.5,\n # line=dict(color=\"#ec7607\", width=1),\n # fillcolor='#ec7607',\n # ), # hoop rect\n # dict(\n # type=\"circle\", x0=-7.5, y0=-7.5, x1=7.5, y1=7.5, xref=\"x\", yref=\"y\",\n # line=dict(color=\"#ec7607\", width=1),\n # ), # hoop circle\n # dict(\n # type=\"line\", x0=-30, y0=-12.5, x1=30, y1=-12.5,\n # line=dict(color=\"#ec7607\", width=1),\n # ), # backboard\n #\n # dict(type=\"path\",\n # path=ellipse_arc(a=40, b=40, start_angle=0, end_angle=np.pi),\n # line=dict(color=main_line_col, width=1), layer='below'), # no-change semi-circle\n # dict(type=\"path\",\n # path=ellipse_arc(a=237.5, b=237.5, start_angle=0.386283101, end_angle=np.pi - 0.386283101),\n # line=dict(color=main_line_col, width=1), layer='below'), # three-point line:arc\n # dict(\n # type=\"line\", x0=-220, y0=-52.5, x1=-220, y1=threept_break_y,\n # line=dict(color=three_line_col, width=1), layer='below'\n # ), # three-point line:left edge\n # # dict(\n # # type=\"line\", x0=-220, y0=-52.5, x1=-220, y1=threept_break_y,\n # # line=dict(color=three_line_col, width=1), layer='below'\n # # ),\n # dict(\n # type=\"line\", x0=220, y0=-52.5, x1=220, y1=threept_break_y,\n # line=dict(color=three_line_col, width=1), layer='below'\n # ), # three-point line:right edge\n #\n # dict(\n # type=\"line\", x0=-250, y0=227.5, x1=-220, y1=227.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # midcourt area marker:left\n # dict(\n # type=\"line\", x0=250, y0=227.5, x1=220, y1=227.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # midcourt area marker:right\n # dict(\n # type=\"line\", x0=-90, y0=17.5, x1=-80, y1=17.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=-90, y0=27.5, x1=-80, y1=27.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=-90, y0=57.5, x1=-80, y1=57.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=-90, y0=87.5, x1=-80, y1=87.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=90, y0=17.5, x1=80, y1=17.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=90, y0=27.5, x1=80, y1=27.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=90, y0=57.5, x1=80, y1=57.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n # dict(\n # type=\"line\", x0=90, y0=87.5, x1=80, y1=87.5,\n # line=dict(color=main_line_col, width=1), layer='below'\n # ), # lane line marker\n #\n # dict(type=\"path\",\n # path=ellipse_arc(y_center=417.5, a=60, 
b=60, start_angle=-0, end_angle=-np.pi),\n # line=dict(color=main_line_col, width=1), layer='below'), # center circle: half\n\n ]\n )\n return True\n\n\n\n\n\n\nmax_freq = 0.002\n# freq_by_hex = np.array([min(max_freq, i) for i in league_hexbin_stats['freq_by_hex']])\ncolorscale = 'YlOrRd'\nmarker_cmin = 0.1\nmarker_cmax = 0.6\nticktexts = [str(marker_cmin*100)+'%-', \"\", str(marker_cmax*100)+'%+']\n\nfig = go.Figure()\n# draw_plotly_half_court(fig)\ndraw_plotly_whole_court(fig)\n\n# fig.add_trace(go.Scatter(\n# x=xlocs, y=ylocs, mode='markers', name='markers',\n# marker=dict(\n# size=freq_by_hex, sizemode='area', sizeref=2. * max(freq_by_hex) / (11. ** 2), sizemin=2.5,\n# color=accs_by_hex, colorscale=colorscale,\n# colorbar=dict(\n# thickness=15,\n# x=0.84,\n# y=0.87,\n# yanchor='middle',\n# len=0.2,\n# title=dict(\n# text=\"<B>Accuracy</B>\",\n# font=dict(\n# size=11,\n# color='#4d4d4d'\n# ),\n# ),\n# tickvals=[marker_cmin, (marker_cmin + marker_cmax) / 2, marker_cmax],\n# ticktext=ticktexts,\n# tickfont=dict(\n# size=11,\n# color='#4d4d4d'\n# )\n# ),\n# cmin=marker_cmin, cmax=marker_cmax,\n# line=dict(width=1, color='#333333'), symbol='hexagon',\n# ),\n# ))\n# fig.show(config=dict(displayModeBar=False))\n\n# fig.show()\n\nvis_dir='/media/felicia/Data/sgan_results/vis/'\nfig.write_image(vis_dir+\"court.svg\")\n", "import logging\nimport os\nimport math\nfrom tqdm import tqdm\n\nimport numpy as np\n\nimport torch\nfrom torch.utils.data import Dataset\n\nlogger = logging.getLogger(__name__)\n\n\ndef isfloat(value):\n try:\n float(value)\n return True\n except ValueError:\n return False\n\n\ndef seq_collate(data):\n (obs_seq_list, pred_seq_list, obs_seq_rel_list, pred_seq_rel_list,\n obs_team_vec_list, obs_pos_vec_list, pred_team_vec_list, pred_pos_vec_list,\n non_linear_ped_list, loss_mask_list) = zip(*data)\n\n _len = [len(seq) for seq in obs_seq_list]\n cum_start_idx = [0] + np.cumsum(_len).tolist()\n seq_start_end = [[start, end]\n for start, end in zip(cum_start_idx, cum_start_idx[1:])]\n\n # Data format: batch, input_size, seq_len\n # LSTM input format: seq_len, batch, input_size\n obs_traj = torch.cat(obs_seq_list, dim=0).permute(2, 0, 1)\n pred_traj = torch.cat(pred_seq_list, dim=0).permute(2, 0, 1)\n obs_traj_rel = torch.cat(obs_seq_rel_list, dim=0).permute(2, 0, 1)\n pred_traj_rel = torch.cat(pred_seq_rel_list, dim=0).permute(2, 0, 1)\n\n obs_team_vec = torch.cat(obs_team_vec_list, dim=0).permute(2, 0, 1)\n obs_pos_vec = torch.cat(obs_pos_vec_list, dim=0).permute(2, 0, 1)\n pred_team_vec = torch.cat(pred_team_vec_list, dim=0).permute(2, 0, 1)\n pred_pos_vec = torch.cat(pred_pos_vec_list, dim=0).permute(2, 0, 1)\n\n non_linear_ped = torch.cat(non_linear_ped_list)\n loss_mask = torch.cat(loss_mask_list, dim=0)\n seq_start_end = torch.LongTensor(seq_start_end)\n out = [\n obs_traj, pred_traj, obs_traj_rel, pred_traj_rel,\n obs_team_vec, obs_pos_vec, pred_team_vec, pred_pos_vec,\n non_linear_ped, loss_mask, seq_start_end\n ]\n\n return tuple(out)\n\n\ndef read_file(_path, delim='\\t'):\n lines = []\n if delim == 'tab':\n delim = '\\t'\n elif delim == 'space':\n delim = ' '\n with open(_path, 'r') as f:\n next(f)\n for line in f:\n line = line.strip().split(delim)\n line = [float(i) if isfloat(i) else i for i in line]\n lines.append(line)\n return lines\n\n\ndef parse_file(_path, delim='\\t'):\n data = []\n if delim == 'tab':\n delim = '\\t'\n elif delim == 'space':\n delim = ' '\n lines = read_file(_path, delim)\n team_ids = np.unique([int(line[2]) for line in lines if 
isfloat(line[2])]).tolist()\n posi_ids = [\"C\", \"F\", \"G\", \"ball\"]\n\n for line in lines:\n row = []\n team_vector = [0.0] * 3 # 0 1 ball\n pos_vector = [0.0] * 4 # 0 1 2 ball\n for col, value in enumerate(line):\n if col == 2: # team_id\n if value == \"ball\":\n team_vector[2] = 1.0\n else:\n team = team_ids.index(int(value))\n team_vector[team] = 1.0\n elif col == 3: # player_id\n if value == \"ball\":\n row.append(-1.0)\n else:\n row.append(value) # float\n elif col == 6: # player_position\n positions = value.strip('\"').split(\",\")\n for pos in positions:\n pos_vector[posi_ids.index(pos)] = 1.0\n else:\n row.append(value) # float\n row += team_vector # team_id\n row += pos_vector # player_position\n\n data.append(row)\n return np.asarray(data)\n\n\ndef poly_fit(traj, traj_len, threshold):\n \"\"\"\n Input:\n - traj: Numpy array of shape (2, traj_len)\n - traj_len: Len of trajectory\n - threshold: Minimum error to be considered for non linear traj\n Output:\n - int: 1 -> Non Linear 0-> Linear\n \"\"\"\n t = np.linspace(0, traj_len - 1, traj_len)\n res_x = np.polyfit(t, traj[0, -traj_len:], 2, full=True)[1]\n res_y = np.polyfit(t, traj[1, -traj_len:], 2, full=True)[1]\n if res_x + res_y >= threshold:\n return 1.0\n else:\n return 0.0\n\n\nclass TrajectoryDataset(Dataset):\n \"\"\"Dataloder for the Trajectory datasets\"\"\"\n\n def __init__(\n self, data_dir, obs_len=8, pred_len=12, skip=1, threshold=0.002,\n min_ped=1, delim='\\t', metric=\"meter\"\n ):\n \"\"\"\n Args:\n - data_dir: Directory containing dataset files in the format\n <frame_id> <ped_id> <x> <y>\n - obs_len: Number of time-steps in input trajectories\n - pred_len: Number of time-steps in output trajectories\n - skip: Number of frames to skip while making the dataset\n - threshold: Minimum error to be considered for non linear traj\n when using a linear predictor\n - min_ped: Minimum number of pedestrians that should be in a seqeunce\n - delim: Delimiter in the dataset files\n\n columns in csv file:\n (idx), frame_id,team_id,player_id,pos_x, pos_y, player_position\n ->\n data:\n idx, frame_id,player_id,pos_x, pos_y, team_vector,position_vector\n\n \"\"\"\n super(TrajectoryDataset, self).__init__()\n\n self.data_dir = data_dir\n self.obs_len = obs_len\n self.pred_len = pred_len\n self.skip = skip\n self.seq_len = self.obs_len + self.pred_len\n self.delim = delim\n\n if metric==\"meter\":\n self.factor=0.3048 # foot to meter\n else:\n self.factor=1.0 # foot to foot\n\n all_files = os.listdir(self.data_dir)\n all_files = [os.path.join(self.data_dir, _path) for _path in all_files]\n num_peds_in_seq = []\n seq_list = []\n seq_list_rel = []\n loss_mask_list = []\n non_linear_ped = []\n team_vec_list = []\n pos_vec_list = []\n\n for path in tqdm(all_files):\n data = parse_file(path, delim)\n\n frames = np.unique(data[:, 0]).tolist()\n frame_data = []\n for frame in frames:\n frame_data.append(data[frame == data[:, 1], :]) # frame_id\n num_sequences = int(\n math.ceil((len(frames) - self.seq_len + 1) / skip))\n\n for idx in range(0, num_sequences * self.skip + 1, skip):\n curr_seq_data = np.concatenate(\n frame_data[idx:idx + self.seq_len], axis=0)\n peds_in_curr_seq = np.unique(curr_seq_data[:, 2]) # player_id\n curr_seq_rel = np.zeros((len(peds_in_curr_seq), 2,\n self.seq_len))\n curr_seq = np.zeros((len(peds_in_curr_seq), 2, self.seq_len))\n curr_loss_mask = np.zeros((len(peds_in_curr_seq),\n self.seq_len))\n # vectors\n curr_team = np.zeros((len(peds_in_curr_seq), 3, self.seq_len)) # 0 1 ball\n curr_position = 
np.zeros((len(peds_in_curr_seq), 4, self.seq_len)) # C F G ball\n\n num_peds_considered = 0\n _non_linear_ped = []\n\n for _, ped_id in enumerate(peds_in_curr_seq):\n curr_ped_seq_full = curr_seq_data[curr_seq_data[:, 2] == ped_id, :] # player_id\n curr_ped_seq_full = np.around(curr_ped_seq_full, decimals=4)\n pad_front = frames.index(curr_ped_seq_full[0, 1]) - idx # frame_id\n pad_end = frames.index(curr_ped_seq_full[-1, 1]) - idx + 1 # frame_id\n if pad_end - pad_front != self.seq_len or curr_ped_seq_full.shape[0] != self.seq_len:\n continue\n curr_ped_seq = np.transpose(curr_ped_seq_full[:, 3:5]) # x,y\n curr_ped_seq = curr_ped_seq * self.factor # conversion\n # Make coordinates relative\n rel_curr_ped_seq = np.zeros(curr_ped_seq.shape)\n rel_curr_ped_seq[:, 1:] = curr_ped_seq[:, 1:] - curr_ped_seq[:, :-1]\n _idx = num_peds_considered\n\n curr_seq[_idx, :, pad_front:pad_end] = curr_ped_seq\n curr_seq_rel[_idx, :, pad_front:pad_end] = rel_curr_ped_seq\n # Linear vs Non-Linear Trajectory\n _non_linear_ped.append(\n poly_fit(curr_ped_seq, pred_len, threshold))\n curr_loss_mask[_idx, pad_front:pad_end] = 1\n\n # Team vector\n curr_ped_team = np.transpose(curr_ped_seq_full[:, 5:8]) # [ 0 1 ball]\n curr_team[_idx, :, pad_front:pad_end] = curr_ped_team\n\n # Position Vector\n curr_ped_pos = np.transpose(curr_ped_seq_full[:, 8:]) # [ C F G ball]\n curr_position[_idx, :, pad_front:pad_end] = curr_ped_pos\n\n num_peds_considered += 1\n\n if num_peds_considered > min_ped:\n non_linear_ped += _non_linear_ped\n num_peds_in_seq.append(num_peds_considered)\n loss_mask_list.append(curr_loss_mask[:num_peds_considered])\n seq_list.append(curr_seq[:num_peds_considered])\n seq_list_rel.append(curr_seq_rel[:num_peds_considered])\n team_vec_list.append(curr_team[:num_peds_considered]) # team vector\n pos_vec_list.append(curr_position[:num_peds_considered]) # pos_vec_list\n\n self.num_seq = len(seq_list)\n seq_list = np.concatenate(seq_list, axis=0)\n seq_list_rel = np.concatenate(seq_list_rel, axis=0)\n\n team_vec_list = np.concatenate(team_vec_list, axis=0)\n pos_vec_list = np.concatenate(pos_vec_list, axis=0)\n\n loss_mask_list = np.concatenate(loss_mask_list, axis=0)\n non_linear_ped = np.asarray(non_linear_ped)\n\n # Convert numpy -> Torch Tensor\n self.obs_traj = torch.from_numpy(\n seq_list[:, :, :self.obs_len]).type(torch.float)\n self.pred_traj = torch.from_numpy(\n seq_list[:, :, self.obs_len:]).type(torch.float)\n self.obs_traj_rel = torch.from_numpy(\n seq_list_rel[:, :, :self.obs_len]).type(torch.float)\n self.pred_traj_rel = torch.from_numpy(\n seq_list_rel[:, :, self.obs_len:]).type(torch.float)\n\n self.obs_team_vec = torch.from_numpy(\n team_vec_list[:, :, :self.obs_len]).type(torch.float)\n self.obs_pos_vec = torch.from_numpy(\n pos_vec_list[:, :, :self.obs_len]).type(torch.float)\n\n self.obs_team_vec_pred = torch.from_numpy(\n team_vec_list[:, :, self.obs_len:]).type(torch.float)\n self.obs_pos_vec_pred = torch.from_numpy(\n pos_vec_list[:, :, self.obs_len:]).type(torch.float)\n\n self.loss_mask = torch.from_numpy(loss_mask_list).type(torch.float)\n self.non_linear_ped = torch.from_numpy(non_linear_ped).type(torch.float)\n cum_start_idx = [0] + np.cumsum(num_peds_in_seq).tolist()\n self.seq_start_end = [\n (start, end)\n for start, end in zip(cum_start_idx, cum_start_idx[1:])\n ]\n\n\n def __len__(self):\n return self.num_seq\n\n def __getitem__(self, index):\n start, end = self.seq_start_end[index]\n out = [\n self.obs_traj[start:end, :], self.pred_traj[start:end, :],\n 
self.obs_traj_rel[start:end, :], self.pred_traj_rel[start:end, :],\n self.obs_team_vec[start:end, :], self.obs_pos_vec[start:end, :],\n self.obs_team_vec_pred[start: end, :], self.obs_pos_vec_pred[start: end, :],\n self.non_linear_ped[start:end], self.loss_mask[start:end, :]\n ]\n return out\n" ]
[ [ "numpy.cos", "numpy.linspace", "numpy.sin" ], [ "torch.LongTensor", "numpy.polyfit", "numpy.linspace", "torch.cat", "numpy.asarray", "numpy.unique", "numpy.around", "numpy.cumsum", "torch.from_numpy", "numpy.concatenate", "numpy.transpose", "numpy.zeros" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
ChristophReich1996/Mode_Collapse
[ "937ee8bf96510fbf4070fc7e14b78276ab036b8c" ]
[ "utils.py" ]
[ "from typing import Optional\n\nimport torch\nimport torch.nn as nn\nfrom torch.nn.utils import spectral_norm\nimport numpy as np\n\n\ndef get_generator(latent_size: int, use_spectral_norm: bool) -> nn.Module:\n \"\"\"\n Returns the generator network.\n :param latent_size: (int) Size of the latent input vector\n :param use_spectral_norm: (bool) If true spectral norm is utilized\n :return: (nn.Module) Simple feed forward neural network with three layers,\n \"\"\"\n if use_spectral_norm:\n return nn.Sequential(spectral_norm(nn.Linear(latent_size, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 256, bias=True)),\n nn.Tanh(),\n spectral_norm(nn.Linear(256, 2, bias=True)))\n return nn.Sequential(nn.Linear(latent_size, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 256, bias=True),\n nn.Tanh(),\n nn.Linear(256, 2, bias=True))\n\n\ndef get_discriminator(use_spectral_norm: bool) -> nn.Module:\n \"\"\"\n Returns the discriminator network.\n :param use_spectral_norm: (bool) If true spectral norm is utilized\n :return: (nn.Module) Simple feed forward neural network with three layers and probability output.\n \"\"\"\n if use_spectral_norm:\n return nn.Sequential(spectral_norm(nn.Linear(2, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 256, bias=True)),\n nn.LeakyReLU(),\n spectral_norm(nn.Linear(256, 1, bias=True)))\n return nn.Sequential(nn.Linear(2, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 256, bias=True),\n nn.LeakyReLU(),\n nn.Linear(256, 1, bias=True))\n\n\ndef get_data(samples: Optional[int] = 400, variance: Optional[float] = 0.05) -> torch.Tensor:\n \"\"\"\n Function generates a 2d ring of 8 Gaussians\n :param samples: (Optional[int]) Number of samples including in the resulting dataset. Must be a multiple of 8.\n :param variance: (Optional[float]) Variance of the gaussian\n :return: (torch.Tensor) generated data\n \"\"\"\n assert samples % 8 == 0 and samples > 0, \"Number of samples must be a multiple of 8 and bigger than 0\"\n # Init angels of the means\n angels = torch.cumsum((2 * np.pi / 8) * torch.ones((8)), dim=0)\n # Convert angles to 2D coordinates\n means = torch.stack([torch.cos(angels), torch.sin(angels)], dim=0)\n # Generate data\n data = torch.empty((2, samples))\n counter = 0\n for gaussian in range(means.shape[1]):\n for sample in range(int(samples / 8)):\n data[:, counter] = torch.normal(means[:, gaussian], variance)\n counter += 1\n # Reshape data\n data = data.T\n # Shuffle data\n data = data[torch.randperm(data.shape[0])]\n # Convert numpy array to tensor\n return data.float()\n" ]
[ [ "torch.normal", "torch.ones", "torch.empty", "torch.sin", "torch.randperm", "torch.nn.Tanh", "torch.nn.Linear", "torch.nn.LeakyReLU", "torch.cos" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
galad-loth/LearnDescriptor
[ "30552a699597415a13793eb85d21b5e33a296a99" ]
[ "symbols/symbol_ssdh.py" ]
[ "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Mar 07 21:00:11 2017\r\n\r\n@author: galad-loth\r\n\"\"\"\r\nimport numpy as npy\r\nimport mxnet as mx\r\n\r\nclass HashLossLayer(mx.operator.NumpyOp):\r\n def __init__(self, w_bin,w_balance):\r\n super(HashLossLayer, self).__init__(False)\r\n self.w_bin=w_bin\r\n self.w_balance=w_balance\r\n \r\n def list_arguments(self):\r\n return ['data']\r\n \r\n def list_outputs(self):\r\n return ['output']\r\n \r\n def infer_shape(self, in_shape):\r\n data_shape=in_shape[0]\r\n return [data_shape],[data_shape]\r\n \r\n def forward(self, in_data, out_data):\r\n x=in_data[0]\r\n# l=in_data[1]\r\n y=out_data[0]\r\n xs=x-0.5\r\n y[:]=1\r\n y[xs<0]=0\r\n# y[:]=npy.ones((x.shape[0],1))-l.reshape((x.shape[0],1))*x \r\n \r\n def backward(self, out_grad, in_data, out_data, in_grad):\r\n x=in_data[0]\r\n dx=in_grad[0]\r\n \r\n grad1=-2*(x-0.5)/x.shape[1]\r\n mu=npy.mean(x,axis=1)\r\n grad2=2*(mu-0.5)/x.shape[1]\r\n \r\n grad=self.w_bin*grad1+self.w_balance*grad2\r\n dx[:]=grad\r\n\r\n\r\ndef get_finetune_symbol(net_pre,arg_params, \r\n num_latent, num_class,layer_name='flatten'):\r\n \"\"\"\r\n net_pre: the pre-trained network symbol\r\n arg_params: the argument parameters of the pre-trained model\r\n num_latent: the number of latent layer units for the fine-tune datasets\r\n layer_name: the layer name before the last fully-connected layer\r\n \"\"\"\r\n all_layers = net_pre.get_internals()\r\n load_net = all_layers[layer_name+'_output']\r\n latent = mx.symbol.FullyConnected(data=load_net, num_hidden=num_latent, name='latent_ssdh')\r\n latent = mx.sym.Activation(data=latent, act_type=\"sigmoid\", name=\"sigmoid_ssdh\")\r\n class_net = mx.symbol.FullyConnected(data=latent, num_hidden=num_class, name='fc_ssdh')\r\n class_net = mx.symbol.SoftmaxOutput(data=class_net, name='softmax')\r\n hash_loss=HashLossLayer(0.1,0.1)\r\n hash_net=hash_loss(data=latent, name=\"hash\")\r\n net = mx.sym.Group([class_net,hash_net])\r\n new_args = dict({k:arg_params[k] for k in arg_params if 'fc' not in k})\r\n return (net, new_args)\r\n " ]
[ [ "numpy.mean" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
stevenvandenberghe/pandas
[ "8cbee356da1161c56c64f6f89cb5548bcadc3e44" ]
[ "pandas/tests/reshape/test_tile.py" ]
[ "import os\nimport pytest\n\nimport numpy as np\nfrom pandas.compat import zip\n\nfrom pandas import (Series, isna, to_datetime, DatetimeIndex,\n Timestamp, Interval, IntervalIndex, Categorical,\n cut, qcut, date_range)\nimport pandas.util.testing as tm\nfrom pandas.api.types import CategoricalDtype as CDT\n\nfrom pandas.core.algorithms import quantile\nimport pandas.core.reshape.tile as tmod\n\n\nclass TestCut(object):\n\n def test_simple(self):\n data = np.ones(5, dtype='int64')\n result = cut(data, 4, labels=False)\n expected = np.array([1, 1, 1, 1, 1])\n tm.assert_numpy_array_equal(result, expected,\n check_dtype=False)\n\n def test_bins(self):\n data = np.array([.2, 1.4, 2.5, 6.2, 9.7, 2.1])\n result, bins = cut(data, 3, retbins=True)\n\n intervals = IntervalIndex.from_breaks(bins.round(3))\n intervals = intervals.take([0, 0, 0, 1, 2, 0])\n expected = Categorical(intervals, ordered=True)\n tm.assert_categorical_equal(result, expected)\n tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667,\n 6.53333333, 9.7]))\n\n def test_right(self):\n data = np.array([.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575])\n result, bins = cut(data, 4, right=True, retbins=True)\n intervals = IntervalIndex.from_breaks(bins.round(3))\n expected = Categorical(intervals, ordered=True)\n expected = expected.take([0, 0, 0, 2, 3, 0, 0])\n tm.assert_categorical_equal(result, expected)\n tm.assert_almost_equal(bins, np.array([0.1905, 2.575, 4.95,\n 7.325, 9.7]))\n\n def test_noright(self):\n data = np.array([.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575])\n result, bins = cut(data, 4, right=False, retbins=True)\n intervals = IntervalIndex.from_breaks(bins.round(3), closed='left')\n intervals = intervals.take([0, 0, 0, 2, 3, 0, 1])\n expected = Categorical(intervals, ordered=True)\n tm.assert_categorical_equal(result, expected)\n tm.assert_almost_equal(bins, np.array([0.2, 2.575, 4.95,\n 7.325, 9.7095]))\n\n def test_arraylike(self):\n data = [.2, 1.4, 2.5, 6.2, 9.7, 2.1]\n result, bins = cut(data, 3, retbins=True)\n intervals = IntervalIndex.from_breaks(bins.round(3))\n intervals = intervals.take([0, 0, 0, 1, 2, 0])\n expected = Categorical(intervals, ordered=True)\n tm.assert_categorical_equal(result, expected)\n tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667,\n 6.53333333, 9.7]))\n\n def test_bins_from_intervalindex(self):\n c = cut(range(5), 3)\n expected = c\n result = cut(range(5), bins=expected.categories)\n tm.assert_categorical_equal(result, expected)\n\n expected = Categorical.from_codes(np.append(c.codes, -1),\n categories=c.categories,\n ordered=True)\n result = cut(range(6), bins=expected.categories)\n tm.assert_categorical_equal(result, expected)\n\n # doc example\n # make sure we preserve the bins\n ages = np.array([10, 15, 13, 12, 23, 25, 28, 59, 60])\n c = cut(ages, bins=[0, 18, 35, 70])\n expected = IntervalIndex.from_tuples([(0, 18), (18, 35), (35, 70)])\n tm.assert_index_equal(c.categories, expected)\n\n result = cut([25, 20, 50], bins=c.categories)\n tm.assert_index_equal(result.categories, expected)\n tm.assert_numpy_array_equal(result.codes,\n np.array([1, 1, 2], dtype='int8'))\n\n def test_bins_not_monotonic(self):\n data = [.2, 1.4, 2.5, 6.2, 9.7, 2.1]\n pytest.raises(ValueError, cut, data, [0.1, 1.5, 1, 10])\n\n def test_wrong_num_labels(self):\n data = [.2, 1.4, 2.5, 6.2, 9.7, 2.1]\n pytest.raises(ValueError, cut, data, [0, 1, 10],\n labels=['foo', 'bar', 'baz'])\n\n def test_cut_corner(self):\n # h3h\n pytest.raises(ValueError, cut, [], 2)\n\n pytest.raises(ValueError, cut, [1, 2, 3], 
0.5)\n\n def test_cut_out_of_range_more(self):\n # #1511\n s = Series([0, -1, 0, 1, -3], name='x')\n ind = cut(s, [0, 1], labels=False)\n exp = Series([np.nan, np.nan, np.nan, 0, np.nan], name='x')\n tm.assert_series_equal(ind, exp)\n\n def test_labels(self):\n arr = np.tile(np.arange(0, 1.01, 0.1), 4)\n\n result, bins = cut(arr, 4, retbins=True)\n ex_levels = IntervalIndex.from_breaks([-1e-3, 0.25, 0.5, 0.75, 1])\n tm.assert_index_equal(result.categories, ex_levels)\n\n result, bins = cut(arr, 4, retbins=True, right=False)\n ex_levels = IntervalIndex.from_breaks([0, 0.25, 0.5, 0.75, 1 + 1e-3],\n closed='left')\n tm.assert_index_equal(result.categories, ex_levels)\n\n def test_cut_pass_series_name_to_factor(self):\n s = Series(np.random.randn(100), name='foo')\n\n factor = cut(s, 4)\n assert factor.name == 'foo'\n\n def test_label_precision(self):\n arr = np.arange(0, 0.73, 0.01)\n\n result = cut(arr, 4, precision=2)\n ex_levels = IntervalIndex.from_breaks([-0.00072, 0.18, 0.36,\n 0.54, 0.72])\n tm.assert_index_equal(result.categories, ex_levels)\n\n def test_na_handling(self):\n arr = np.arange(0, 0.75, 0.01)\n arr[::3] = np.nan\n\n result = cut(arr, 4)\n\n result_arr = np.asarray(result)\n\n ex_arr = np.where(isna(arr), np.nan, result_arr)\n\n tm.assert_almost_equal(result_arr, ex_arr)\n\n result = cut(arr, 4, labels=False)\n ex_result = np.where(isna(arr), np.nan, result)\n tm.assert_almost_equal(result, ex_result)\n\n def test_inf_handling(self):\n data = np.arange(6)\n data_ser = Series(data, dtype='int64')\n\n bins = [-np.inf, 2, 4, np.inf]\n result = cut(data, bins)\n result_ser = cut(data_ser, bins)\n\n ex_uniques = IntervalIndex.from_breaks(bins)\n tm.assert_index_equal(result.categories, ex_uniques)\n assert result[5] == Interval(4, np.inf)\n assert result[0] == Interval(-np.inf, 2)\n assert result_ser[5] == Interval(4, np.inf)\n assert result_ser[0] == Interval(-np.inf, 2)\n\n def test_qcut(self):\n arr = np.random.randn(1000)\n\n # We store the bins as Index that have been rounded\n # to comparisons are a bit tricky.\n labels, bins = qcut(arr, 4, retbins=True)\n ex_bins = quantile(arr, [0, .25, .5, .75, 1.])\n result = labels.categories.left.values\n assert np.allclose(result, ex_bins[:-1], atol=1e-2)\n result = labels.categories.right.values\n assert np.allclose(result, ex_bins[1:], atol=1e-2)\n\n ex_levels = cut(arr, ex_bins, include_lowest=True)\n tm.assert_categorical_equal(labels, ex_levels)\n\n def test_qcut_bounds(self):\n arr = np.random.randn(1000)\n\n factor = qcut(arr, 10, labels=False)\n assert len(np.unique(factor)) == 10\n\n def test_qcut_specify_quantiles(self):\n arr = np.random.randn(100)\n\n factor = qcut(arr, [0, .25, .5, .75, 1.])\n expected = qcut(arr, 4)\n tm.assert_categorical_equal(factor, expected)\n\n def test_qcut_all_bins_same(self):\n tm.assert_raises_regex(ValueError, \"edges.*unique\", qcut,\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 3)\n\n def test_cut_out_of_bounds(self):\n arr = np.random.randn(100)\n\n result = cut(arr, [-1, 0, 1])\n\n mask = isna(result)\n ex_mask = (arr < -1) | (arr > 1)\n tm.assert_numpy_array_equal(mask, ex_mask)\n\n def test_cut_pass_labels(self):\n arr = [50, 5, 10, 15, 20, 30, 70]\n bins = [0, 25, 50, 100]\n labels = ['Small', 'Medium', 'Large']\n\n result = cut(arr, bins, labels=labels)\n exp = Categorical(['Medium'] + 4 * ['Small'] + ['Medium', 'Large'],\n categories=labels,\n ordered=True)\n tm.assert_categorical_equal(result, exp)\n\n result = cut(arr, bins, labels=Categorical.from_codes([0, 1, 2],\n labels))\n exp = 
Categorical.from_codes([1] + 4 * [0] + [1, 2], labels)\n tm.assert_categorical_equal(result, exp)\n\n # issue 16459\n labels = ['Good', 'Medium', 'Bad']\n result = cut(arr, 3, labels=labels)\n exp = cut(arr, 3, labels=Categorical(labels, categories=labels,\n ordered=True))\n tm.assert_categorical_equal(result, exp)\n\n def test_qcut_include_lowest(self):\n values = np.arange(10)\n\n ii = qcut(values, 4)\n\n ex_levels = IntervalIndex(\n [Interval(-0.001, 2.25),\n Interval(2.25, 4.5),\n Interval(4.5, 6.75),\n Interval(6.75, 9)])\n tm.assert_index_equal(ii.categories, ex_levels)\n\n def test_qcut_nas(self):\n arr = np.random.randn(100)\n arr[:20] = np.nan\n\n result = qcut(arr, 4)\n assert isna(result[:20]).all()\n\n def test_qcut_index(self):\n result = qcut([0, 2], 2)\n intervals = [Interval(-0.001, 1), Interval(1, 2)]\n expected = Categorical(intervals, ordered=True)\n tm.assert_categorical_equal(result, expected)\n\n def test_round_frac(self):\n # it works\n result = cut(np.arange(11.), 2)\n\n result = cut(np.arange(11.) / 1e10, 2)\n\n # #1979, negative numbers\n\n result = tmod._round_frac(-117.9998, precision=3)\n assert result == -118\n result = tmod._round_frac(117.9998, precision=3)\n assert result == 118\n\n result = tmod._round_frac(117.9998, precision=2)\n assert result == 118\n result = tmod._round_frac(0.000123456, precision=2)\n assert result == 0.00012\n\n def test_qcut_binning_issues(self):\n # #1978, 1979\n path = os.path.join(tm.get_data_path(), 'cut_data.csv')\n arr = np.loadtxt(path)\n\n result = qcut(arr, 20)\n\n starts = []\n ends = []\n for lev in np.unique(result):\n s = lev.left\n e = lev.right\n assert s != e\n\n starts.append(float(s))\n ends.append(float(e))\n\n for (sp, sn), (ep, en) in zip(zip(starts[:-1], starts[1:]),\n zip(ends[:-1], ends[1:])):\n assert sp < sn\n assert ep < en\n assert ep <= sn\n\n def test_cut_return_intervals(self):\n s = Series([0, 1, 2, 3, 4, 5, 6, 7, 8])\n res = cut(s, 3)\n exp_bins = np.linspace(0, 8, num=4).round(3)\n exp_bins[0] -= 0.008\n exp = Series(IntervalIndex.from_breaks(exp_bins, closed='right').take(\n [0, 0, 0, 1, 1, 1, 2, 2, 2])).astype(CDT(ordered=True))\n tm.assert_series_equal(res, exp)\n\n def test_qcut_return_intervals(self):\n s = Series([0, 1, 2, 3, 4, 5, 6, 7, 8])\n res = qcut(s, [0, 0.333, 0.666, 1])\n exp_levels = np.array([Interval(-0.001, 2.664),\n Interval(2.664, 5.328), Interval(5.328, 8)])\n exp = Series(exp_levels.take([0, 0, 0, 1, 1, 1, 2, 2, 2])).astype(\n CDT(ordered=True))\n tm.assert_series_equal(res, exp)\n\n def test_series_retbins(self):\n # GH 8589\n s = Series(np.arange(4))\n result, bins = cut(s, 2, retbins=True)\n expected = Series(IntervalIndex.from_breaks(\n [-0.003, 1.5, 3], closed='right').repeat(2)).astype(\n CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n result, bins = qcut(s, 2, retbins=True)\n expected = Series(IntervalIndex.from_breaks(\n [-0.001, 1.5, 3], closed='right').repeat(2)).astype(\n CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n def test_qcut_duplicates_bin(self):\n # GH 7751\n values = [0, 0, 0, 0, 1, 2, 3]\n expected = IntervalIndex([Interval(-0.001, 1), Interval(1, 3)])\n\n result = qcut(values, 3, duplicates='drop')\n tm.assert_index_equal(result.categories, expected)\n\n pytest.raises(ValueError, qcut, values, 3)\n pytest.raises(ValueError, qcut, values, 3, duplicates='raise')\n\n # invalid\n pytest.raises(ValueError, qcut, values, 3, duplicates='foo')\n\n def test_single_quantile(self):\n # issue 15431\n expected = Series([0, 0])\n\n 
s = Series([9., 9.])\n result = qcut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n result = qcut(s, 1)\n intervals = IntervalIndex([Interval(8.999, 9.0),\n Interval(8.999, 9.0)], closed='right')\n expected = Series(intervals).astype(CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n s = Series([-9., -9.])\n expected = Series([0, 0])\n result = qcut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n result = qcut(s, 1)\n intervals = IntervalIndex([Interval(-9.001, -9.0),\n Interval(-9.001, -9.0)], closed='right')\n expected = Series(intervals).astype(CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n s = Series([0., 0.])\n expected = Series([0, 0])\n result = qcut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n result = qcut(s, 1)\n intervals = IntervalIndex([Interval(-0.001, 0.0),\n Interval(-0.001, 0.0)], closed='right')\n expected = Series(intervals).astype(CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n s = Series([9])\n expected = Series([0])\n result = qcut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n result = qcut(s, 1)\n intervals = IntervalIndex([Interval(8.999, 9.0)], closed='right')\n expected = Series(intervals).astype(CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n s = Series([-9])\n expected = Series([0])\n result = qcut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n result = qcut(s, 1)\n intervals = IntervalIndex([Interval(-9.001, -9.0)], closed='right')\n expected = Series(intervals).astype(CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n s = Series([0])\n expected = Series([0])\n result = qcut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n result = qcut(s, 1)\n intervals = IntervalIndex([Interval(-0.001, 0.0)], closed='right')\n expected = Series(intervals).astype(CDT(ordered=True))\n tm.assert_series_equal(result, expected)\n\n def test_single_bin(self):\n # issue 14652\n expected = Series([0, 0])\n\n s = Series([9., 9.])\n result = cut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n\n s = Series([-9., -9.])\n result = cut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n\n expected = Series([0])\n\n s = Series([9])\n result = cut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n\n s = Series([-9])\n result = cut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n\n # issue 15428\n expected = Series([0, 0])\n\n s = Series([0., 0.])\n result = cut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n\n expected = Series([0])\n\n s = Series([0])\n result = cut(s, 1, labels=False)\n tm.assert_series_equal(result, expected)\n\n def test_datetime_cut(self):\n # GH 14714\n # testing for time data to be present as series\n data = to_datetime(Series(['2013-01-01', '2013-01-02', '2013-01-03']))\n\n result, bins = cut(data, 3, retbins=True)\n expected = (\n Series(IntervalIndex([\n Interval(Timestamp('2012-12-31 23:57:07.200000'),\n Timestamp('2013-01-01 16:00:00')),\n Interval(Timestamp('2013-01-01 16:00:00'),\n Timestamp('2013-01-02 08:00:00')),\n Interval(Timestamp('2013-01-02 08:00:00'),\n Timestamp('2013-01-03 00:00:00'))]))\n .astype(CDT(ordered=True)))\n\n tm.assert_series_equal(result, expected)\n\n # testing for time data to be present as list\n data = [np.datetime64('2013-01-01'), np.datetime64('2013-01-02'),\n np.datetime64('2013-01-03')]\n result, bins = cut(data, 3, retbins=True)\n 
tm.assert_series_equal(Series(result), expected)\n\n        # testing for time data to be present as ndarray\n        data = np.array([np.datetime64('2013-01-01'),\n                         np.datetime64('2013-01-02'),\n                         np.datetime64('2013-01-03')])\n        result, bins = cut(data, 3, retbins=True)\n        tm.assert_series_equal(Series(result), expected)\n\n        # testing for time data to be present as datetime index\n        data = DatetimeIndex(['2013-01-01', '2013-01-02', '2013-01-03'])\n        result, bins = cut(data, 3, retbins=True)\n        tm.assert_series_equal(Series(result), expected)\n\n    def test_datetime_bin(self):\n        data = [np.datetime64('2012-12-13'), np.datetime64('2012-12-15')]\n        bin_data = ['2012-12-12', '2012-12-14', '2012-12-16']\n        expected = (\n            Series(IntervalIndex([\n                Interval(Timestamp(bin_data[0]), Timestamp(bin_data[1])),\n                Interval(Timestamp(bin_data[1]), Timestamp(bin_data[2]))]))\n            .astype(CDT(ordered=True)))\n\n        for conv in [Timestamp, Timestamp, np.datetime64]:\n            bins = [conv(v) for v in bin_data]\n            result = cut(data, bins=bins)\n            tm.assert_series_equal(Series(result), expected)\n\n        bin_pydatetime = [Timestamp(v).to_pydatetime() for v in bin_data]\n        result = cut(data, bins=bin_pydatetime)\n        tm.assert_series_equal(Series(result), expected)\n\n        bins = to_datetime(bin_data)\n        result = cut(data, bins=bins)\n        tm.assert_series_equal(Series(result), expected)\n\n    def test_datetime_nan(self):\n\n        def f():\n            cut(date_range('20130101', periods=3), bins=[0, 2, 4])\n        pytest.raises(ValueError, f)\n\n        result = cut(date_range('20130102', periods=5),\n                     bins=date_range('20130101', periods=2))\n        mask = result.categories.isna()\n        tm.assert_numpy_array_equal(mask, np.array([False]))\n        mask = result.isna()\n        tm.assert_numpy_array_equal(\n            mask, np.array([False, True, True, True, True]))\n\n    @pytest.mark.parametrize(\n        \"array_1_writeable, array_2_writeable\",\n        [(True, True), (True, False), (False, False)])\n    def test_cut_read_only(self, array_1_writeable, array_2_writeable):\n        # issue 18773\n        array_1 = np.arange(0, 100, 10)\n        array_1.flags.writeable = array_1_writeable\n\n        array_2 = np.arange(0, 100, 10)\n        array_2.flags.writeable = array_2_writeable\n\n        hundred_elements = np.arange(100)\n\n        tm.assert_categorical_equal(cut(hundred_elements, array_1),\n                                    cut(hundred_elements, array_2))\n" ]
[ [ "pandas.to_datetime", "pandas.Series", "numpy.linspace", "numpy.asarray", "pandas.util.testing.assert_index_equal", "numpy.random.randn", "pandas.isna", "pandas.util.testing.assert_numpy_array_equal", "numpy.allclose", "pandas.util.testing.assert_categorical_equal", "numpy.unique", "numpy.arange", "pandas.util.testing.assert_series_equal", "pandas.DatetimeIndex", "pandas.cut", "pandas.core.reshape.tile._round_frac", "pandas.Categorical.from_codes", "pandas.core.algorithms.quantile", "pandas.Categorical", "pandas.util.testing.assert_almost_equal", "numpy.append", "pandas.Interval", "pandas.date_range", "numpy.array", "pandas.api.types.CategoricalDtype", "pandas.util.testing.assert_raises_regex", "pandas.util.testing.get_data_path", "numpy.ones", "pandas.IntervalIndex.from_tuples", "pandas.IntervalIndex.from_breaks", "numpy.datetime64", "pandas.compat.zip", "pandas.Timestamp", "pandas.qcut", "numpy.loadtxt" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] } ]
Jokeren/RzLinear
[ "d318d95254cd5c3dcf814774d22dc71179450aa0" ]
[ "python/rz_linear/impl/RzLinearBackward.py" ]
[ "from typing import Tuple\nimport torch\nimport triton\nimport triton.language as tl\n\n\ndef rz_linear_backward_tl(input: torch.tensor, hashed_weight: torch.tensor, output_grad: torch.tensor,\n M: int, K: int, N: int, H: int,\n R3: int, R2: int, R1: int, R0: int,\n allow_tf32: bool = True, allow_autotune: bool = False,\n BLOCK_SIZE_M: int = 64, BLOCK_SIZE_N: int = 64, BLOCK_SIZE_K: int = 32,\n GROUP_SIZE: int = 4) -> Tuple[torch.tensor, torch.tensor]:\n input_grad = rz_linear_backward_input_grad_tl(output_grad, hashed_weight, M, K, N, H, R3, R2, R1, R0, allow_tf32=allow_tf32, allow_autotune=allow_autotune,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n weight_grad = rz_linear_backward_weight_grad_tl(input, output_grad, M, K, N, H, R3, R2, R1, R0, allow_tf32=allow_tf32, allow_autotune=allow_autotune,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n return input_grad, weight_grad\n\n\ndef rz_linear_backward_weight_grad_tl(input: torch.tensor, output_grad: torch.tensor,\n M: int, K: int, N: int, H: int,\n R3: int, R2: int, R1: int, R0: int,\n allow_tf32: bool = True, allow_autotune: bool = True,\n BLOCK_SIZE_M: int = 64, BLOCK_SIZE_N: int = 64, BLOCK_SIZE_K: int = 32,\n GROUP_SIZE: int = 8) -> torch.tensor:\n '''\n Compute input^T x output_grad and return a weight_grad tensor\n\n Args:\n input (Tensor): A MxK tensor\n output_grad (Tensor): A MxN tensor\n M, K, N, H (int): Matrix dimensions\n R3, R2, R1, R0 (int): Random numbers\n allow_tf32 (bool): If tensor core is allowed\n BLOCK_SIZE_M, BLOCK_SIZE_N, BLOCK_SIZE_K, GROUP_SIZE: Matrix tiling parameters for performance tunning\n\n Returns:\n hashed_weight_grad (Tensor): A 1xH tensor\n '''\n assert (K % 4 == 0)\n assert (N % 4 == 0)\n # allocates output\n hashed_weight_grad = torch.zeros(\n (H), device=output_grad.device, dtype=output_grad.dtype)\n # 1D launch kernel where each block gets its own program.\n\n def grid(META): return (\n triton.cdiv(K, META['BLOCK_SIZE_K']) *\n triton.cdiv(N, META['BLOCK_SIZE_N']),\n )\n\n if allow_tf32:\n assert (M % 32 == 0)\n else:\n assert (M % 8 == 0)\n\n if allow_autotune:\n if allow_tf32:\n rz_linear_backward_weight_grad_kernel_tf32[grid](\n input, output_grad, hashed_weight_grad,\n M, N, K, H,\n input.stride(1), input.stride(0),\n output_grad.stride(0), output_grad.stride(1),\n R3=R3, R2=R2, R1=R1, R0=R0,\n GROUP_SIZE=GROUP_SIZE\n )\n else:\n rz_linear_backward_weight_grad_kernel_fp32[grid](\n input, output_grad, hashed_weight_grad,\n M, N, K, H,\n input.stride(1), input.stride(0),\n output_grad.stride(0), output_grad.stride(1),\n R3=R3, R2=R2, R1=R1, R0=R0,\n GROUP_SIZE=GROUP_SIZE\n )\n else:\n rz_linear_backward_weight_grad_kernel_notune[grid](\n input, output_grad, hashed_weight_grad,\n M, N, K, H,\n input.stride(1), input.stride(0),\n output_grad.stride(0), output_grad.stride(1),\n R3=R3, R2=R2, R1=R1, R0=R0,\n allow_tf32=allow_tf32,\n GROUP_SIZE=GROUP_SIZE,\n BLOCK_SIZE_K=BLOCK_SIZE_K,\n BLOCK_SIZE_M=BLOCK_SIZE_M,\n BLOCK_SIZE_N=BLOCK_SIZE_N\n )\n\n return hashed_weight_grad\n\n\[email protected](\n configs=[\n # basic configs for compute-bound matmuls\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, 
num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 32,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 32, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 32,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 32, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 16}, num_stages=2, num_warps=4),\n ],\n key=['M', 'N', 'K'],\n)\[email protected]\ndef rz_linear_backward_weight_grad_kernel_fp32(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a particular dimension.\n stride_am, stride_ak,\n stride_bm, stride_bn,\n # Random numbers\n R3, R2, R1, R0,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n rz_linear_backward_weight_grad_core(a_ptr=a_ptr, b_ptr=b_ptr, c_ptr=c_ptr, M=M, N=N, K=K, H=H,\n stride_am=stride_am, stride_ak=stride_ak, stride_bm=stride_bm, stride_bn=stride_bn,\n R3=R3, R2=R2, R1=R1, R0=R0, allow_tf32=False,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n\n\[email protected](\n configs=[\n # basic configs for compute-bound matmuls\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, 
num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=8),\n triton.Config({'BLOCK_SIZE_N': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_N': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_M': 32}, num_stages=2, num_warps=4),\n ], key=['M', 'N', 'K'],\n)\[email protected]\ndef rz_linear_backward_weight_grad_kernel_tf32(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a particular dimension.\n stride_am, stride_ak,\n stride_bm, stride_bn,\n # Random numbers\n R3, R2, R1, R0,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n rz_linear_backward_weight_grad_core(a_ptr=a_ptr, b_ptr=b_ptr, c_ptr=c_ptr, M=M, N=N, K=K, H=H,\n stride_am=stride_am, stride_ak=stride_ak, stride_bm=stride_bm, stride_bn=stride_bn,\n R3=R3, R2=R2, R1=R1, R0=R0, allow_tf32=True,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n\n\[email protected]\ndef rz_linear_backward_weight_grad_kernel_notune(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a 
particular dimension.\n stride_am, stride_ak,\n stride_bm, stride_bn,\n # Random numbers\n R3, R2, R1, R0,\n allow_tf32: tl.constexpr,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n rz_linear_backward_weight_grad_core(a_ptr=a_ptr, b_ptr=b_ptr, c_ptr=c_ptr, M=M, N=N, K=K, H=H,\n stride_am=stride_am, stride_ak=stride_ak, stride_bm=stride_bm, stride_bn=stride_bn,\n R3=R3, R2=R2, R1=R1, R0=R0, allow_tf32=allow_tf32,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n\n\[email protected]\ndef rz_linear_backward_weight_grad_core(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a particular dimension.\n stride_am, stride_ak,\n stride_bm, stride_bn,\n # Random numbers\n R3, R2, R1, R0,\n allow_tf32: tl.constexpr,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n \"\"\"Kernel for computing the matmul C = A^T x B.\n A has shape (M, K), B has shape (M, N) and C has shape (K, N)\n \"\"\"\n pid = tl.program_id(axis=0)\n num_pid_k = tl.cdiv(K, BLOCK_SIZE_K)\n num_pid_n = tl.cdiv(N, BLOCK_SIZE_N)\n num_pid_in_group = GROUP_SIZE * num_pid_n\n group_id = pid // num_pid_in_group\n first_pid_k = group_id * GROUP_SIZE\n group_size_k = min(num_pid_k - first_pid_k, GROUP_SIZE)\n pid_k = first_pid_k + (pid % group_size_k)\n pid_n = (pid % num_pid_in_group) // group_size_k\n\n # [BLOCK_SIZE_K, BLOCK_SIZE_M]\n offs_ak = pid_k * BLOCK_SIZE_K + tl.arange(0, BLOCK_SIZE_K)\n offs_am = tl.arange(0, BLOCK_SIZE_M)\n a_ptrs = a_ptr + offs_ak[:, None] * \\\n stride_am + offs_am[None, :] * stride_ak\n\n # [BLOCK_SIZE_M, BLOCK_SIZE_N]\n offs_bn = pid_n * BLOCK_SIZE_N + tl.arange(0, BLOCK_SIZE_N)\n offs_bm = tl.arange(0, BLOCK_SIZE_M)\n b_ptrs = b_ptr + offs_bm[:, None] * \\\n stride_bm + offs_bn[None, :] * stride_bn\n\n # [BLOCK_SIZE_K, BLOCK_SIZE_N]\n c = tl.zeros((BLOCK_SIZE_K, BLOCK_SIZE_N), dtype=tl.float32)\n for _ in range(0, M//BLOCK_SIZE_M):\n # Note that for simplicity, we don't apply a mask here.\n # This means that if M is not a multiple of BLOCK_SIZE_M,\n # this will access out-of-bounds memory and produce an\n # error or (worse!) 
incorrect results.\n        # TODO(Keren): Add M checks\n        a = tl.load(a_ptrs)\n        b = tl.load(b_ptrs)\n        # We accumulate along the M dimension\n        c += tl.dot(a, b, allow_tf32=allow_tf32)\n        # Advance the ptrs to the next M block\n        a_ptrs += BLOCK_SIZE_M * stride_ak\n        b_ptrs += BLOCK_SIZE_M * stride_bm\n\n    # -----------------------------------------------------------\n    # Write back the block of the output matrix C\n    c_offset = c_ptr + tl.arange(0, BLOCK_SIZE_K)[:, None] * \\\n        BLOCK_SIZE_N + tl.arange(0, BLOCK_SIZE_N)[None, :]\n    c_ptrs = c_offset + (pid_k * R3 + pid_n * R2 +\n                         R1) % R0 % (H - BLOCK_SIZE_K * BLOCK_SIZE_N)\n    tl.atomic_add(c_ptrs, c)\n\n\ndef rz_linear_backward_input_grad_tl(output_grad: torch.tensor, hashed_weight: torch.tensor,\n                                     M: int, K: int, N: int, H: int,\n                                     R3: int, R2: int, R1: int, R0: int,\n                                     allow_tf32: bool = True, allow_autotune: bool = True,\n                                     BLOCK_SIZE_M: int = 64, BLOCK_SIZE_N: int = 64, BLOCK_SIZE_K: int = 32,\n                                     GROUP_SIZE: int = 4) -> torch.tensor:\n    '''\n    Compute output_grad x hashed_weight^T and return an input_grad tensor\n\n    Args:\n        output_grad (Tensor): A MxN tensor\n        hashed_weight (Tensor): A 1xH (KxN) tensor\n        M, K, N, H (int): matrix dimensions\n        R3, R2, R1, R0 (int): random numbers\n        allow_tf32 (bool): If tensor core is allowed\n        BLOCK_SIZE_M, BLOCK_SIZE_N, BLOCK_SIZE_K, GROUP_SIZE: Matrix tiling parameters for performance tuning\n\n    Returns:\n        input_grad (Tensor): A MxK tensor\n    '''\n    assert (M % 4 == 0)\n    assert (K % 4 == 0)\n    # allocates output\n    input_grad = torch.empty(\n        (M, K), device=output_grad.device, dtype=output_grad.dtype)\n\n    if allow_tf32:\n        assert (N % 32 == 0)\n    else:\n        assert (N % 8 == 0)\n\n    # 1D launch kernel where each block gets its own program.\n    def grid(META): return (\n        triton.cdiv(M, META['BLOCK_SIZE_M']) *\n        triton.cdiv(K, META['BLOCK_SIZE_K']),\n    )\n\n    if allow_autotune:\n        if allow_tf32:\n            rz_linear_backward_input_grad_kernel_tf32[grid](\n                output_grad, hashed_weight, input_grad,\n                M, N, K, H,\n                output_grad.stride(0), output_grad.stride(1),\n                input_grad.stride(0), input_grad.stride(1),\n                R3=R3, R2=R2, R1=R1, R0=R0,\n                GROUP_SIZE=GROUP_SIZE\n            )\n        else:\n            rz_linear_backward_input_grad_kernel_fp32[grid](\n                output_grad, hashed_weight, input_grad,\n                M, N, K, H,\n                output_grad.stride(0), output_grad.stride(1),\n                input_grad.stride(0), input_grad.stride(1),\n                R3=R3, R2=R2, R1=R1, R0=R0,\n                GROUP_SIZE=GROUP_SIZE\n            )\n    else:\n        rz_linear_backward_input_grad_kernel_notune[grid](\n            output_grad, hashed_weight, input_grad,\n            M, N, K, H,\n            output_grad.stride(0), output_grad.stride(1),\n            input_grad.stride(0), input_grad.stride(1),\n            R3=R3, R2=R2, R1=R1, R0=R0,\n            allow_tf32=allow_tf32,\n            num_warps=4,\n            num_stages=3,\n            BLOCK_SIZE_M=BLOCK_SIZE_M,\n            BLOCK_SIZE_N=BLOCK_SIZE_N,\n            BLOCK_SIZE_K=BLOCK_SIZE_K,\n            GROUP_SIZE=GROUP_SIZE\n        )\n    return input_grad\n\n\[email protected](\n    configs=[\n        # basic configs for compute-bound matmuls\n        triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n                       'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=8),\n        triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n                       'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n        triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 256,\n                       'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n        triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 128,\n                       'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n        triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 128,\n                       'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n        triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 64,\n                       'BLOCK_SIZE_N': 32}, 
num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 32,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 32, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 32,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 32, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 16}, num_stages=2, num_warps=4),\n ],\n key=['M', 'N', 'K'],\n)\[email protected]\ndef rz_linear_backward_input_grad_kernel_fp32(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a particular dimension.\n stride_am, stride_an,\n stride_cm, stride_ck,\n # Random numbers\n R3, R2, R1, R0,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n rz_linear_backward_input_grad_core(a_ptr=a_ptr, b_ptr=b_ptr, c_ptr=c_ptr,\n M=M, N=N, K=K, H=H,\n stride_am=stride_am, stride_an=stride_an,\n stride_cm=stride_cm, stride_ck=stride_ck,\n R3=R3, R2=R2, R1=R1, R0=R0,\n allow_tf32=False,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n\n\[email protected](\n configs=[\n # basic configs for compute-bound matmuls\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=4, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 128,\n 
'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=8),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=3, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=8),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=8),\n triton.Config({'BLOCK_SIZE_M': 256, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 256,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 128,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 128, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=4),\n triton.Config({'BLOCK_SIZE_M': 64, 'BLOCK_SIZE_K': 64,\n 'BLOCK_SIZE_N': 32}, num_stages=2, num_warps=4),\n ], key=['M', 'N', 'K'],\n)\[email protected]\ndef rz_linear_backward_input_grad_kernel_tf32(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a particular dimension.\n stride_am, stride_an,\n stride_cm, stride_ck,\n # Random numbers\n R3, R2, R1, R0,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n rz_linear_backward_input_grad_core(a_ptr=a_ptr, b_ptr=b_ptr, c_ptr=c_ptr,\n M=M, N=N, K=K, H=H,\n stride_am=stride_am, stride_an=stride_an,\n stride_cm=stride_cm, stride_ck=stride_ck,\n R3=R3, R2=R2, R1=R1, R0=R0,\n allow_tf32=True,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n\n\[email protected]\ndef rz_linear_backward_input_grad_kernel_notune(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n # element in a particular dimension.\n stride_am, stride_an,\n stride_cm, stride_ck,\n # Random numbers\n R3, R2, R1, R0,\n allow_tf32: tl.constexpr,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n rz_linear_backward_input_grad_core(a_ptr=a_ptr, b_ptr=b_ptr, c_ptr=c_ptr,\n M=M, N=N, K=K, H=H,\n stride_am=stride_am, stride_an=stride_an,\n stride_cm=stride_cm, stride_ck=stride_ck,\n R3=R3, R2=R2, R1=R1, R0=R0,\n allow_tf32=allow_tf32,\n BLOCK_SIZE_M=BLOCK_SIZE_M, BLOCK_SIZE_N=BLOCK_SIZE_N, BLOCK_SIZE_K=BLOCK_SIZE_K,\n GROUP_SIZE=GROUP_SIZE)\n\n\[email protected]\ndef rz_linear_backward_input_grad_core(\n # Pointers to matrices\n a_ptr, b_ptr, c_ptr,\n # Matrix dimensions\n M, N, K, H,\n # The stride variables represent how much to increase the ptr by when moving by 1\n 
# element in a particular dimension.\n stride_am, stride_an,\n stride_cm, stride_ck,\n # Random numbers\n R3, R2, R1, R0,\n allow_tf32: tl.constexpr,\n # Meta-parameters\n BLOCK_SIZE_M: tl.constexpr, BLOCK_SIZE_N: tl.constexpr, BLOCK_SIZE_K: tl.constexpr,\n GROUP_SIZE: tl.constexpr\n):\n \"\"\"Kernel for computing the matmul C = (A x B^T)\n A has shape (M, N), B has shape H->(K, N) and C has shape (M, K)\n \"\"\"\n pid = tl.program_id(axis=0)\n num_pid_k = tl.cdiv(K, BLOCK_SIZE_K)\n num_pid_m = tl.cdiv(M, BLOCK_SIZE_M)\n pid_m = pid // num_pid_k\n pid_k = pid % num_pid_k\n\n # [BLOCK_SIZE_M, BLOCK_SIZE_N]\n offs_am = pid_m * BLOCK_SIZE_M + tl.arange(0, BLOCK_SIZE_M)\n offs_an = tl.arange(0, BLOCK_SIZE_N)\n a_ptrs = a_ptr + offs_am[:, None] * \\\n stride_am + offs_an[None, :] * stride_an\n\n # [BLOCK_SIZE_N, BLOCK_SIZE_K]\n # Compute hash\n b_offset = b_ptr + \\\n tl.arange(0, BLOCK_SIZE_N)[\n :, None] + tl.arange(0, BLOCK_SIZE_K)[None, :] * BLOCK_SIZE_N\n b_ptrs = b_offset + (pid_k * R3 + 0 * R2 +\n R1) % R0 % (H - BLOCK_SIZE_K * BLOCK_SIZE_N)\n\n # [BLOCK_SIZE_M, BLOCK_SIZE_K]\n c = tl.zeros((BLOCK_SIZE_M, BLOCK_SIZE_K), dtype=tl.float32)\n for n in range(0, N//BLOCK_SIZE_N):\n # Note that for simplicity, we don't apply a mask here.\n # This means that if N is not a multiple of BLOCK_SIZE_N,\n # this will access out-of-bounds memory and produce an\n # error or (worse!) incorrect results.\n # TODO(Keren): Add N checks\n a = tl.load(a_ptrs)\n b = tl.load(b_ptrs)\n # We accumulate along the N dimension\n c += tl.dot(a, b, allow_tf32=allow_tf32)\n # Advance the ptrs to the next N block\n a_ptrs += BLOCK_SIZE_N * stride_an\n b_ptrs = b_offset + (pid_k * R3 + (n + 1) * R2 +\n R1) % R0 % (H - BLOCK_SIZE_K * BLOCK_SIZE_N)\n\n # -----------------------------------------------------------\n # Write back the block of the output matrix C\n # [BLOCK_SIZE_M, BLOCK_SIZE_K]\n offs_ck = pid_k * BLOCK_SIZE_K + tl.arange(0, BLOCK_SIZE_K)\n offs_cm = pid_m * BLOCK_SIZE_M + tl.arange(0, BLOCK_SIZE_M)\n c_ptrs = c_ptr + stride_cm * \\\n offs_cm[:, None] + stride_ck * offs_ck[None, :]\n c_mask = (offs_cm[:, None] < M) & (offs_ck[None, :] < K)\n tl.store(c_ptrs, c, mask=c_mask)\n" ]
[ [ "torch.empty", "torch.zeros" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
cdgreenidge/gpytorch
[ "d4cc610963bd812052e43e3aed84fb8b2ec94aa6", "d4cc610963bd812052e43e3aed84fb8b2ec94aa6", "d4cc610963bd812052e43e3aed84fb8b2ec94aa6" ]
[ "test/lazy/test_added_diag_lazy_tensor.py", "test/lazy/test_toeplitz_lazy_tensor.py", "gpytorch/lazy/lazy_tensor.py" ]
[ "#!/usr/bin/env python3\n\nimport torch\nimport unittest\nfrom gpytorch.lazy import NonLazyTensor, DiagLazyTensor, AddedDiagLazyTensor\nfrom test.lazy._lazy_tensor_test_case import LazyTensorTestCase\n\n\nclass TestAddedDiagLazyTensor(LazyTensorTestCase, unittest.TestCase):\n seed = 0\n should_test_sample = True\n\n def create_lazy_tensor(self):\n tensor = torch.randn(5, 5)\n tensor = tensor.transpose(-1, -2).matmul(tensor)\n tensor.requires_grad_(True)\n diag = torch.tensor([1.0, 2.0, 4.0, 2.0, 3.0], requires_grad=True)\n return AddedDiagLazyTensor(NonLazyTensor(tensor), DiagLazyTensor(diag))\n\n def evaluate_lazy_tensor(self, lazy_tensor):\n diag = lazy_tensor._diag_tensor._diag\n tensor = lazy_tensor._lazy_tensor.tensor\n return tensor + diag.diag()\n\n\nclass TestAddedDiagLazyTensorBatch(LazyTensorTestCase, unittest.TestCase):\n seed = 4\n should_test_sample = True\n\n def create_lazy_tensor(self):\n tensor = torch.randn(3, 5, 5)\n tensor = tensor.transpose(-1, -2).matmul(tensor)\n tensor.requires_grad_(True)\n diag = torch.tensor(\n [[1.0, 2.0, 4.0, 2.0, 3.0], [2.0, 1.0, 2.0, 1.0, 4.0], [1.0, 2.0, 2.0, 3.0, 4.0]], requires_grad=True\n )\n return AddedDiagLazyTensor(NonLazyTensor(tensor), DiagLazyTensor(diag))\n\n def evaluate_lazy_tensor(self, lazy_tensor):\n diag = lazy_tensor._diag_tensor._diag\n tensor = lazy_tensor._lazy_tensor.tensor\n return tensor + torch.cat([diag[i].diag().unsqueeze(0) for i in range(3)])\n\n\nif __name__ == \"__main__\":\n unittest.main()\n", "#!/usr/bin/env python3\n\nimport torch\nimport unittest\nimport gpytorch.utils.toeplitz as toeplitz\nfrom gpytorch.lazy import ToeplitzLazyTensor\nfrom test.lazy._lazy_tensor_test_case import LazyTensorTestCase\n\n\nclass TestToeplitzLazyTensor(LazyTensorTestCase, unittest.TestCase):\n seed = 1\n\n def create_lazy_tensor(self):\n toeplitz_column = torch.tensor([4, 0.5, 0, 1], dtype=torch.float, requires_grad=True)\n return ToeplitzLazyTensor(toeplitz_column)\n\n def evaluate_lazy_tensor(self, lazy_tensor):\n return toeplitz.sym_toeplitz(lazy_tensor.column)\n\n\nclass TestToeplitzLazyTensorBatch(LazyTensorTestCase, unittest.TestCase):\n seed = 0\n\n def create_lazy_tensor(self):\n toeplitz_column = torch.tensor([[2, -1, 0.5, 0.25], [4, 0.5, 0, 1]], dtype=torch.float, requires_grad=True)\n return ToeplitzLazyTensor(toeplitz_column)\n\n def evaluate_lazy_tensor(self, lazy_tensor):\n return torch.cat(\n [\n toeplitz.sym_toeplitz(lazy_tensor.column[0]).unsqueeze(0),\n toeplitz.sym_toeplitz(lazy_tensor.column[1]).unsqueeze(0),\n ]\n )\n\n\nif __name__ == \"__main__\":\n unittest.main()\n", "#!/usr/bin/env python3\n\nimport math\nimport warnings\nimport gpytorch\nimport torch\n\nfrom .. import settings\nfrom ..functions._inv_matmul import InvMatmul\nfrom ..functions._inv_quad_log_det import InvQuadLogDet\nfrom ..functions._matmul import Matmul\nfrom ..functions._root_decomposition import RootDecomposition\nfrom ..utils import linear_cg\nfrom ..utils.broadcasting import _matmul_broadcast_shape\nfrom ..utils.deprecation import _deprecate_renamed_methods\nfrom ..utils.memoize import cached\nfrom ..utils.qr import batch_qr\nfrom ..utils.svd import batch_svd\nfrom .lazy_tensor_representation_tree import LazyTensorRepresentationTree\n\n\nclass LazyTensor(object):\n \"\"\"\n Base class for LazyTensors in GPyTorch.\n\n In GPyTorch, nearly all covariance matrices for Gaussian processes are handled internally as some variety of\n LazyTensor. 
A LazyTensor is an object that represents a tensor object, similar to :class:`torch.tensor`, but\n typically differs in two ways:\n\n #. A tensor represented by a LazyTensor can typically be represented more efficiently than storing a full matrix.\n For example, a LazyTensor representing :math:`K=XX^{\\\\top}` where :math:`K` is :math:`n \\\\times n` but\n :math:`X` is :math:`n \\\\times d` might store :math:`X` instead of :math:`K` directly.\n #. A LazyTensor typically defines a matmul routine that performs :math:`KM` that is more efficient than storing\n the full matrix. Using the above example, performing :math:`KM=X(X^{\\\\top}M)` requires only :math:`O(nd)` time,\n rather than the :math:`O(n^2)` time required if we were storing :math:`K` directly.\n\n In order to define a new LazyTensor class that can be used as a covariance matrix in GPyTorch, a user must define\n at a minimum the following methods (in each example, :math:`K` denotes the matrix that the LazyTensor represents)\n\n * :func:`~gpytorch.lazy.LazyTensor._get_indices`, which returns a Tensor where the entries are determined by\n LongTensors of indices.\n * :func:`~gpytorch.lazy.LazyTensor._matmul`, which performs a matrix multiplication :math:`KM`\n * :func:`~gpytorch.lazy.LazyTensor._quad_form_derivative`, which computes a quadratic form with the derivative,\n :math:`\\mathbf{v}^{\\\\top}\\\\frac{dK}{dR}\\mathbf{v}`, where :math:`R` denotes the actual tensors used to represent\n :math:`K`. In the linear kernel example, :math:`K=XX^{\\\\top}`, this would be :math:`\\\\frac{dK}{dX}`. If :math:`K`\n is a Toeplitz matrix (see :class:`gpytorch.lazy.ToeplitzLazyTensor`) represented by its first column\n :math:`\\mathbf{c}`, this would return :math:`\\mathbf{v}^{\\\\top}\\\\frac{dK}{d\\mathbf{c}}\\mathbf{v}`.\n * :func:`~gpytorch.lazy.LazyTensor._size`, which returns a :class:`torch.Size` containing the dimensions of\n :math:`K`.\n * :func:`~gpytorch.lazy.LazyTensor._transpose_nonbatch`, which returns a transposed version of the LazyTensor\n\n In addition to these, a LazyTensor may need to define the :func:`~gpytorch.lazy.LazyTensor._transpose_nonbatch`,\n :func:`~gpytorch.lazy.LazyTensor._get_indices`, and :func:`~gpytorch.lazy.LazyTensor._get_indices`\n functions in special cases. See the documentation for these methods for details.\n\n .. note::\n The base LazyTensor class provides default implementations of many other operations in order to mimic the\n behavior of a standard tensor as closely as possible. For example, we provide default implementations of\n :func:`~gpytorch.lazy.LazyTensor.__getitem__`, :func:`~gpytorch.lazy.LazyTensor.__add__`, etc that either\n make use of other lazy tensors or exploit the functions that **must** be defined above.\n\n While these implementations are provided for convenience, it is advisable in many cases to override them for the\n sake of efficiency.\n\n .. note::\n LazyTensors are designed by default to optionally represent batches of matrices. Thus, the size of a\n LazyTensor may be (for example) :math:`b \\times n \\times n`. 
Many of the methods are designed to efficiently\n operate on these batches if present.\n \"\"\"\n\n def _get_indices(self, left_indices, right_indices, *batch_indices):\n \"\"\"\n Returns entries of the matrix, indexed by batch, row, and column indices\n \"\"\"\n raise NotImplementedError(\"The class {} requires a _get_indices function!\".format(self.__class__.__name__))\n\n def _matmul(self, rhs):\n \"\"\"\n Performs a matrix multiplication :math:`KM` with the matrix :math:`K` that this LazyTensor represents. Should\n behave as :func:`torch.matmul`. If the LazyTensor represents a batch of matrices, this method should therefore\n operate in batch mode as well.\n\n ..note::\n This method is intended to be used only internally by various Functions that support backpropagation\n (e.g., :class:`gpytorch.functions.Matmul`). Once this method is defined, it is strongly recommended that\n one use :func:`~gpytorch.lazy.LazyTensor.matmul` instead, which makes use of this method properly.\n\n Args:\n rhs (:obj:`torch.tensor`): the matrix :math:`M` to multiply with.\n\n Returns:\n :obj:`torch.tensor`: matrix * rhs\n \"\"\"\n raise NotImplementedError(\"The class {} requires a _matmul function!\".format(self.__class__.__name__))\n\n def _probe_vectors_and_norms(self):\n return None, None\n\n def _solve(self, rhs, preconditioner, num_tridiag=None):\n return linear_cg(\n self._matmul,\n rhs,\n n_tridiag=num_tridiag,\n max_iter=settings.max_cg_iterations.value(),\n max_tridiag_iter=settings.max_lanczos_quadrature_iterations.value(),\n preconditioner=preconditioner\n )\n\n def _size(self):\n \"\"\"\n Returns the size of the resulting Tensor that the lazy tensor represents.\n\n ..note::\n This method is used internally by the related function :func:`~gpytorch.lazy.LazyTensor.size`,\n which does some additional work. Calling this method directly is discouraged.\n\n Returns:\n :obj:`torch.Size`: The size of the matrix :math:`K` represented by this LazyTensor\n \"\"\"\n raise NotImplementedError(\"The class {} requires a _size function!\".format(self.__class__.__name__))\n\n def _transpose_nonbatch(self):\n \"\"\"\n Transposes non-batch dimensions (e.g. last two)\n Implement this method, rather than transpose() or t().\n\n ..note::\n This method is used internally by the related function :func:`~gpytorch.lazy.LazyTensor.transpose`, which\n does some additional work. Calling this method directly is discouraged.\n \"\"\"\n raise NotImplementedError(\n \"The class {} requires a _transpose_nonbatch function!\".format(self.__class__.__name__)\n )\n\n def __init__(self, *args, **kwargs):\n self._args = args\n self._kwargs = kwargs\n\n @property\n def _args(self):\n return self._args_memo\n\n @_args.setter\n def _args(self, args):\n self._args_memo = args\n\n def _approx_diag(self):\n \"\"\"\n (Optional) returns an (approximate) diagonal of the matrix\n\n Sometimes computing an exact diagonal is a bit computationally slow\n When we don't need an exact diagonal (e.g. for the pivoted cholesky\n decomposition, this function is called\n\n Defaults to calling the exact diagonal function\n\n Returns:\n tensor: - the diagonal (or batch of diagonals)\n \"\"\"\n return self.diag()\n\n def _getitem(self, *indices):\n \"\"\"\n Supports subindexing of the matrix this LazyTensor represents. This may return either another\n :obj:`gpytorch.lazy.LazyTensor` or a :obj:`torch.tensor` depending on the exact implementation.\n\n ..note::\n LazyTensor.__getitem__ uses this as a helper method. 
If you are writing your own custom LazyTensor,\n            override this method rather than __getitem__ (so that you don't have to repeat the extra work)\n\n        ..note::\n            This method is used internally by the related function :func:`~gpytorch.lazy.LazyTensor.__getitem__`,\n            which does some additional work. Calling this method directly is discouraged.\n\n        Args:\n            :attr:`indices` (tuple of `int`s, `slice`s, or `LongTensor`s):\n                A collection of indices for each of the dimensions. There will be exactly one index per dimension.\n        \"\"\"\n        if settings.debug.on():\n            if len(indices) != self.dim():\n                raise RuntimeError(\n                    \"{}._getitem() called with {} indices - expected {}. \"\n                    \"This is potentially a bug in GPyTorch.\".format(self.__class__.__name__, len(indices), self.dim())\n                )\n\n        components = list(self._args)\n        indices = list(indices)\n\n        # Normal case if we're indexing the LT with ints or slices\n        # Also squeeze dimensions if we're indexing with tensors\n        squeeze_left = False\n        squeeze_right = False\n        if isinstance(indices[-2], int):\n            indices[-2] = slice(indices[-2], indices[-2] + 1, None)\n            squeeze_left = True\n        elif torch.is_tensor(indices[-2]):\n            squeeze_left = True\n        if isinstance(indices[-1], int):\n            indices[-1] = slice(indices[-1], indices[-1] + 1, None)\n            squeeze_right = True\n        elif torch.is_tensor(indices[-1]):\n            squeeze_right = True\n\n        # Handle batch dimensions\n        isbatch = self.dim() >= 3\n        first_tensor_index_dim = None\n        if isbatch:\n            batch_index = tuple(indices[:-2])\n            for i, item in enumerate(components):\n                components[i] = item[batch_index]\n\n            for i, idx in enumerate(batch_index):\n                if torch.is_tensor(idx):\n                    first_tensor_index_dim = i\n                    break\n\n        new_lazy_tensor = self.__class__(*components, **self._kwargs)\n\n        # Handle index\n        left_index = indices[-2]\n        right_index = indices[-1]\n\n        # Special case: if both row and col are not indexed, then we are done\n        if (\n            not torch.is_tensor(left_index)\n            and left_index == slice(None, None, None)\n            and not torch.is_tensor(right_index)\n            and right_index == slice(None, None, None)\n        ):\n            return new_lazy_tensor\n\n        # Special case: if both row and col are tensor indexed, then we use _get_indices\n        if torch.is_tensor(left_index) and torch.is_tensor(right_index):\n            if left_index.numel() != right_index.numel():\n                raise RuntimeError(\n                    \"Expected the tensor indices to be the same size: got {} and {}\".format(\n                        left_index.numel(), right_index.numel()\n                    )\n                )\n\n            if new_lazy_tensor.ndimension() == 2:\n                return new_lazy_tensor._get_indices(left_index, right_index)\n\n            else:\n                batch_index = torch.arange(0, new_lazy_tensor.size(0), dtype=torch.long, device=self.device)\n                if first_tensor_index_dim is not None:\n                    if batch_index.numel() != left_index.numel():\n                        raise RuntimeError(\n                            \"Expected the tensor indices to be the same size: got {}, {} and {}\".format(\n                                batch_index.numel(), left_index.numel(), right_index.numel()\n                            )\n                        )\n                    return new_lazy_tensor._get_indices(left_index, right_index, batch_index)\n                else:\n                    batch_size = batch_index.numel()\n                    row_col_size = left_index.numel()\n                    batch_index = batch_index.unsqueeze(1).repeat(1, row_col_size).view(-1)\n                    left_index = left_index.unsqueeze(1).repeat(batch_size, 1).view(-1)\n                    right_index = right_index.unsqueeze(1).repeat(batch_size, 1).view(-1)\n                    res = new_lazy_tensor._get_indices(left_index, right_index, batch_index)\n                    return res.view(batch_size, row_col_size)\n\n        # Normal case: we have to do some processing on either the rows or columns\n        res = new_lazy_tensor._getitem_nonbatch(left_index, right_index, 
first_tensor_index_dim)\n if (squeeze_left or squeeze_right) and isinstance(res, LazyTensor):\n res = res.evaluate()\n if squeeze_left:\n res = res.squeeze(-2)\n if squeeze_right:\n res = res.squeeze(-1)\n\n return res\n\n def _getitem_nonbatch(self, row_index, col_index, first_tensor_index_dim=None):\n \"\"\"\n Given an index over rows and columns, gets those items from the LazyTensor.\n Implementing this is not necessary, but it improves performance\n\n Args:\n row_index (slice or LongTensor): index over rows\n col_index (slice or LongTensor): index over columns\n first_tensor_index_dim (int or None): first batch dim to have a tensor index (default: None)\n\n Returns:\n LazyTensor\n \"\"\"\n from .interpolated_lazy_tensor import InterpolatedLazyTensor\n\n ndimension = self.ndimension()\n batch_sizes = list(self.size()[:-2])\n\n left_row_iter = torch.arange(0, self.size()[-2], dtype=torch.long, device=self.device)\n right_row_iter = torch.arange(0, self.size()[-1], dtype=torch.long, device=self.device)\n left_interp_indices = left_row_iter[row_index].unsqueeze(-1)\n right_interp_indices = right_row_iter[col_index].unsqueeze(-1)\n\n left_interp_len = len(left_interp_indices)\n right_interp_len = len(right_interp_indices)\n for _ in range(ndimension - 2):\n left_interp_indices.unsqueeze_(0)\n right_interp_indices.unsqueeze_(0)\n\n if first_tensor_index_dim is not None and torch.is_tensor(row_index):\n view_size = [1] * ndimension\n view_size[first_tensor_index_dim] = left_interp_indices.numel()\n left_interp_indices = left_interp_indices.view(*view_size).expand(*(batch_sizes + [1, 1]))\n else:\n left_interp_indices = left_interp_indices.expand(*(batch_sizes + [left_interp_len, 1]))\n left_interp_values = torch.ones(left_interp_indices.size(), dtype=self.dtype, device=self.device)\n if first_tensor_index_dim is not None and torch.is_tensor(col_index):\n view_size = [1] * ndimension\n view_size[first_tensor_index_dim] = right_interp_indices.numel()\n right_interp_indices = right_interp_indices.view(*view_size).expand(*(batch_sizes + [1, 1]))\n else:\n right_interp_indices = right_interp_indices.expand(*(batch_sizes + [right_interp_len, 1]))\n right_interp_values = torch.ones(right_interp_indices.size(), dtype=self.dtype, device=self.device)\n\n res = InterpolatedLazyTensor(\n self, left_interp_indices, left_interp_values, right_interp_indices, right_interp_values\n )\n return res\n\n def _inv_matmul_preconditioner(self):\n \"\"\"\n (Optional) define a preconditioner that can be used for linear systems, but not necessarily\n for log determinants. 
By default, this can call :meth:`~gpytorch.lazy.LazyTensor._preconditioner`.\n\n Returns:\n function: a function on x which performs P^{-1}(x)\n \"\"\"\n base_precond, _ = self._preconditioner()\n\n if base_precond is not None:\n return base_precond\n elif gpytorch.beta_features.default_preconditioner.on():\n if hasattr(self, \"_default_preconditioner_cache\"):\n U, S, V = self._default_preconditioner_cache\n else:\n precond_basis_size = min(gpytorch.settings.max_preconditioner_size.value(), self.size(-1))\n random_basis = torch.randn(\n self.batch_shape + torch.Size((self.size(-2), precond_basis_size)),\n device=self.device,\n dtype=self.dtype,\n )\n projected_mat = self._matmul(random_basis)\n proj_q = batch_qr(projected_mat)\n orthog_projected_mat = self._matmul(proj_q).transpose(-2, -1)\n U, S, V = batch_svd(orthog_projected_mat)\n U = proj_q.matmul(U)\n\n self._default_preconditioner_cache = (U, S, V)\n\n def preconditioner(v):\n res = V.transpose(-2, -1).matmul(v)\n res = (1 / S).unsqueeze(-1) * res\n res = U.matmul(res)\n return res\n\n return preconditioner\n else:\n return None\n\n def _quad_form_derivative(self, left_vecs, right_vecs):\n \"\"\"\n Given u (left_vecs) and v (right_vecs),\n Computes the derivatives of (u^t K v) w.r.t. K\n\n ..note::\n This method is intended to be used only internally by various Functions that support backpropagation.\n For example, this method is used internally by :func:`~gpytorch.lazy.LazyTensor.inv_quad_logdet`. It is\n not likely that users will need to call this method directly.\n\n Returns:\n :obj:`torch.tensor`: derivative with respect to the arguments that are actually used to represent this\n this LazyTensor.\n \"\"\"\n from collections import deque\n\n args = tuple(self.representation())\n args_with_grads = tuple(arg for arg in args if arg.requires_grad)\n\n # Easy case: if we don't require any gradients, then just return!\n if not len(args_with_grads):\n return tuple(None for _ in args)\n\n # Normal case: we'll use the autograd to get us a derivative\n with torch.autograd.enable_grad():\n loss = (left_vecs * self._matmul(right_vecs)).sum()\n loss.requires_grad_(True)\n actual_grads = deque(torch.autograd.grad(loss, args_with_grads, allow_unused=True))\n\n # Now make sure that the object we return has one entry for every item in args\n grads = []\n for arg in args:\n if arg.requires_grad:\n grads.append(actual_grads.popleft())\n else:\n grads.append(None)\n\n return grads\n\n def _preconditioner(self):\n \"\"\"\n (Optional) define a preconditioner (P) for linear conjugate gradients\n\n Returns:\n function: a function on x which performs P^{-1}(x)\n scalar: the log determinant of P\n \"\"\"\n return None, None\n\n def _t_matmul(self, rhs):\n \"\"\"\n Performs a transpose matrix multiplication :math:`K^{\\\\top}M` with the matrix :math:`K` that this\n LazyTensor represents.\n\n Args:\n rhs (:obj:`torch.tensor`): the matrix :math:`M` to multiply with.\n\n Returns:\n :obj:`torch.tensor`: matrix * rhs\n \"\"\"\n return self.transpose(-1, -2)._matmul(rhs)\n\n def add_diag(self, diag):\n \"\"\"\n Adds an element to the diagonal of the matrix.\n\n Args:\n - diag (Scalar Tensor)\n \"\"\"\n from .diag_lazy_tensor import DiagLazyTensor\n from .added_diag_lazy_tensor import AddedDiagLazyTensor\n\n if self.size(-1) != self.size(-2):\n raise RuntimeError(\"add_diag only defined for square matrices\")\n\n # Expand things the correct way\n if self.ndimension() == 3:\n if diag.dim() == 0:\n diag = diag.view(1, 1).expand(self.size(0), self.size(1))\n elif 
diag.dim() == 1:\n diag = diag.unsqueeze(0).expand(self.size(0), self.size(1))\n elif diag.ndimension() == 2:\n diag = diag.expand(self.size(0), self.size(1))\n else:\n raise RuntimeError(\n \"For a 3D tensor ({}), add_diag expects a 1D or 2D diag. \"\n \"Got size ({})\".format(self.size(), diag.size())\n )\n else:\n if diag.dim() == 0:\n diag = diag.view(1).expand(self.size(0))\n elif diag.dim() == 1:\n diag = diag.expand(self.size(0))\n else:\n raise RuntimeError(\n \"For a 2D tensor ({}), add_diag expects a 0D or 1D diag. \"\n \"Got size ({})\".format(self.size(), diag.size())\n )\n\n diag_lazy_tsr = DiagLazyTensor(diag)\n return AddedDiagLazyTensor(self, diag_lazy_tsr)\n\n def add_jitter(self, jitter_val=1e-3):\n \"\"\"\n Adds jitter (i.e., a small diagonal component) to the matrix this\n LazyTensor represents. This could potentially be implemented as a no-op,\n however this could lead to numerical instabilities, so this should only\n be done at the user's risk.\n \"\"\"\n diag = torch.tensor(jitter_val, dtype=self.dtype, device=self.device)\n return self.add_diag(diag)\n\n @property\n def batch_dim(self):\n \"\"\"\n Returns the dimension of the shape over which the tensor is batched.\n \"\"\"\n return len(self.batch_shape)\n\n @property\n def batch_shape(self):\n \"\"\"\n Returns the shape over which the tensor is batched.\n \"\"\"\n return self.shape[:-2]\n\n def clone(self):\n \"\"\"\n Clones the LazyTensor (creates clones of all underlying tensors)\n \"\"\"\n args = [arg.clone() if hasattr(arg, \"clone\") else arg for arg in self._args]\n kwargs = {key: val.clone() if hasattr(val, \"clone\") else val for key, val in self._kwargs.items()}\n return self.__class__(*args, **kwargs)\n\n def cpu(self):\n \"\"\"\n Returns:\n :obj:`~gpytorch.lazy.LazyTensor`: a new LazyTensor identical to ``self``, but on the CPU.\n \"\"\"\n new_args = []\n new_kwargs = {}\n for arg in self._args:\n if hasattr(arg, \"cpu\"):\n new_args.append(arg.cpu())\n else:\n new_args.append(arg)\n for name, val in self._kwargs.items():\n if hasattr(val, \"cpu\"):\n new_kwargs[name] = val.cpu()\n else:\n new_kwargs[name] = val\n return self.__class__(*new_args, **new_kwargs)\n\n def cuda(self, device_id=None):\n \"\"\"\n This method operates identically to :func:`torch.nn.Module.cuda`.\n\n Args:\n device_id (:obj:`str`, optional):\n Device ID of GPU to use.\n Returns:\n :obj:`~gpytorch.lazy.LazyTensor`:\n a new LazyTensor identical to ``self``, but on the GPU.\n \"\"\"\n new_args = []\n new_kwargs = {}\n for arg in self._args:\n if hasattr(arg, \"cuda\"):\n new_args.append(arg.cuda(device_id))\n else:\n new_args.append(arg)\n for name, val in self._kwargs.items():\n if hasattr(val, \"cuda\"):\n new_kwargs[name] = val.cuda(device_id)\n else:\n new_kwargs[name] = val\n return self.__class__(*new_args, **new_kwargs)\n\n @property\n def device(self):\n return self._args[0].device\n\n def detach(self):\n \"\"\"\n Removes the LazyTensor from the current computation graph.\n (In practice, this function removes all Tensors that make up the\n LazyTensor from the computation graph.)\n \"\"\"\n return self.clone().detach_()\n\n def detach_(self):\n \"\"\"\n An in-place version of `detach`.\n \"\"\"\n for arg in self._args:\n if hasattr(arg, \"detach\"):\n arg.detach_()\n for val in self._kwargs.values():\n if hasattr(val, \"detach\"):\n val.detach_()\n return self\n\n def diag(self):\n \"\"\"\n As :func:`torch.diag`, returns the diagonal of the matrix :math:`K` this LazyTensor represents as a vector.\n\n Returns:\n 
:obj:`torch.tensor`: The diagonal of :math:`K`. If :math:`K` is :math:`n \\times n`, this will be a length\n n vector. If this LazyTensor represents a batch (e.g., is :math:`b \\times n \\times n`), this will be a\n :math:`b \\times n` matrix of diagonals, one for each matrix in the batch.\n \"\"\"\n size = self.size()\n if size[-1] != size[-2]:\n raise RuntimeError(\"Diag works on square matrices (or batches)\")\n\n row_col_iter = torch.arange(0, size[-1], dtype=torch.long, device=self.device)\n if self.ndimension() == 3:\n batch_iter = torch.arange(0, size[0], dtype=torch.long, device=self.device)\n batch_iter = batch_iter.unsqueeze(1).repeat(1, size[1]).view(-1)\n row_col_iter = row_col_iter.unsqueeze(1).repeat(size[0], 1).view(-1)\n return self._get_indices(row_col_iter, row_col_iter, batch_iter).view(size[0], size[1])\n else:\n return self._get_indices(row_col_iter, row_col_iter)\n\n def dim(self):\n \"\"\"\n Alias of :meth:`~gpytorch.lazy.LazyTensor.ndimension`\n \"\"\"\n return self.ndimension()\n\n @property\n def dtype(self):\n return self._args[0].dtype\n\n def expand(self, *sizes):\n if len(sizes) == 1 and hasattr(sizes, \"__iter__\"):\n shape = sizes[0]\n elif all(isinstance(size, int) for size in sizes):\n shape = torch.Size(sizes)\n else:\n raise RuntimeError(\"Invalid arguments {} to expand.\".format(sizes))\n\n current_shape = torch.Size([1 for _ in range(len(shape) - self.dim())] + list(self.shape))\n repeat_shape = torch.Size(\n [expand_size // current_size for expand_size, current_size in zip(shape, current_shape)]\n )\n return self.repeat(*repeat_shape)\n\n @cached\n def evaluate(self):\n \"\"\"\n Explicitly evaluates the matrix this LazyTensor represents. This function\n should return a Tensor storing an exact representation of this LazyTensor.\n \"\"\"\n num_rows, num_cols = self.matrix_shape\n\n if num_rows < num_cols:\n eye = torch.eye(num_rows, dtype=self.dtype, device=self.device)\n eye = eye.expand(*self.batch_shape, num_rows, num_rows)\n return self.transpose(-1, -2).matmul(eye).transpose(-1, -2).contiguous()\n else:\n eye = torch.eye(num_cols, dtype=self.dtype, device=self.device)\n eye = eye.expand(*self.batch_shape, num_cols, num_cols)\n return self.matmul(eye)\n\n def evaluate_kernel(self):\n \"\"\"\n Return a new LazyTensor representing the same one as this one, but with\n all lazily evaluated kernels actually evaluated.\n \"\"\"\n return self.representation_tree()(*self.representation())\n\n def inv_matmul(self, right_tensor, left_tensor=None):\n \"\"\"\n Computes a linear solve (w.r.t self = :math:`A`) with several right hand sides :math:`R`.\n I.e. computes\n\n .. math::\n\n \\begin{equation}\n A^{-1} R,\n \\end{equation}\n\n where :math:`R` is :attr:`right_tensor` and :math:`A` is the LazyTensor.\n\n If :attr:`left_tensor` is supplied, computes\n\n .. math::\n\n \\begin{equation}\n L A^{-1} R,\n \\end{equation}\n\n where :math:`L` is :attr:`left_tensor`. Supplying this can reduce the number of\n CG calls required.\n\n Args:\n - :obj:`torch.tensor` (n x k) - Matrix :math:`R` right hand sides\n - :obj:`torch.tensor` (m x n) - Optional matrix :math:`L` to perform left multiplication with\n\n Returns:\n - :obj:`torch.tensor` - :math:`A^{-1}R` or :math:`LA^{-1}R`.\n \"\"\"\n if not self.is_square:\n raise RuntimeError(\n \"inv_matmul only operates on (batches of) square (positive semi-definite) LazyTensors. 
\"\n \"Got a {} of size {}.\".format(self.__class__.__name__, self.size())\n )\n\n if self.dim() == 2 and right_tensor.dim() == 1:\n if self.shape[-1] != right_tensor.numel():\n raise RuntimeError(\n \"LazyTensor (size={}) cannot be multiplied with right-hand-side Tensor (size={}).\".format(\n self.shape, right_tensor.shape\n )\n )\n\n func = InvMatmul(\n self.representation_tree(), preconditioner=self._inv_matmul_preconditioner(),\n has_left=(left_tensor is not None)\n )\n if left_tensor is None:\n return func(right_tensor, *self.representation())\n else:\n return func(left_tensor, right_tensor, *self.representation())\n\n def inv_quad(self, tensor, reduce_inv_quad=True):\n \"\"\"\n Computes an inverse quadratic form (w.r.t self) with several right hand sides.\n I.e. computes tr( tensor^T self^{-1} tensor )\n\n NOTE: Don't overwrite this function!\n Instead, overwrite inv_quad_logdet\n\n Args:\n - tensor (tensor nxk) - Vector (or matrix) for inverse quad\n\n Returns:\n - tensor - tr( tensor^T (self)^{-1} tensor )\n \"\"\"\n res, _ = self.inv_quad_logdet(inv_quad_rhs=tensor, logdet=False, reduce_inv_quad=reduce_inv_quad)\n return res\n\n def inv_quad_logdet(self, inv_quad_rhs=None, logdet=False, reduce_inv_quad=True):\n \"\"\"\n Computes an inverse quadratic form (w.r.t self) with several right hand sides.\n I.e. computes tr( tensor^T self^{-1} tensor )\n In addition, computes an (approximate) log determinant of the the matrix\n\n Args:\n - tensor (tensor nxk) - Vector (or matrix) for inverse quad\n\n Returns:\n - scalar - tr( tensor^T (self)^{-1} tensor )\n - scalar - log determinant\n \"\"\"\n if not self.is_square:\n raise RuntimeError(\n \"inv_quad_logdet only operates on (batches of) square (positive semi-definite) LazyTensors. \"\n \"Got a {} of size {}.\".format(self.__class__.__name__, self.size())\n )\n\n if inv_quad_rhs is not None:\n if self.dim() == 2 and inv_quad_rhs.dim() == 1:\n if self.shape[-1] != inv_quad_rhs.numel():\n raise RuntimeError(\n \"LazyTensor (size={}) cannot be multiplied with right-hand-side Tensor (size={}).\".format(\n self.shape, inv_quad_rhs.shape\n )\n )\n elif self.dim() != inv_quad_rhs.dim():\n raise RuntimeError(\n \"LazyTensor (size={}) and right-hand-side Tensor (size={}) should have the same number \"\n \"of dimensions.\".format(self.shape, inv_quad_rhs.shape)\n )\n elif self.batch_shape != inv_quad_rhs.shape[:-2] or self.shape[-1] != inv_quad_rhs.shape[-2]:\n raise RuntimeError(\n \"LazyTensor (size={}) cannot be multiplied with right-hand-side Tensor (size={}).\".format(\n self.shape, inv_quad_rhs.shape\n )\n )\n\n args = self.representation()\n if inv_quad_rhs is not None:\n args = [inv_quad_rhs] + list(args)\n\n probe_vectors, probe_vector_norms = self._probe_vectors_and_norms()\n inv_quad_term, logdet_term = InvQuadLogDet(\n representation_tree=self.representation_tree(),\n matrix_shape=self.matrix_shape,\n batch_shape=self.batch_shape,\n dtype=self.dtype,\n device=self.device,\n inv_quad=(inv_quad_rhs is not None),\n logdet=logdet,\n preconditioner=self._preconditioner()[0],\n logdet_correction=self._preconditioner()[1],\n probe_vectors=probe_vectors,\n probe_vector_norms=probe_vector_norms,\n )(*args)\n\n if inv_quad_term.numel() and reduce_inv_quad:\n inv_quad_term = inv_quad_term.sum(-1)\n return inv_quad_term, logdet_term\n\n @property\n def is_square(self):\n return self.matrix_shape[0] == self.matrix_shape[1]\n\n def logdet(self):\n \"\"\"\n Computes an (approximate) log determinant of the matrix\n\n NOTE: Don't overwrite this 
function!\n Instead, overwrite inv_quad_logdet\n\n Returns:\n - scalar: log determinant\n \"\"\"\n _, res = self.inv_quad_logdet(inv_quad_rhs=None, logdet=True)\n return res\n\n def matmul(self, other):\n \"\"\"\n Multiplies self by a matrix\n\n Args:\n other (:obj:`torch.tensor`): Matrix or vector to multiply with. Can be either a :obj:`torch.tensor`\n or a :obj:`gpytorch.lazy.LazyTensor`.\n\n Returns:\n :obj:`torch.tensor`: Tensor or LazyTensor containing the result of the matrix multiplication :math:`KM`,\n where :math:`K` is the (batched) matrix that this :obj:`gpytorch.lazy.LazyTensor` represents, and :math:`M`\n is the (batched) matrix input to this method.\n \"\"\"\n # TODO: Move this check to MatmulLazyTensor and Matmul (so we can pass the shapes through from there)\n _matmul_broadcast_shape(self.shape, other.shape)\n\n if isinstance(other, LazyTensor):\n from .matmul_lazy_tensor import MatmulLazyTensor\n\n return MatmulLazyTensor(self, other)\n\n func = Matmul(self.representation_tree())\n return func(other, *self.representation())\n\n @property\n def matrix_shape(self):\n \"\"\"\n Returns the shape of the matrix being represented (without batching).\n \"\"\"\n return torch.Size(self.shape[-2:])\n\n def mul(self, other):\n \"\"\"\n Multiplies the matrix by a constant, or elementwise the matrix by another matrix\n\n Args:\n other (:obj:`torch.tensor` or :obj:`~gpytorch.lazy.LazyTensor`): constant or matrix to elementwise\n multiply by.\n\n Returns:\n :obj:`gpytorch.lazy.LazyTensor`: Another lazy tensor representing the result of the multiplication. If\n other was a constant (or batch of constants), this will likely be a\n :obj:`gpytorch.lazy.ConstantMulLazyTensor`. If other was\n another matrix, this will likely be a :obj:`gpytorch.lazy.MulLazyTensor`.\n \"\"\"\n if not (torch.is_tensor(other) or isinstance(other, LazyTensor)) or (\n torch.is_tensor(other) and (other.numel() == 1 or (self.dim() == 3 and other.numel() == self.size(0)))\n ):\n from .constant_mul_lazy_tensor import ConstantMulLazyTensor\n\n return ConstantMulLazyTensor(self, other)\n\n elif other.size() == self.size():\n from .mul_lazy_tensor import MulLazyTensor\n\n return MulLazyTensor(self, other).evaluate_kernel()\n\n else:\n raise RuntimeError(\n '\"other\" must be a constant (or batch of constants), or the same size as self.\\n'\n \"Expected: size of [1] or [%d] or %s.\\n\"\n \"Got: size of %s\"\n % (self.size(0) if self.ndimension() == 3 else 1, repr(self.size()), repr(other.size()))\n )\n\n def mul_batch(self, mul_batch_size=None):\n \"\"\"\n For a `b x n x m` LazyTensor, compute the product over the batch dimension.\n\n The `mul_batch_size` controls whether or not the batch dimension is grouped when multiplying.\n * `mul_batch_size=None` (default): The entire batch dimension is multiplied. 
Returns a `n x m` LazyTensor.\n * `mul_batch_size=k`: Creates `b/k` groups, and muls the `k` entries of this group.\n (The LazyTensor is reshaped as a `b/k x k x n x m` LazyTensor and the `k` dimension is multiplied over.)\n Returns a `b/k x n x m` LazyTensor.\n\n Args:\n :attr:`mul_batch_size` (int or None):\n Controls the number of groups that are multiplied over (default: None).\n\n Returns:\n :obj:`~gpytorch.lazy.LazyTensor`\n\n Example:\n >>> lazy_tensor = gpytorch.lazy.NonLazyTensor(torch.tensor([\n [[2, 4], [1, 2]],\n [[1, 1], [0, -1]],\n [[2, 1], [1, 0]],\n [[3, 2], [2, -1]],\n ]))\n >>> lazy_tensor.mul_batch().evaluate()\n >>> # Returns: torch.Tensor([[12, 8], [0, 0]])\n >>> lazy_tensor.mul_batch(mul_batch_size=2)\n >>> # Returns: torch.Tensor([[[2, 4], [0, -2]], [[6, 2], [2, 0]]])\n \"\"\"\n from .mul_lazy_tensor import MulLazyTensor\n from .root_lazy_tensor import RootLazyTensor\n\n if self.ndimension() < 3:\n raise RuntimeError(\"mul_batch only works with batched lazy tensors\")\n if self.size(0) == 1:\n return self.sum_batch()\n\n roots = self.root_decomposition().root.evaluate()\n n_batch = roots.size(0) if mul_batch_size is None else mul_batch_size\n true_batch_size = roots.size(0) // mul_batch_size if mul_batch_size is not None else 1\n\n while True:\n roots = roots.view(true_batch_size, n_batch, roots.size(1), roots.size(2))\n\n # Take care of extra roots (odd roots), if they exist\n if n_batch % 2:\n extra_root = (\n torch.randn(roots.size(0), 1, roots.size(2), roots.size(3), dtype=roots.dtype, device=roots.device)\n .mul_(1e-6 / math.sqrt(roots.size(3)))\n .add_(1.0 / math.sqrt(roots.size(3)))\n )\n roots = torch.cat([roots, extra_root], 1)\n n_batch += 1\n\n # Divide and conquer\n # Assumes that there's an even number of roots\n part1 = roots[:, : n_batch // 2]\n part1 = part1.contiguous().view(-1, roots.size(2), roots.size(3))\n part2 = roots[:, n_batch // 2 : 2 * (n_batch // 2)]\n part2 = part2.contiguous().view(-1, roots.size(2), roots.size(3))\n\n if n_batch // 2 == 1:\n if mul_batch_size is None:\n part1 = part1.squeeze(0)\n part2 = part2.squeeze(0)\n res = MulLazyTensor(RootLazyTensor(part1), RootLazyTensor(part2)).evaluate_kernel()\n break\n else:\n res = MulLazyTensor(RootLazyTensor(part1), RootLazyTensor(part2)).evaluate_kernel()\n roots = res.root_decomposition().root.evaluate()\n n_batch = n_batch // 2\n\n return res\n\n def ndimension(self):\n \"\"\"\n Returns the number of dimensions\n \"\"\"\n return len(self.size())\n\n def numel(self):\n \"\"\"\n Returns the number of elements\n \"\"\"\n return self.shape.numel()\n\n def repeat(self, *sizes):\n \"\"\"\n Repeats this tensor along the specified dimensions.\n\n Currently, this only works to create repeated batches of a 2D LazyTensor.\n I.e. all calls should be `lazy_tensor.repeat(<size>, 1, 1)`.\n\n Example:\n >>> lazy_tensor = gpytorch.lazy.ToeplitzLazyTensor(torch.tensor([4., 1., 0.5]))\n >>> lazy_tensor.repeat(2, 1, 1).evaluate()\n tensor([[[4.0000, 1.0000, 0.5000],\n [1.0000, 4.0000, 1.0000],\n [0.5000, 1.0000, 4.0000]],\n [[4.0000, 1.0000, 0.5000],\n [1.0000, 4.0000, 1.0000],\n [0.5000, 1.0000, 4.0000]]])\n \"\"\"\n if len(sizes) < 3 or tuple(sizes[-2:]) != (1, 1):\n raise RuntimeError(\n \"Invalid repeat arguments {}. 
Currently, repeat only works to create repeated \"\n \"batches of a 2D LazyTensor.\".format(tuple(sizes))\n )\n\n from .batch_repeat_lazy_tensor import BatchRepeatLazyTensor\n\n return BatchRepeatLazyTensor(self, batch_repeat=torch.Size(sizes[:-2]))\n\n def representation(self):\n \"\"\"\n Returns the Tensors that are used to define the LazyTensor\n \"\"\"\n representation = []\n for arg in self._args:\n if torch.is_tensor(arg):\n representation.append(arg)\n elif isinstance(arg, LazyTensor):\n representation += list(arg.representation())\n else:\n raise RuntimeError(\"Representation of a LazyTensor should consist only of Tensors\")\n return tuple(representation)\n\n def representation_tree(self):\n \"\"\"\n Returns a :obj:`gpytorch.lazy.LazyTensorRepresentationTree` tree object that recursively encodes the\n representation of this lazy tensor. In particular, if the definition of this lazy tensor depends on other\n lazy tensors, the tree is an object that can be used to reconstruct the full structure of this lazy tensor,\n including all subobjects. This is used internally.\n \"\"\"\n return LazyTensorRepresentationTree(self)\n\n @property\n def requires_grad(self):\n return any(arg.requires_grad for arg in tuple(self._args) + tuple(self._kwargs.values()))\n\n @requires_grad.setter\n def requires_grad(self, val):\n for arg in self._args:\n if hasattr(arg, \"requires_grad\"):\n if arg.dtype in (torch.float, torch.double, torch.half):\n arg.requires_grad = val\n for arg in self._kwargs.values():\n if hasattr(arg, \"requires_grad\"):\n arg.requires_grad = val\n\n def requires_grad_(self, val):\n \"\"\"\n Sets `requires_grad=val` on all the Tensors that make up the LazyTensor\n This is an inplace operation.\n \"\"\"\n self.requires_grad = val\n return self\n\n @cached(name=\"root_decomposition\")\n def root_decomposition(self):\n \"\"\"\n Returns a (usually low-rank) root decomposition lazy tensor of a PSD matrix.\n This can be used for sampling from a Gaussian distribution, or for obtaining a\n low-rank version of a matrix\n \"\"\"\n from .root_lazy_tensor import RootLazyTensor\n if not self.is_square:\n raise RuntimeError(\n \"root_decomposition only operates on (batches of) square (symmetric) LazyTensors. \"\n \"Got a {} of size {}.\".format(self.__class__.__name__, self.size())\n )\n\n if (self.matrix_shape.numel() <= settings.max_cholesky_numel.value()\n or settings.fast_computations.covar_root_decomposition.off()):\n try:\n res = torch.cholesky(self.evaluate())\n return RootLazyTensor(res)\n except RuntimeError as e:\n warnings.warn(\n \"Runtime Error when computing Cholesky decomposition: {}. Using RootDecomposition.\".format(e)\n )\n\n res, _ = RootDecomposition(\n self.representation_tree(),\n max_iter=self.root_decomposition_size(),\n dtype=self.dtype,\n device=self.device,\n batch_shape=self.batch_shape,\n matrix_shape=self.matrix_shape,\n )(*self.representation())\n\n return RootLazyTensor(res)\n\n @cached\n def root_inv_decomposition(self, initial_vectors=None, test_vectors=None):\n \"\"\"\n Returns a (usually low-rank) root decomposition lazy tensor of a PSD matrix.\n This can be used for sampling from a Gaussian distribution, or for obtaining a\n low-rank version of a matrix\n \"\"\"\n from .root_lazy_tensor import RootLazyTensor\n\n if not self.is_square:\n raise RuntimeError(\n \"root_inv_decomposition only operates on (batches of) square (symmetric) LazyTensors. 
\"\n \"Got a {} of size {}.\".format(self.__class__.__name__, self.size())\n )\n\n if initial_vectors is not None:\n if self.dim() == 2 and initial_vectors.dim() == 1:\n if self.shape[-1] != initial_vectors.numel():\n raise RuntimeError(\n \"LazyTensor (size={}) cannot be multiplied with initial_vectors (size={}).\".format(\n self.shape, initial_vectors.shape\n )\n )\n elif self.dim() != initial_vectors.dim():\n raise RuntimeError(\n \"LazyTensor (size={}) and initial_vectors (size={}) should have the same number \"\n \"of dimensions.\".format(self.shape, initial_vectors.shape)\n )\n elif self.batch_shape != initial_vectors.shape[:-2] or self.shape[-1] != initial_vectors.shape[-2]:\n raise RuntimeError(\n \"LazyTensor (size={}) cannot be multiplied with initial_vectors (size={}).\".format(\n self.shape, initial_vectors.shape\n )\n )\n\n roots, inv_roots = RootDecomposition(\n self.representation_tree(),\n max_iter=self.root_decomposition_size(),\n dtype=self.dtype,\n device=self.device,\n batch_shape=self.batch_shape,\n matrix_shape=self.matrix_shape,\n root=True,\n inverse=True,\n initial_vectors=initial_vectors,\n )(*self.representation())\n\n if initial_vectors is not None and initial_vectors.size(-1) > 1:\n getattr(self, '__cache')[\"root_decomposition\"] = RootLazyTensor(roots[0])\n else:\n getattr(self, '__cache')[\"root_decomposition\"] = RootLazyTensor(roots)\n\n # Choose the best of the inv_roots, if there were more than one initial vectors\n if initial_vectors is not None and initial_vectors.size(-1) > 1:\n num_probes = initial_vectors.size(-1)\n test_vectors = test_vectors.unsqueeze(0)\n\n # Compute solves\n solves = inv_roots.matmul(inv_roots.transpose(-1, -2).matmul(test_vectors))\n\n # Compute self * solves\n solves = (\n solves.permute(*range(1, self.dim() + 1), 0)\n .contiguous()\n .view(*self.batch_shape, self.matrix_shape[-1], -1)\n )\n mat_times_solves = self.matmul(solves)\n mat_times_solves = mat_times_solves.view(*self.batch_shape, self.matrix_shape[-1], -1, num_probes).permute(\n -1, *range(0, self.dim())\n )\n\n # Compute residuals\n residuals = (mat_times_solves - test_vectors).norm(2, dim=-2)\n residuals = residuals.view(residuals.size(0), -1).sum(-1)\n\n # Choose solve that best fits\n _, best_solve_index = residuals.min(0)\n inv_root = inv_roots[best_solve_index].squeeze(0)\n\n else:\n inv_root = inv_roots\n\n return RootLazyTensor(inv_root)\n\n def root_decomposition_size(self):\n \"\"\"\n This is the inner size of the root decomposition.\n This is primarily used to determine if it will be cheaper to compute a\n different root or not\n \"\"\"\n return settings.max_root_decomposition_size.value()\n\n def size(self, val=None):\n \"\"\"\n Returns the size of the resulting Tensor that the lazy tensor represents\n \"\"\"\n size = self._size()\n if val is not None:\n return size[val]\n return size\n\n @property\n def shape(self):\n return self.size()\n\n def sum_batch(self, sum_batch_size=None):\n \"\"\"\n Sum the `b x n x m` LazyTensor over the batch dimension.\n\n The `sum_batch_size` controls whether or not the batch dimension is grouped when summing.\n * `sum_batch_size=None` (default): The entire batch dimension is summed. 
Returns a `n x m` LazyTensor.\n * `sum_batch_size=k`: Creates `b/k` groups, and sums the `k` entries of this group.\n (The LazyTensor is reshaped as a `b/k x k x n x m` LazyTensor and the `k` dimension is summed over.)\n Returns a `b/k x n x m` LazyTensor.\n\n Args:\n :attr:`sum_batch_size` (int or None):\n Controls the number of groups that are summed over (default: None).\n\n Returns:\n :obj:`~gpytorch.lazy.LazyTensor`\n\n Example:\n >>> lazy_tensor = gpytorch.lazy.NonLazyTensor(torch.tensor([\n [[2, 4], [1, 2]],\n [[1, 1], [0, -1]],\n [[2, 1], [1, 0]],\n [[3, 2], [2, -1]],\n ]))\n >>> lazy_tensor.sum_batch().evaluate()\n >>> # Returns: torch.Tensor([[8, 8], [4, 0]])\n >>> lazy_tensor.sum_batch(sum_batch_size=2)\n >>> # Returns: torch.Tensor([[[3, 5], [1, 1]], [[5, 3], [3, -1]]])\n \"\"\"\n from .sum_batch_lazy_tensor import SumBatchLazyTensor\n\n return SumBatchLazyTensor(self, num_blocks=sum_batch_size)\n\n def to(self, device_id):\n \"\"\"\n A device-agnostic method of moving the lazy_tensor to the specified device.\n\n Args:\n device_id (:obj: `torch.device`): Which device to use (GPU or CPU).\n Returns:\n :obj:`~gpytorch.lazy.LazyTensor`: New LazyTensor identical to self on specified device\n \"\"\"\n new_args = []\n new_kwargs = {}\n for arg in self._args:\n if hasattr(arg, \"to\"):\n new_args.append(arg.to(device_id))\n else:\n new_args.append(arg)\n for name, val in self._kwargs.items():\n if hasattr(val, \"to\"):\n new_kwargs[name] = val.to(device_id)\n else:\n new_kwargs[name] = val\n return self.__class__(*new_args, **new_kwargs)\n\n def t(self):\n \"\"\"\n Alias of :meth:`~gpytorch.lazy.LazyTensor.transpose` for 2D LazyTensor.\n (Transposes the two dimensions.)\n \"\"\"\n if self.ndimension() != 2:\n raise RuntimeError(\"Cannot call t for more than 2 dimensions\")\n return self.transpose(0, 1)\n\n def transpose(self, dim1, dim2):\n \"\"\"\n Transpose the dimensions `dim1` and `dim2` of the LazyTensor.\n\n Example:\n >>> lazy_tensor = gpytorch.lazy.NonLazyTensor(torch.randn(3, 5))\n >>> lazy_tensor.transpose(0, 1)\n \"\"\"\n ndimension = self.ndimension()\n if dim1 < 0:\n dim1 = ndimension + dim1\n if dim2 < 0:\n dim2 = ndimension + dim2\n if dim1 >= ndimension or dim2 >= ndimension or not isinstance(dim1, int) or not isinstance(dim2, int):\n raise RuntimeError(\"Invalid dimension\")\n\n # Batch case\n if dim1 < ndimension - 2 and dim2 < ndimension - 2:\n res = self.__class__(*(arg.transpose(dim1, dim2) for arg in self._args), **self._kwargs)\n\n elif dim1 >= ndimension - 2 and dim2 >= ndimension - 2:\n res = self._transpose_nonbatch()\n\n else:\n raise RuntimeError(\"Cannot transpose batch dimension with non-batch dimension\")\n\n return res\n\n def zero_mean_mvn_samples(self, num_samples):\n \"\"\"\n Assumes that self is a covariance matrix, or a batch of covariance matrices.\n Returns samples from a zero-mean MVN, defined by self (as covariance matrix)\n\n Self should be symmetric, either (batch_size x num_dim x num_dim) or (num_dim x num_dim)\n\n Args:\n :attr:`num_samples` (int):\n Number of samples to draw.\n\n Returns:\n :obj:`torch.tensor`:\n Samples from MVN (num_samples x batch_size x num_dim) or (num_samples x num_dim)\n \"\"\"\n if self.size()[-2:] == torch.Size([1, 1]):\n covar_root = self.evaluate().sqrt()\n else:\n covar_root = self.root_decomposition().root\n\n if self.ndimension() == 3:\n base_samples = torch.randn(\n self.size(0), covar_root.size(-1), num_samples, dtype=self.dtype, device=self.device\n )\n samples = covar_root.matmul(base_samples).permute(2, 0, 
1).contiguous()\n else:\n base_samples = torch.randn(covar_root.size(-1), num_samples, dtype=self.dtype, device=self.device)\n samples = covar_root.matmul(base_samples).permute(1, 0).contiguous()\n\n return samples\n\n def __add__(self, other):\n \"\"\"\n Return a :obj:`gpytorch.lazy.LazyTensor` that represents the sum of this lazy tensor and another matrix\n or lazy tensor.\n\n Args:\n :attr:`other` (:obj:`torch.tensor` or :obj:`gpytorch.lazy.LazyTensor`):\n Matrix to add to this one.\n\n Returns:\n :obj:`gpytorch.lazy.SumLazyTensor`:\n A sum lazy tensor representing the sum of this lazy tensor and other.\n \"\"\"\n from .sum_lazy_tensor import SumLazyTensor\n from .zero_lazy_tensor import ZeroLazyTensor\n from .diag_lazy_tensor import DiagLazyTensor\n from .added_diag_lazy_tensor import AddedDiagLazyTensor\n\n if isinstance(other, ZeroLazyTensor):\n return self\n elif isinstance(other, DiagLazyTensor):\n return AddedDiagLazyTensor(self, other)\n else:\n return SumLazyTensor(self, other)\n\n def __div__(self, other):\n \"\"\"\n Return a :obj:`gpytorch.lazy.LazyTensor` that represents the product of this lazy tensor and\n the elementwise reciprocal of another matrix or lazy tensor.\n\n Args:\n :attr:`other` (:obj:`torch.tensor` or :obj:`gpytorch.lazy.LazyTensor`):\n Matrix to divide this one by.\n\n Returns:\n :obj:`gpytorch.lazy.MulLazyTensor`:\n Result of division.\n \"\"\"\n from .zero_lazy_tensor import ZeroLazyTensor\n\n if isinstance(other, ZeroLazyTensor):\n raise RuntimeError(\"Attempted to divide by a ZeroLazyTensor (division by zero)\")\n\n return self.mul(1.0 / other)\n\n def __getitem__(self, index):\n \"\"\"\n Supports subindexing of the matrix this LazyTensor represents. This may return either another\n :obj:`gpytorch.lazy.LazyTensor` or a :obj:`torch.tensor` depending on the exact implementation.\n \"\"\"\n ndimension = self.ndimension()\n\n # Process the index\n index = index if isinstance(index, tuple) else (index,)\n index = tuple(torch.tensor(idx) if isinstance(idx, list) else idx for idx in index)\n index = tuple(idx.item() if torch.is_tensor(idx) and not len(idx.shape) else idx for idx in index)\n\n # Handle the ellipsis\n # Find the index of the ellipsis\n ellipsis_locs = tuple(index for index, item in enumerate(index) if item is Ellipsis)\n if settings.debug.on():\n if len(ellipsis_locs) > 1:\n raise RuntimeError(\n \"Cannot have multiple ellipsis in a __getitem__ call. 
LazyTensor {} \"\n \" received index {}.\".format(self, index)\n )\n if len(ellipsis_locs) == 1:\n ellipsis_loc = ellipsis_locs[0]\n num_to_fill_in = ndimension - (len(index) - 1)\n index = (\n index[:ellipsis_loc]\n + tuple(slice(None, None, None) for _ in range(num_to_fill_in))\n + index[ellipsis_loc + 1 :]\n )\n\n # Pad the index with empty slices\n index = index + tuple(slice(None, None, None) for _ in range(ndimension - len(index)))\n\n # Make the index a tuple again\n index = tuple(index)\n\n # Call self._getitem - now that the index has been processed\n return self._getitem(*index)\n\n def __matmul__(self, other):\n return self.matmul(other)\n\n def __mul__(self, other):\n \"\"\"\n Convenience alias of :meth:`~gpytorch.lazy.LazyTensor.mul` that allows the standard product operator to be\n used.\n \"\"\"\n from .zero_lazy_tensor import ZeroLazyTensor\n from .diag_lazy_tensor import DiagLazyTensor\n\n if isinstance(other, ZeroLazyTensor):\n return other\n elif isinstance(other, DiagLazyTensor):\n return other * self\n\n return self.mul(other)\n\n def __setattr__(self, name, val):\n if torch.is_tensor(val) or isinstance(val, LazyTensor):\n if not hasattr(self, \"_args\"):\n raise RuntimeError(\n \"Cannot assign {name} to LazyTensor before calling LazyTensor.__init__()\".format(name=name)\n )\n object.__setattr__(self, name, val)\n\n\ndef _import_dotted_name(name):\n components = name.split(\".\")\n obj = __import__(components[0])\n for component in components[1:]:\n obj = getattr(obj, component)\n return obj\n\n\ndef delazify(obj):\n \"\"\"\n A function which ensures that `obj` is a (normal) Tensor.\n\n If `obj` is a Tensor, this function does nothing.\n If `obj` is a LazyTensor, this function evaluates it.\n \"\"\"\n\n if torch.is_tensor(obj):\n return obj\n elif isinstance(obj, LazyTensor):\n return obj.evaluate()\n else:\n raise TypeError(\"object of class {} cannot be made into a Tensor\".format(obj.__class__.__name__))\n\n\n_deprecate_renamed_methods(\n LazyTensor,\n inv_quad_log_det=\"inv_quad_logdet\",\n log_det=\"logdet\",\n)\n\n__all__ = [\n \"LazyTensor\",\n \"delazify\",\n]\n" ]
[ [ "torch.randn", "torch.tensor" ], [ "torch.tensor" ], [ "torch.autograd.enable_grad", "torch.Size", "torch.cat", "torch.eye", "torch.is_tensor", "torch.tensor", "torch.arange", "torch.autograd.grad" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
UKPLab/emnlp2019-dualgraph
[ "0c58fb7f3ad3b9da3b92b2d2841558807fc79fd0" ]
[ "onmt/models/model_saver.py" ]
[ "import os\nimport torch\nimport torch.nn as nn\n\nfrom collections import deque\nfrom onmt.utils.logging import logger\n\nfrom copy import deepcopy\n\n\ndef build_model_saver(model_opt, opt, model, fields, optim):\n model_saver = ModelSaver(opt.save_model,\n model,\n model_opt,\n fields,\n optim,\n opt.keep_checkpoint)\n return model_saver\n\n\nclass ModelSaverBase(object):\n \"\"\"Base class for model saving operations\n\n Inherited classes must implement private methods:\n * `_save`\n * `_rm_checkpoint\n \"\"\"\n\n def __init__(self, base_path, model, model_opt, fields, optim,\n keep_checkpoint=-1):\n self.base_path = base_path\n self.model = model\n self.model_opt = model_opt\n self.fields = fields\n self.optim = optim\n self.last_saved_step = None\n self.keep_checkpoint = keep_checkpoint\n if keep_checkpoint > 0:\n self.checkpoint_queue = deque([], maxlen=keep_checkpoint)\n\n def save(self, step, moving_average=None):\n \"\"\"Main entry point for model saver\n\n It wraps the `_save` method with checks and apply `keep_checkpoint`\n related logic\n \"\"\"\n\n if self.keep_checkpoint == 0 or step == self.last_saved_step:\n return\n\n if moving_average:\n save_model = deepcopy(self.model)\n for avg, param in zip(moving_average, save_model.parameters()):\n param.data.copy_(avg.data)\n else:\n save_model = self.model\n\n chkpt, chkpt_name = self._save(step, save_model)\n self.last_saved_step = step\n\n if moving_average:\n del save_model\n\n if self.keep_checkpoint > 0:\n if len(self.checkpoint_queue) == self.checkpoint_queue.maxlen:\n todel = self.checkpoint_queue.popleft()\n self._rm_checkpoint(todel)\n self.checkpoint_queue.append(chkpt_name)\n\n def _save(self, step):\n \"\"\"Save a resumable checkpoint.\n\n Args:\n step (int): step number\n\n Returns:\n (object, str):\n\n * checkpoint: the saved object\n * checkpoint_name: name (or path) of the saved checkpoint\n \"\"\"\n\n raise NotImplementedError()\n\n def _rm_checkpoint(self, name):\n \"\"\"Remove a checkpoint\n\n Args:\n name(str): name that indentifies the checkpoint\n (it may be a filepath)\n \"\"\"\n\n raise NotImplementedError()\n\n\nclass ModelSaver(ModelSaverBase):\n \"\"\"Simple model saver to filesystem\"\"\"\n\n def _save(self, step, model):\n real_model = (model.module\n if isinstance(model, nn.DataParallel)\n else model)\n real_generator = (real_model.generator.module\n if isinstance(real_model.generator, nn.DataParallel)\n else real_model.generator)\n\n model_state_dict = real_model.state_dict()\n model_state_dict = {k: v for k, v in model_state_dict.items()\n if 'generator' not in k}\n generator_state_dict = real_generator.state_dict()\n\n # NOTE: We need to trim the vocab to remove any unk tokens that\n # were not originally here.\n\n vocab = deepcopy(self.fields)\n\n if hasattr(model.encoder, 'is_graph_encoder'):\n sides = [\"src\", \"node1\", \"node2\", \"tgt\"]\n else:\n sides = [\"src\", \"tgt\"]\n\n for side in sides:\n keys_to_pop = []\n if hasattr(vocab[side], \"fields\"):\n unk_token = vocab[side].fields[0][1].vocab.itos[0]\n for key, value in vocab[side].fields[0][1].vocab.stoi.items():\n if value == 0 and key != unk_token:\n keys_to_pop.append(key)\n for key in keys_to_pop:\n vocab[side].fields[0][1].vocab.stoi.pop(key, None)\n\n checkpoint = {\n 'model': model_state_dict,\n 'generator': generator_state_dict,\n 'vocab': vocab,\n 'opt': self.model_opt,\n 'optim': self.optim.state_dict(),\n }\n\n logger.info(\"Saving checkpoint %s_step_%d.pt\" % (self.base_path, step))\n checkpoint_path = '%s_step_%d.pt' % 
(self.base_path, step)\n torch.save(checkpoint, checkpoint_path)\n return checkpoint, checkpoint_path\n\n def _rm_checkpoint(self, name):\n os.remove(name)\n" ]
[ [ "torch.save" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
hsk9767/mesh_rcnn_copy
[ "6dd4d9ea8af33c03a084e34c7d16eeaddfe924ae" ]
[ "meshrcnn/modeling/roi_heads/roi_heads.py" ]
[ "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom typing import Dict\nimport torch\nfrom detectron2.layers import ShapeSpec, cat\nfrom detectron2.modeling import ROI_HEADS_REGISTRY\nfrom detectron2.modeling.poolers import ROIPooler\nfrom detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs\nfrom detectron2.modeling.roi_heads.roi_heads import StandardROIHeads, select_foreground_proposals\nfrom pytorch3d.ops import cubify\nfrom pytorch3d.structures import Meshes\nfrom pytorch3d.utils import ico_sphere\n\nfrom meshrcnn.modeling.roi_heads.mask_head import mask_rcnn_loss\nfrom meshrcnn.modeling.roi_heads.mesh_head import (\n build_mesh_head,\n mesh_rcnn_inference,\n mesh_rcnn_loss,\n)\nfrom meshrcnn.modeling.roi_heads.voxel_head import (\n build_voxel_head,\n voxel_rcnn_inference,\n voxel_rcnn_loss,\n)\nfrom meshrcnn.modeling.roi_heads.z_head import build_z_head, z_rcnn_inference, z_rcnn_loss\nfrom meshrcnn.utils import vis as vis_utils\n\n\n@ROI_HEADS_REGISTRY.register()\nclass MeshRCNNROIHeads(StandardROIHeads):\n \"\"\"\n The ROI specific heads for Mesh R-CNN\n \"\"\"\n\n def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n super().__init__(cfg, input_shape)\n self._init_z_head(cfg, input_shape)\n self._init_voxel_head(cfg, input_shape)\n self._init_mesh_head(cfg, input_shape)\n # If MODEL.VIS_MINIBATCH is True we store minibatch targets\n # for visualization purposes\n self._vis = cfg.MODEL.VIS_MINIBATCH\n self._misc = {}\n self._vis_dir = cfg.OUTPUT_DIR\n\n def _init_z_head(self, cfg, input_shape):\n # fmt: off\n self.zpred_on = cfg.MODEL.ZPRED_ON\n if not self.zpred_on:\n return\n z_pooler_resolution = cfg.MODEL.ROI_Z_HEAD.POOLER_RESOLUTION\n z_pooler_scales = tuple(1.0 / input_shape[k].stride for k in self.in_features)\n z_sampling_ratio = cfg.MODEL.ROI_Z_HEAD.POOLER_SAMPLING_RATIO\n z_pooler_type = cfg.MODEL.ROI_Z_HEAD.POOLER_TYPE\n # fmt: on\n\n self.z_loss_weight = cfg.MODEL.ROI_Z_HEAD.Z_REG_WEIGHT\n self.z_smooth_l1_beta = cfg.MODEL.ROI_Z_HEAD.SMOOTH_L1_BETA\n\n in_channels = [input_shape[f].channels for f in self.in_features][0]\n\n self.z_pooler = ROIPooler(\n output_size=z_pooler_resolution,\n scales=z_pooler_scales,\n sampling_ratio=z_sampling_ratio,\n pooler_type=z_pooler_type,\n )\n shape = ShapeSpec(\n channels=in_channels, width=z_pooler_resolution, height=z_pooler_resolution\n )\n self.z_head = build_z_head(cfg, shape)\n\n def _init_voxel_head(self, cfg, input_shape):\n # fmt: off\n self.voxel_on = cfg.MODEL.VOXEL_ON\n if not self.voxel_on:\n return\n voxel_pooler_resolution = cfg.MODEL.ROI_VOXEL_HEAD.POOLER_RESOLUTION\n voxel_pooler_scales = tuple(1.0 / input_shape[k].stride for k in self.in_features)\n voxel_sampling_ratio = cfg.MODEL.ROI_VOXEL_HEAD.POOLER_SAMPLING_RATIO\n voxel_pooler_type = cfg.MODEL.ROI_VOXEL_HEAD.POOLER_TYPE\n # fmt: on\n\n self.voxel_loss_weight = cfg.MODEL.ROI_VOXEL_HEAD.LOSS_WEIGHT\n self.cls_agnostic_voxel = cfg.MODEL.ROI_VOXEL_HEAD.CLS_AGNOSTIC_VOXEL\n self.cubify_thresh = cfg.MODEL.ROI_VOXEL_HEAD.CUBIFY_THRESH\n\n in_channels = [input_shape[f].channels for f in self.in_features][0]\n\n self.voxel_pooler = ROIPooler(\n output_size=voxel_pooler_resolution,\n scales=voxel_pooler_scales,\n sampling_ratio=voxel_sampling_ratio,\n pooler_type=voxel_pooler_type,\n )\n shape = ShapeSpec(\n channels=in_channels, width=voxel_pooler_resolution, height=voxel_pooler_resolution\n )\n self.voxel_head = build_voxel_head(cfg, shape)\n\n def _init_mesh_head(self, cfg, input_shape):\n # fmt: 
off\n self.mesh_on = cfg.MODEL.MESH_ON\n if not self.mesh_on:\n return\n mesh_pooler_resolution = cfg.MODEL.ROI_MESH_HEAD.POOLER_RESOLUTION\n mesh_pooler_scales = tuple(1.0 / input_shape[k].stride for k in self.in_features)\n mesh_sampling_ratio = cfg.MODEL.ROI_MESH_HEAD.POOLER_SAMPLING_RATIO\n mesh_pooler_type = cfg.MODEL.ROI_MESH_HEAD.POOLER_TYPE\n # fmt: on\n\n self.chamfer_loss_weight = cfg.MODEL.ROI_MESH_HEAD.CHAMFER_LOSS_WEIGHT\n self.normals_loss_weight = cfg.MODEL.ROI_MESH_HEAD.NORMALS_LOSS_WEIGHT\n self.edge_loss_weight = cfg.MODEL.ROI_MESH_HEAD.EDGE_LOSS_WEIGHT\n self.gt_num_samples = cfg.MODEL.ROI_MESH_HEAD.GT_NUM_SAMPLES\n self.pred_num_samples = cfg.MODEL.ROI_MESH_HEAD.PRED_NUM_SAMPLES\n self.gt_coord_thresh = cfg.MODEL.ROI_MESH_HEAD.GT_COORD_THRESH\n self.ico_sphere_level = cfg.MODEL.ROI_MESH_HEAD.ICO_SPHERE_LEVEL\n\n in_channels = [input_shape[f].channels for f in self.in_features][0]\n\n self.mesh_pooler = ROIPooler(\n output_size=mesh_pooler_resolution,\n scales=mesh_pooler_scales,\n sampling_ratio=mesh_sampling_ratio,\n pooler_type=mesh_pooler_type,\n )\n self.mesh_head = build_mesh_head(\n cfg,\n ShapeSpec(\n channels=in_channels, height=mesh_pooler_resolution, width=mesh_pooler_resolution\n ),\n )\n\n def forward(self, images, features, proposals, targets=None):\n \"\"\"\n See :class:`ROIHeads.forward`.\n \"\"\"\n if self._vis:\n self._misc[\"images\"] = images\n del images\n\n if self.training:\n proposals = self.label_and_sample_proposals(proposals, targets)\n del targets\n\n if self._vis:\n self._misc[\"proposals\"] = proposals\n\n if self.training:\n losses = self._forward_box(features, proposals)\n # During training the proposals used by the box head are\n # used by the z, mask, voxel & mesh head.\n losses.update(self._forward_z(features, proposals))\n losses.update(self._forward_mask(features, proposals))\n losses.update(self._forward_shape(features, proposals))\n # print minibatch examples\n if self._vis:\n vis_utils.visualize_minibatch(self._misc[\"images\"], self._misc, self._vis_dir, True)\n\n return [], losses\n else:\n pred_instances = self._forward_box(features, proposals)\n # During inference cascaded prediction is used: the mask and keypoints heads are only\n # applied to the top scoring box detections.\n pred_instances = self.forward_with_given_boxes(features, pred_instances)\n return pred_instances, {}\n\n def forward_with_given_boxes(self, features, instances):\n \"\"\"\n Use the given boxes in `instances` to produce other (non-box) per-ROI outputs.\n\n Args:\n features: same as in `forward()`\n instances (list[Instances]): instances to predict other outputs. 
Expect the keys\n \"pred_boxes\" and \"pred_classes\" to exist.\n\n Returns:\n instances (Instances): the same `Instances` object, with extra\n fields such as `pred_masks` or `pred_voxels`.\n \"\"\"\n assert not self.training\n assert instances[0].has(\"pred_boxes\") and instances[0].has(\"pred_classes\")\n\n instances = self._forward_z(features, instances)\n instances = self._forward_mask(features, instances)\n instances = self._forward_shape(features, instances)\n return instances\n\n def _forward_z(self, features, instances):\n \"\"\"\n Forward logic of the z prediction branch.\n \"\"\"\n if not self.zpred_on:\n return {} if self.training else instances\n features = [features[f] for f in self.in_features]\n\n if self.training:\n # The loss is only defined on positive proposals.\n proposals, _ = select_foreground_proposals(instances, self.num_classes)\n proposal_boxes = [x.proposal_boxes for x in proposals]\n z_features = self.z_pooler(features, proposal_boxes)\n z_pred = self.z_head(z_features)\n src_boxes = cat([p.tensor for p in proposal_boxes])\n loss_z_reg = z_rcnn_loss(\n z_pred,\n proposals,\n src_boxes,\n loss_weight=self.z_loss_weight,\n smooth_l1_beta=self.z_smooth_l1_beta,\n )\n return {\"loss_z_reg\": loss_z_reg}\n else:\n pred_boxes = [x.pred_boxes for x in instances]\n z_features = self.z_pooler(features, pred_boxes)\n z_pred = self.z_head(z_features)\n z_rcnn_inference(z_pred, instances)\n return instances\n\n def _forward_mask(self, features, instances):\n \"\"\"\n Forward logic of the mask prediction branch.\n\n Args:\n features (dict[str,Tensor]): mapping from names to backbone features\n instances (list[Instances]): the per-image instances to train/predict masks.\n In training, they can be the proposals.\n In inference, they can be the predicted boxes.\n\n Returns:\n In training, a dict of losses.\n In inference, update `instances` with new fields \"pred_masks\" and return it.\n \"\"\"\n if not self.mask_on:\n return {} if self.training else instances\n\n features = [features[f] for f in self.in_features]\n\n if self.training:\n # The loss is only defined on positive proposals.\n proposals, _ = select_foreground_proposals(instances, self.num_classes)\n proposal_boxes = [x.proposal_boxes for x in proposals]\n mask_features = self.mask_pooler(features, proposal_boxes)\n mask_logits = self.mask_head.layers(mask_features)\n loss_mask, target_masks = mask_rcnn_loss(mask_logits, proposals)\n if self._vis:\n self._misc[\"target_masks\"] = target_masks\n self._misc[\"fg_proposals\"] = proposals\n return {\"loss_mask\": loss_mask}\n else:\n pred_boxes = [x.pred_boxes for x in instances]\n mask_features = self.mask_pooler(features, pred_boxes)\n return self.mask_head(mask_features, instances)\n\n def _forward_shape(self, features, instances):\n \"\"\"\n Forward logic for the voxel and mesh refinement branch.\n\n Args:\n features (list[Tensor]): #level input features for voxel prediction\n instances (list[Instances]): the per-image instances to train/predict meshes.\n In training, they can be the proposals.\n In inference, they can be the predicted boxes.\n Returns:\n In training, a dict of losses.\n In inference, update `instances` with new fields \"pred_voxels\" & \"pred_meshes\" and return it.\n \"\"\"\n if not self.voxel_on and not self.mesh_on:\n return {} if self.training else instances\n\n features = [features[f] for f in self.in_features]\n if self.training:\n # The loss is only defined on positive proposals.\n proposals, _ = select_foreground_proposals(instances, 
self.num_classes)\n proposal_boxes = [x.proposal_boxes for x in proposals]\n\n losses = {}\n if self.voxel_on:\n voxel_features = self.voxel_pooler(features, proposal_boxes)\n voxel_logits = self.voxel_head(voxel_features)\n loss_voxel, target_voxels = voxel_rcnn_loss(\n voxel_logits, proposals, loss_weight=self.voxel_loss_weight\n )\n losses.update({\"loss_voxel\": loss_voxel})\n if self._vis:\n self._misc[\"target_voxels\"] = target_voxels\n if self.cls_agnostic_voxel:\n with torch.no_grad():\n vox_in = voxel_logits.sigmoid().squeeze(1) # (N, V, V, V)\n init_mesh = cubify(vox_in, self.cubify_thresh) # 1\n else:\n raise ValueError(\"No support for class specific predictions\")\n\n if self.mesh_on:\n mesh_features = self.mesh_pooler(features, proposal_boxes)\n if not self.voxel_on:\n if mesh_features.shape[0] > 0:\n init_mesh = ico_sphere(self.ico_sphere_level, mesh_features.device)\n init_mesh = init_mesh.extend(mesh_features.shape[0])\n else:\n init_mesh = Meshes(verts=[], faces=[])\n pred_meshes = self.mesh_head(mesh_features, init_mesh)\n\n # loss weights\n loss_weights = {\n \"chamfer\": self.chamfer_loss_weight,\n \"normals\": self.normals_loss_weight,\n \"edge\": self.edge_loss_weight,\n }\n\n if not pred_meshes[0].isempty():\n loss_chamfer, loss_normals, loss_edge, target_meshes = mesh_rcnn_loss(\n pred_meshes,\n proposals,\n loss_weights=loss_weights,\n gt_num_samples=self.gt_num_samples,\n pred_num_samples=self.pred_num_samples,\n gt_coord_thresh=self.gt_coord_thresh,\n )\n if self._vis:\n self._misc[\"init_meshes\"] = init_mesh\n self._misc[\"target_meshes\"] = target_meshes\n else:\n loss_chamfer = sum(k.sum() for k in self.mesh_head.parameters()) * 0.0\n loss_normals = sum(k.sum() for k in self.mesh_head.parameters()) * 0.0\n loss_edge = sum(k.sum() for k in self.mesh_head.parameters()) * 0.0\n\n losses.update(\n {\n \"loss_chamfer\": loss_chamfer,\n \"loss_normals\": loss_normals,\n \"loss_edge\": loss_edge,\n }\n )\n\n return losses\n else:\n pred_boxes = [x.pred_boxes for x in instances]\n\n if self.voxel_on:\n voxel_features = self.voxel_pooler(features, pred_boxes)\n voxel_logits = self.voxel_head(voxel_features)\n voxel_rcnn_inference(voxel_logits, instances)\n if self.cls_agnostic_voxel:\n with torch.no_grad():\n vox_in = voxel_logits.sigmoid().squeeze(1) # (N, V, V, V)\n init_mesh = cubify(vox_in, self.cubify_thresh) # 1\n else:\n raise ValueError(\"No support for class specific predictions\")\n\n if self.mesh_on:\n mesh_features = self.mesh_pooler(features, pred_boxes)\n if not self.voxel_on:\n if mesh_features.shape[0] > 0:\n init_mesh = ico_sphere(self.ico_sphere_level, mesh_features.device)\n init_mesh = init_mesh.extend(mesh_features.shape[0])\n else:\n init_mesh = Meshes(verts=[], faces=[])\n pred_meshes = self.mesh_head(mesh_features, init_mesh)\n mesh_rcnn_inference(pred_meshes[-1], instances)\n else:\n assert self.voxel_on\n mesh_rcnn_inference(init_mesh, instances)\n\n return instances\n" ]
[ [ "torch.no_grad" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
J-Massey/postproc
[ "4552b0ad79072f5d217cf62632c08617ea3d2d82", "4552b0ad79072f5d217cf62632c08617ea3d2d82" ]
[ "circular_cylinder/figures/plot.py", "flat_plate/res_test.py" ]
[ "import matplotlib.pyplot as plt\nimport seaborn as sns\nimport numpy as np\nfrom itertools import product\nimport os\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\nfrom matplotlib import ticker, cm\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\nfrom matplotlib.ticker import FormatStrFormatter\nfrom matplotlib.colors import LinearSegmentedColormap\nfrom matplotlib.colors import BoundaryNorm\n\n\ncolors = sns.color_palette(\"husl\", 4)\nplt.style.use(['science', 'grid'])\n\n\ndef plot_loss(epochs, cost, fn='cost.pdf'):\n fig, ax = plt.subplots(figsize=(5, 3))\n ax.tick_params(bottom=\"on\", top=\"on\", right=\"on\", which='both', direction='in', length=2)\n ax.set_xlabel(r\"Epochs\")\n ax.set_ylabel(r'$L_2$ loss')\n ax.plot_fill(np.linspace(0, epochs, len(cost)), cost, label=r'$L_{2}$')\n ax.legend()\n plt.savefig(fn)\n plt.show()\n\n\ndef plot_model(cd_hat, fos, Y, fn='model.pdf'):\n fig, ax = plt.subplots(figsize=(5, 3))\n ax.tick_params(bottom=\"on\", top=\"on\", right=\"on\", which='both', direction='in', length=2)\n ax.set_xlabel(r\"$t/D$\")\n ax.set_ylabel(r'$C_{D_f}$')\n ax.plot_fill(fos['t'], Y, label=r'Ground truth')\n ax.plot_fill(fos['t'], cd_hat, label=r'$\\hat{C_{D_f}}$')\n ax.legend()\n plt.savefig(fn)\n plt.show()\n\n\ndef plot_BL_corruption():\n fig, ax = plt.subplots(figsize=(5, 5))\n ax.set_xlabel(r'$x_n$')\n ax.set_ylabel(r'$y_n$', rotation=0)\n # Define grid\n D = 32\n eps = 2\n r = D / 2\n x, y = np.arange(-D, D + 1, 1), np.arange(-D, D + 1, 1)\n X, Y = np.meshgrid(x, y)\n\n # Body coordinates\n theta = np.linspace(0, 2 * np.pi, int(D * np.pi))\n\n Bx, By = r * np.cos(theta), r * np.sin(theta)\n ax.plot_fill(Bx, By, color='k', linewidth=2., label=r'Hard body boundary')\n\n Bepx, Bepy = (r + eps) * np.cos(theta), (r + eps) * np.sin(theta)\n ax.plot_fill(Bepx, Bepy, c='blue', linewidth=0.5, label=r'$D+\\epsilon$')\n\n # Distance function from eps away from body edge\n dis = np.sqrt(X ** 2 + Y ** 2)\n\n # Cmap definition\n bs = iter((np.array([14, 15.8, 18.7, 22]) - 4.5) / D)\n colours = [(0, 'midnightblue'),\n (next(bs), 'midnightblue'),\n (next(bs), 'red'),\n (next(bs), 'green'),\n (next(bs), 'royalblue'),\n (1, 'royalblue')]\n cmap = LinearSegmentedColormap.from_list('corruption', colours, 256)\n\n cs = ax.imshow(dis, zorder=0, aspect=\"auto\", extent=(-D, D, -D, D),\n cmap=cmap, interpolation='bicubic')\n make_axes_locatable(ax)\n divider = make_axes_locatable(ax)\n ax_cb = divider.new_horizontal(size=\"5%\", pad=0.05)\n fig.add_axes(ax_cb)\n cbar = plt.colorbar(cs, cax=ax_cb, ticks=[8, 16.4, 21, 32], extend='max')\n # ax_cb.yaxis.tick_right()\n cbar.ax.set_yticklabels([r'$\\vec{b}$', r'$\\vec{b}*\\vec{f}$', r'$d|_{n \\approx 0}$', r'$\\vec{f}$'])\n cbar.ax.tick_params(which='both', size=0)\n ax.legend()\n plt.savefig('../figures/bl_corruption.pdf', dpi=300)\n plt.close()\n\n\ndef plot_pressure():\n data_root = '/home/masseyjmo/Workspace/Lotus/projects/cylinder_dns/validation'\n p = np.loadtxt(os.path.join(data_root, 'fort.10'), unpack=True)\n p = np.mean(p, axis=1)\n\n fig, ax = plt.subplots(figsize=(4, 4))\n ax.tick_params(bottom=\"on\", top=\"on\", right=\"on\", which='both', direction='in', length=2)\n ax.set_xlabel(r\"$\\theta$\")\n ax.set_ylabel(r'$C_{p}$')\n ax.scatter(np.linspace(0, np.pi / 2, len(p)), p * 2, label=r'Pressure distribution', color='k', marker='+')\n ax.set_ylim(-2, 1)\n ax.legend()\n plt.savefig('pressure_theta.pdf')\n plt.show()\n\n\nif __name__ == \"__main__\":\n plot_BL_corruption()\n", "#!/usr/bin/env python3\n# -*- 
coding: utf-8 -*-\n\"\"\"\n@author: J. Massey\n@description: Res test for flat plate experiment with analysis Kurt\n@contact: [email protected]\n\"\"\"\n\n# Imports\nimport numpy as np\nimport postproc.visualise.plotter\nimport postproc.io as io\nimport postproc.frequency_spectra\nimport matplotlib.pyplot as plt\nimport os\nimport seaborn as sns\nfrom tqdm import tqdm\n\nplt.style.use(['science', 'grid'])\ncases = ['full_bumps', 'smooth']\nlabels = [r'$ 75\\% $', r'Smooth']\nthetas = [12]\n\n\ndef read_rot(data_root, c, theta):\n force_file = '3D/fort.9'\n names = ['t', 'dt', 'px', 'py', 'pz']\n interest = 'p'\n fos = (io.unpack_flex_forces(os.path.join(data_root, str(c), force_file), names))\n forces_dic = dict(zip(names, fos))\n t = forces_dic['t'] / c\n u = np.array((forces_dic[interest + 'x'], forces_dic[interest + 'y']))\n\n # Transform the forces into the correct plane\n co, si = np.cos(np.radians(theta)), np.sin(np.radians(theta))\n rot = np.matrix([[co, si], [-si, co]])\n m = np.dot(rot, [u[0], u[1]])\n\n old_norm = (c / 45.71 * np.sin(np.radians(theta)) * c * 0.25 * 2)\n new_norm = ((c / 45.71 * np.cos(np.radians(theta)) # Thickness\n + c * np.sin(np.radians(theta))) # Body\n * c * 0.25 * 2) # z\n exp_norm = (c * (c * 0.25 * 2))\n ux, uy = np.squeeze(np.array(m[0])), np.squeeze(np.array(m[1]))\n\n return t, ux * old_norm / exp_norm, uy * old_norm / exp_norm\n\n\ndef plot_coeffs(theta, **kwargs):\n drag = kwargs.get('drag', False)\n lift = kwargs.get('lift', True)\n\n tit = r'Power spectra comparison'\n\n colors = sns.color_palette(\"husl\", len(cases))\n\n # How long from 2D to 3D, and when to crop TS\n init = 100\n snip = 200\n\n fs, uks_labelled, uks = [], [], []\n # Plot TSs and save spectra\n fig1, ax1 = plt.subplots(figsize=(7, 5))\n ax1.set_title(r'AoA = $' + str(theta) + r'$')\n ax1.tick_params(bottom=\"on\", top=\"on\", right=\"on\", which='both', direction='in', length=2)\n ax1.set_xlabel(r'$ t/c $')\n\n if drag: label = r'$ C_{D} $'\n if lift: label = r'$ C_{L} $'\n\n ax1.set_ylabel(label)\n for idx2, case in enumerate(cases):\n data_root = '/home/masseyjmo/Workspace/Lotus/projects/flat_plate/' + alpha + '/' + case\n\n t, ux, uy = read_rot(data_root, c, theta)\n\n old_norm = (c / 45.71 * np.sin(np.radians(theta)) * c * 0.25 * 2)\n new_norm = ((c / 45.71 * np.cos(np.radians(theta)) # Thickness\n + c * np.sin(np.radians(theta))) # Body\n * c * 0.25 * 2) # z\n exp_norm = (c * (c * 0.25 * 2))\n\n if lift:\n u = uy * old_norm / exp_norm # Change normalisation to match experiments\n if drag:\n u = ux * old_norm / exp_norm\n\n t, u = t[t < snip], u[t < snip]\n t, u = t[t > init], u[t > init]\n\n # Append the Welch spectra to a list in order to compare\n criteria = postproc.frequency_spectra.FreqConv(t, u, n=5, OL=0.5)\n f, uk = criteria.welch()\n fs.append(f)\n uks.append(uk)\n uks_labelled.append((labels[idx2], uk))\n ax1.plot_fill(t, u, label=labels[idx2], color=colors[idx2])\n\n ax1.legend()\n if drag:\n fig1.savefig(os.path.join(data_root, '../comparisons/cd_comparison.pdf'),\n bbox_inches='tight', dpi=30, transparent=False)\n postproc.visualise.plotter.plotLogLogTimeSpectra_list(\n os.path.join(data_root, '../comparisons/log_spectra_cd_comparison.pdf'),\n uks_labelled, fs,\n title=tit,\n ylabel=r'$PS$ ' + label,\n colors=colors)\n if lift:\n fig1.savefig(os.path.join(data_root, '../comparisons/cl_comparison.pdf'),\n bbox_inches='tight', dpi=30, transparent=False)\n postproc.visualise.plotter.plotLogLogTimeSpectra_list(\n os.path.join(data_root, 
'../comparisons/log_spectra_cl_comparison.pdf'),\n uks_labelled, fs,\n title=tit,\n ylabel=r'$PS$ ' + label,\n colors=colors)\n plt.close()\n\n\ndef plot_cl_cd(aoas, theta):\n\n colors = sns.color_palette(\"husl\", len(cases))\n\n fig, ax = plt.subplots(figsize=(7, 5))\n ax.set_title(r'$C_{L}/C_{D}$')\n ax.tick_params(bottom=\"on\", top=\"on\", right=\"on\", which='both', direction='in', length=2)\n ax.set_xlabel(r'Roughness case')\n ax.set_xticks(range(len(cases)))\n ax.set_xticklabels(labels)\n\n init = 100\n snip = 200\n for idx, alpha in enumerate(aoas):\n for idx2, case in enumerate(cases):\n data_root = '/home/masseyjmo/Workspace/Lotus/projects/flat_plate/' + str(alpha) + '/' + case\n t, ux, uy = read_rot(data_root, c, theta[idx])\n u = uy / ux\n t, u = t[t < snip], u[t < snip]\n t, u = t[t > init], u[t > init]\n\n if idx2 == 3:\n ax.axhline(np.mean(u), c=colors[idx], ls='--')\n\n if idx2 == 0:\n lab = r'AoA $' + str(theta[idx]) + r'$'\n else:\n lab = None\n ax.scatter(idx2, np.mean(u), color=colors[idx], label=lab)\n ax.legend()\n fig.savefig(os.path.join(data_root, '../../figures/cl_cd.pdf'),\n bbox_inches='tight', dpi=30, transparent=True)\n plt.close()\n\n\ndef plot_coeffs_rms(aoas, theta, **kwargs):\n\n drag = kwargs.get('drag', False)\n lift = kwargs.get('lift', False)\n mean = kwargs.get('mean', False)\n rms = kwargs.get('rms', False)\n\n colors = sns.color_palette(\"husl\", len(cases))\n\n fig, ax = plt.subplots(figsize=(7, 5))\n\n if drag:\n ax.set_title(r'$C_{D}$')\n if lift:\n ax.set_title(r'$C_{L}$')\n\n ax.tick_params(bottom=\"on\", top=\"on\", right=\"on\", which='both', direction='in', length=2)\n ax.set_xlabel(r'Roughness case')\n if mean:\n ax.set_ylabel(r'Mean')\n else:\n ax.set_ylabel(r'RMS')\n ax.set_xticks(range(len(cases)))\n ax.set_xticklabels(labels)\n\n init = 100\n snip = 200\n for idx, alpha in enumerate(aoas):\n for idx2, case in enumerate(cases):\n data_root = '/home/masseyjmo/Workspace/Lotus/projects/flat_plate/' + str(alpha) + '/' + case\n t, ux, uy = read_rot(data_root, c, theta[idx])\n\n if drag:\n u = ux\n if lift:\n u = uy\n\n t, u = t[t < snip], u[t < snip]\n t, u = t[t > init], u[t > init]\n if idx2 == 3:\n if mean:\n ax.axhline(np.mean(u), c=colors[idx], ls='--')\n else:\n ax.axhline(np.sqrt(np.sum((u-np.mean(u))**2)/len(u)), c=colors[idx], ls='--')\n\n if idx2 == 0:\n lab = r'AoA $' + str(theta[idx]) + r'$'\n else:\n lab = None\n\n if mean:\n ax.scatter(idx2, np.mean(u), color=colors[idx], label=lab)\n print(np.mean(u))\n if rms:\n ax.scatter(idx2, np.sqrt(np.sum((u-np.mean(u))**2)/len(u)), color=colors[idx], label=lab)\n print(np.sqrt(np.sum((u - np.mean(u)) ** 2) / len(u)))\n ax.legend()\n if mean:\n ext = 'mean'\n if rms:\n ext = 'rms'\n ax.set_title(r'RMS $C_{L_p}$')\n\n if drag:\n fig.savefig(os.path.join(data_root, '../../meta_figures/cd_'+ext+'.pdf'),\n bbox_inches='tight', dpi=30, transparent=True)\n if lift:\n fig.savefig(os.path.join(data_root, '../../meta_figures/cl_'+ext+'.pdf'),\n bbox_inches='tight', dpi=30, transparent=True)\n\n plt.close()\n\n\nif __name__ == \"__main__\":\n AoAs = ['AoA_12', '25k', 'AoA_2']\n c = 256\n for idx, alpha in tqdm(enumerate(AoAs)):\n plot_coeffs(thetas[idx], lift=True)\n # plot_coeffs(thetas[idx], drag=True)\n # plot_cl_cd(AoAs, thetas)\n # plot_coeffs_rms(AoAs, thetas, lift=True, rms=True)\n # plot_coeffs_rms(AoAs, thetas, lift=True, mean=True)\n" ]
[ [ "numpy.sqrt", "numpy.meshgrid", "numpy.arange", "matplotlib.pyplot.subplots", "matplotlib.pyplot.savefig", "numpy.cos", "numpy.sin", "matplotlib.pyplot.colorbar", "numpy.mean", "matplotlib.pyplot.close", "matplotlib.colors.LinearSegmentedColormap.from_list", "numpy.array", "matplotlib.pyplot.show", "matplotlib.pyplot.style.use" ], [ "numpy.matrix", "numpy.dot", "numpy.radians", "matplotlib.pyplot.subplots", "numpy.mean", "matplotlib.pyplot.close", "numpy.array", "matplotlib.pyplot.style.use" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
zhengzx-nlp/REDER
[ "7035e089e4d30b8090a2c3caa937b1e0ba27cedc" ]
[ "fairseq/modules/fairseq_dropout.py" ]
[ "# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nimport logging\nfrom typing import List, Optional\n\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass FairseqDropout(nn.Module):\n\n def __init__(self, p, module_name=None):\n super().__init__()\n self.p = p\n self.module_name = module_name\n self.apply_during_inference = False\n\n def forward(self, x, inplace: bool = False):\n if self.training or self.apply_during_inference:\n return F.dropout(x, p=self.p, training=True, inplace=inplace)\n else:\n return x\n\n def extra_repr(self) -> str:\n return 'p={}'.format(self.p)\n\n def make_generation_fast_(\n self,\n name: str,\n retain_dropout: bool = False,\n retain_dropout_modules: Optional[List[str]] = None,\n **kwargs\n ):\n if retain_dropout:\n if retain_dropout_modules is not None and self.module_name is None:\n logger.warning(\n 'Cannot enable dropout during inference for module {} '\n 'because module_name was not set'.format(name)\n )\n elif (\n retain_dropout_modules is None # if None, apply to all modules\n or self.module_name in retain_dropout_modules\n ):\n logger.info(\n 'Enabling dropout during inference for module: {}'.format(name)\n )\n self.apply_during_inference = True\n else:\n logger.info('Disabling dropout for module: {}'.format(name))\n" ]
[ [ "torch.nn.functional.dropout" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
EricPedley/FCRN-DepthPrediction
[ "93aaed329e9e071c6d5c5a59e77a73a09684b156" ]
[ "tensorflow/network.py" ]
[ "import numpy as np\nimport tensorflow.compat.v1 as tf\ntf.disable_v2_behavior()\n\n# ----------------------------------------------------------------------------------\n# Commonly used layers and operations based on ethereon's implementation \n# https://github.com/ethereon/caffe-tensorflow\n# Slight modifications may apply. FCRN-specific operations have also been appended. \n# ----------------------------------------------------------------------------------\n# Thanks to *Helisa Dhamo* for the model conversion and integration into TensorFlow.\n# ----------------------------------------------------------------------------------\n\nDEFAULT_PADDING = 'SAME'\n\n\ndef get_incoming_shape(incoming):\n \"\"\" Returns the incoming data shape \"\"\"\n if isinstance(incoming, tf.Tensor):\n return incoming.get_shape().as_list()\n elif type(incoming) in [np.array, list, tuple]:\n return np.shape(incoming)\n else:\n raise Exception(\"Invalid incoming layer.\")\n\n\ndef interleave(tensors, axis):\n old_shape = get_incoming_shape(tensors[0])[1:]\n new_shape = [-1] + old_shape\n new_shape[axis] *= len(tensors)\n return tf.reshape(tf.stack(tensors, axis + 1), new_shape)\n\ndef layer(op):\n '''Decorator for composable network layers.'''\n\n def layer_decorated(self, *args, **kwargs):\n # Automatically set a name if not provided.\n name = kwargs.setdefault('name', self.get_unique_name(op.__name__))\n\n # Figure out the layer inputs.\n if len(self.terminals) == 0:\n raise RuntimeError('No input variables found for layer %s.' % name)\n elif len(self.terminals) == 1:\n layer_input = self.terminals[0]\n else:\n layer_input = list(self.terminals)\n # Perform the operation and get the output.\n layer_output = op(self, layer_input, *args, **kwargs)\n # Add to layer LUT.\n self.layers[name] = layer_output\n # This output is now the input for the next layer.\n self.feed(layer_output)\n # Return self for chained calls.\n return self\n\n return layer_decorated\n\n\nclass Network(object):\n\n def __init__(self, inputs, batch, keep_prob, is_training, trainable = True):\n # The input nodes for this network\n self.inputs = inputs\n # The current list of terminal nodes\n self.terminals = []\n # Mapping from layer names to layers\n self.layers = dict(inputs)\n # If true, the resulting variables are set as trainable\n self.trainable = trainable\n self.batch_size = batch\n self.keep_prob = keep_prob\n self.is_training = is_training\n self.setup()\n\n\n def setup(self):\n '''Construct the network. 
'''\n raise NotImplementedError('Must be implemented by the subclass.')\n\n def load(self, data_path, session, ignore_missing=False):\n '''Load network weights.\n data_path: The path to the numpy-serialized network weights\n session: The current TensorFlow session\n ignore_missing: If true, serialized weights for missing layers are ignored.\n '''\n data_dict = np.load(data_path, encoding='latin1').item()\n for op_name in data_dict: \n with tf.variable_scope(op_name, reuse=True):\n for param_name, data in iter(data_dict[op_name].items()): \n try:\n var = tf.get_variable(param_name)\n session.run(var.assign(data))\n\n except ValueError:\n if not ignore_missing:\n raise\n\n def feed(self, *args):\n '''Set the input(s) for the next operation by replacing the terminal nodes.\n The arguments can be either layer names or the actual layers.\n '''\n assert len(args) != 0\n self.terminals = []\n for fed_layer in args:\n if isinstance(fed_layer, str):\n try:\n fed_layer = self.layers[fed_layer]\n except KeyError:\n raise KeyError('Unknown layer name fed: %s' % fed_layer)\n self.terminals.append(fed_layer)\n return self\n\n def get_output(self):\n '''Returns the current network output.'''\n return self.terminals[-1]\n\n def get_layer_output(self, name):\n return self.layers[name]\n\n def get_unique_name(self, prefix):\n '''Returns an index-suffixed unique name for the given prefix.\n This is used for auto-generating layer names based on the type-prefix.\n '''\n ident = sum(t.startswith(prefix) for t, _ in self.layers.items()) + 1\n return '%s_%d' % (prefix, ident)\n\n def make_var(self, name, shape):\n '''Creates a new TensorFlow variable.'''\n return tf.get_variable(name, shape, dtype = 'float32', trainable=self.trainable)\n\n def validate_padding(self, padding):\n '''Verifies that the padding is one of the supported ones.'''\n assert padding in ('SAME', 'VALID')\n\n @layer\n def conv(self,\n input_data,\n k_h,\n k_w,\n c_o,\n s_h,\n s_w,\n name,\n relu=True,\n padding=DEFAULT_PADDING,\n group=1,\n biased=True):\n\n # Verify that the padding is acceptable\n self.validate_padding(padding)\n # Get the number of channels in the input\n c_i = input_data.get_shape()[-1]\n\n if (padding == 'SAME'):\n input_data = tf.pad(input_data, [[0, 0], [(k_h - 1)//2, (k_h - 1)//2], [(k_w - 1)//2, (k_w - 1)//2], [0, 0]], \"CONSTANT\")\n \n # Verify that the grouping parameter is valid\n assert c_i % group == 0\n assert c_o % group == 0\n # Convolution for a given input and kernel\n convolve = lambda i, k: tf.nn.conv2d(i, k, [1, s_h, s_w, 1], padding='VALID')\n \n with tf.variable_scope(name) as scope:\n kernel = self.make_var('weights', shape=[k_h, k_w, c_i // group, c_o])\n\n if group == 1:\n # This is the common-case. 
Convolve the input without any further complications.\n output = convolve(input_data, kernel)\n else:\n # Split the input into groups and then convolve each of them independently\n\n input_groups = tf.split(3, group, input_data)\n kernel_groups = tf.split(3, group, kernel)\n output_groups = [convolve(i, k) for i, k in zip(input_groups, kernel_groups)]\n # Concatenate the groups\n output = tf.concat(3, output_groups)\n\n # Add the biases\n if biased:\n biases = self.make_var('biases', [c_o])\n output = tf.nn.bias_add(output, biases)\n if relu:\n # ReLU non-linearity\n output = tf.nn.relu(output, name=scope.name)\n\n return output\n\n @layer\n def relu(self, input_data, name):\n return tf.nn.relu(input_data, name=name)\n\n @layer\n def max_pool(self, input_data, k_h, k_w, s_h, s_w, name, padding=DEFAULT_PADDING):\n self.validate_padding(padding)\n return tf.nn.max_pool(input_data,\n ksize=[1, k_h, k_w, 1],\n strides=[1, s_h, s_w, 1],\n padding=padding,\n name=name)\n\n @layer\n def avg_pool(self, input_data, k_h, k_w, s_h, s_w, name, padding=DEFAULT_PADDING):\n self.validate_padding(padding)\n return tf.nn.avg_pool(input_data,\n ksize=[1, k_h, k_w, 1],\n strides=[1, s_h, s_w, 1],\n padding=padding,\n name=name)\n\n @layer\n def lrn(self, input_data, radius, alpha, beta, name, bias=1.0):\n return tf.nn.local_response_normalization(input_data,\n depth_radius=radius,\n alpha=alpha,\n beta=beta,\n bias=bias,\n name=name)\n\n @layer\n def concat(self, inputs, axis, name):\n return tf.concat(concat_dim=axis, values=inputs, name=name)\n\n @layer\n def add(self, inputs, name):\n return tf.add_n(inputs, name=name)\n\n @layer\n def fc(self, input_data, num_out, name, relu=True):\n with tf.variable_scope(name) as scope:\n input_shape = input_data.get_shape()\n if input_shape.ndims == 4:\n # The input is spatial. 
Vectorize it first.\n dim = 1\n for d in input_shape[1:].as_list():\n dim *= d\n feed_in = tf.reshape(input_data, [-1, dim])\n else:\n feed_in, dim = (input_data, input_shape[-1].value)\n weights = self.make_var('weights', shape=[dim, num_out])\n biases = self.make_var('biases', [num_out])\n op = tf.nn.relu_layer if relu else tf.nn.xw_plus_b\n fc = op(feed_in, weights, biases, name=scope.name)\n return fc\n\n @layer\n def softmax(self, input_data, name):\n input_shape = map(lambda v: v.value, input_data.get_shape())\n if len(input_shape) > 2:\n # For certain models (like NiN), the singleton spatial dimensions\n # need to be explicitly squeezed, since they're not broadcast-able\n # in TensorFlow's NHWC ordering (unlike Caffe's NCHW).\n if input_shape[1] == 1 and input_shape[2] == 1:\n input_data = tf.squeeze(input_data, squeeze_dims=[1, 2])\n else:\n raise ValueError('Rank 2 tensor input expected for softmax!')\n return tf.nn.softmax(input_data, name)\n\n @layer\n def batch_normalization(self, input_data, name, scale_offset=True, relu=False):\n\n with tf.variable_scope(name) as scope:\n shape = [input_data.get_shape()[-1]]\n pop_mean = tf.get_variable(\"mean\", shape, initializer = tf.constant_initializer(0.0), trainable=False)\n pop_var = tf.get_variable(\"variance\", shape, initializer = tf.constant_initializer(1.0), trainable=False)\n epsilon = 1e-4\n decay = 0.999\n if scale_offset:\n scale = tf.get_variable(\"scale\", shape, initializer = tf.constant_initializer(1.0))\n offset = tf.get_variable(\"offset\", shape, initializer = tf.constant_initializer(0.0))\n else:\n scale, offset = (None, None)\n if self.is_training:\n batch_mean, batch_var = tf.nn.moments(input_data, [0, 1, 2])\n\n train_mean = tf.assign(pop_mean,\n pop_mean * decay + batch_mean * (1 - decay))\n train_var = tf.assign(pop_var,\n pop_var * decay + batch_var * (1 - decay))\n with tf.control_dependencies([train_mean, train_var]):\n output = tf.nn.batch_normalization(input_data,\n batch_mean, batch_var, offset, scale, epsilon, name = name)\n else:\n output = tf.nn.batch_normalization(input_data,\n pop_mean, pop_var, offset, scale, epsilon, name = name)\n\n if relu:\n output = tf.nn.relu(output)\n\n return output\n\n @layer\n def dropout(self, input_data, keep_prob, name):\n return tf.nn.dropout(input_data, keep_prob, name=name)\n \n\n def unpool_as_conv(self, size, input_data, id, stride = 1, ReLU = False, BN = True):\n\n\t\t# Model upconvolutions (unpooling + convolution) as interleaving feature\n\t\t# maps of four convolutions (A,B,C,D). Building block for up-projections. 
\n\n\n # Convolution A (3x3)\n # --------------------------------------------------\n layerName = \"layer%s_ConvA\" % (id)\n self.feed(input_data)\n self.conv( 3, 3, size[3], stride, stride, name = layerName, padding = 'SAME', relu = False)\n outputA = self.get_output()\n\n # Convolution B (2x3)\n # --------------------------------------------------\n layerName = \"layer%s_ConvB\" % (id)\n padded_input_B = tf.pad(input_data, [[0, 0], [1, 0], [1, 1], [0, 0]], \"CONSTANT\")\n self.feed(padded_input_B)\n self.conv(2, 3, size[3], stride, stride, name = layerName, padding = 'VALID', relu = False)\n outputB = self.get_output()\n\n # Convolution C (3x2)\n # --------------------------------------------------\n layerName = \"layer%s_ConvC\" % (id)\n padded_input_C = tf.pad(input_data, [[0, 0], [1, 1], [1, 0], [0, 0]], \"CONSTANT\")\n self.feed(padded_input_C)\n self.conv(3, 2, size[3], stride, stride, name = layerName, padding = 'VALID', relu = False)\n outputC = self.get_output()\n\n # Convolution D (2x2)\n # --------------------------------------------------\n layerName = \"layer%s_ConvD\" % (id)\n padded_input_D = tf.pad(input_data, [[0, 0], [1, 0], [1, 0], [0, 0]], \"CONSTANT\")\n self.feed(padded_input_D)\n self.conv(2, 2, size[3], stride, stride, name = layerName, padding = 'VALID', relu = False)\n outputD = self.get_output()\n\n # Interleaving elements of the four feature maps\n # --------------------------------------------------\n left = interleave([outputA, outputB], axis=1) # columns\n right = interleave([outputC, outputD], axis=1) # columns\n Y = interleave([left, right], axis=2) # rows\n \n if BN:\n layerName = \"layer%s_BN\" % (id)\n self.feed(Y)\n self.batch_normalization(name = layerName, scale_offset = True, relu = False)\n Y = self.get_output()\n\n if ReLU:\n Y = tf.nn.relu(Y, name = layerName)\n \n return Y\n\n\n def up_project(self, size, id, stride = 1, BN = True):\n \n # Create residual upsampling layer (UpProjection)\n\n input_data = self.get_output()\n\n # Branch 1\n id_br1 = \"%s_br1\" % (id)\n\n # Interleaving Convs of 1st branch\n out = self.unpool_as_conv(size, input_data, id_br1, stride, ReLU=True, BN=True)\n\n # Convolution following the upProjection on the 1st branch\n layerName = \"layer%s_Conv\" % (id)\n self.feed(out)\n self.conv(size[0], size[1], size[3], stride, stride, name = layerName, relu = False)\n\n if BN:\n layerName = \"layer%s_BN\" % (id)\n self.batch_normalization(name = layerName, scale_offset=True, relu = False)\n\n # Output of 1st branch\n branch1_output = self.get_output()\n\n \n # Branch 2\n id_br2 = \"%s_br2\" % (id)\n # Interleaving convolutions and output of 2nd branch\n branch2_output = self.unpool_as_conv(size, input_data, id_br2, stride, ReLU=False)\n\n \n # sum branches\n layerName = \"layer%s_Sum\" % (id)\n output = tf.add_n([branch1_output, branch2_output], name = layerName)\n # ReLU\n layerName = \"layer%s_ReLU\" % (id)\n output = tf.nn.relu(output, name=layerName)\n\n self.feed(output)\n return self\n" ]
[ [ "tensorflow.compat.v1.nn.dropout", "tensorflow.compat.v1.concat", "tensorflow.compat.v1.add_n", "tensorflow.compat.v1.reshape", "tensorflow.compat.v1.nn.avg_pool", "tensorflow.compat.v1.constant_initializer", "numpy.load", "tensorflow.compat.v1.nn.local_response_normalization", "tensorflow.compat.v1.variable_scope", "tensorflow.compat.v1.nn.softmax", "tensorflow.compat.v1.assign", "tensorflow.compat.v1.get_variable", "tensorflow.compat.v1.nn.batch_normalization", "tensorflow.compat.v1.split", "tensorflow.compat.v1.nn.moments", "tensorflow.compat.v1.nn.conv2d", "tensorflow.compat.v1.nn.relu", "tensorflow.compat.v1.nn.max_pool", "tensorflow.compat.v1.stack", "tensorflow.compat.v1.disable_v2_behavior", "tensorflow.compat.v1.control_dependencies", "numpy.shape", "tensorflow.compat.v1.nn.bias_add", "tensorflow.compat.v1.squeeze", "tensorflow.compat.v1.pad" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
arjunchandra/continuous-rl
[ "8f3c655c6a4b2e9d15a6b052e5466c0a75191a08" ]
[ "code/nn.py" ]
[ "\"\"\"Some nn utilities.\"\"\"\nimport torch\nfrom abstract import ParametricFunction\n\ndef copy_buffer(net: ParametricFunction, target_net: ParametricFunction):\n \"\"\"Copy all buffers from net to target_net.\"\"\"\n with torch.no_grad():\n for target_buf, buf in zip(target_net.buffers(), net.buffers()): # type: ignore\n target_buf.copy_(buf)\n\ndef soft_update(net: ParametricFunction, target_net: ParametricFunction, tau: float):\n \"\"\"Soft update of the parameters of target_net with those of net.\n\n Precisely\n theta_targetnet <- tau * theta_targetnet + (1 - tau) * theta_net\n \"\"\"\n copy_buffer(net, target_net)\n with torch.no_grad():\n for target_param, param in zip(target_net.parameters(), net.parameters()):\n target_param.add_(1 - tau, param - target_param)\n\ndef hard_update(net: ParametricFunction, target_net: ParametricFunction):\n \"\"\"Hard update (i.e. copy) of the parameters of target_net with those of net.\"\"\"\n copy_buffer(net, target_net)\n with torch.no_grad():\n for target_param, param in zip(target_net.parameters(), net.parameters()):\n target_param.copy_(param)\n" ]
[ [ "torch.no_grad" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
sotudian/Natural-Language-Processing
[ "61ba2ac78e440683519d2121ca2b29a17277e46b" ]
[ "LSTM for language modeling/Question2_Part_1_To_2.py" ]
[ "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nTrain the language model on texts from the file pride And Prejudice. Before using it to train the language model,\nyou need to first sentence segment, then tokenize, then lower case each line of the file using Spacy. Append \nstart-of-sentence token ’<s>’ and end-of-sentence ’</s>’ token to each sentence and put each sentence in its own line.\n Use only words that appear more than once in this corpus and assign UNK tokens for the rest; you may also need to\n pad sentences that are shorter than 5. Train the language model and save the trained model. Generate 10 examples\n of text from it, starting from ’<s>’ token and ending at ’</s>’ token.\n\n\n\n\n@author: shahab Sotudian\n\"\"\"\n\nimport re\nimport pickle\nimport random\nimport numpy as np\nimport pandas as pd\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport nltk\nfrom nltk.tokenize import sent_tokenize, word_tokenize \nfrom nltk.lm.preprocessing import pad_both_ends\nfrom collections import Counter\nimport math\n# Functions ###########================-------------------\n'''\n############################################################\n#### Piazza calculate Preplexity\nnet.cuda()\nnet.eval()\nH = 0\nTOTAL_PROBs = 1\nwith torch.no_grad():\n for Test_Sentence in Test_1_Preprocessed_Pride_Text_Perplexity:\n H += len(Test_Sentence)\n # Calculate for each sentence\n Total_prob_Sentence = 1\n for i,word in enumerate(Test_Sentence):\n if i == len(Test_Sentence)-1:\n continue\n else:\n if i==0:\n h = net.init_hidden(1)\n h = tuple([each.data for each in h])\n else:\n h = h_new\n \n x = np.array([[word2idx[word]]])\n inputs = torch.from_numpy(x)\n inputs = inputs.cuda()\n \n out, h_new = net(inputs, h)\n # get the token probabilities\n p = F.softmax(out, dim=1).data\n p = p.cpu()\n p = p.numpy()\n p = p.reshape(p.shape[1],)\n Prob_next_Word = p[word2idx[Test_Sentence[i+1]]] # P(w4|w1,w2,w3)\n Total_prob_Sentence = Prob_next_Word * Total_prob_Sentence\n \n TOTAL_PROBs = TOTAL_PROBs * Total_prob_Sentence\n\nPreplexity = (1/TOTAL_PROBs)**(1/float(H)) \n############################################################\n''' \ndef NLP_PreProcessing(text_main): \n # sentence segmenting \n sentences = nltk.sent_tokenize(text_main) \n # Tokenization + lower casing \n Tokenized_sentences = [word_tokenize(S.lower()) for S in sentences] \n # Padding \n Pad_Tokenized_sentences = [list(pad_both_ends(TS, n=2)) for TS in Tokenized_sentences]\n \n return Pad_Tokenized_sentences\n\ndef NLP_PreProcessing_Test(text_main): \n # Tokenization + lower casing \n Tokenized_sentences = word_tokenize(text_main.lower())\n # Padding \n Pad_Tokenized_sentences = [list(pad_both_ends(Tokenized_sentences, n=2))]\n \n return Pad_Tokenized_sentences \n \ndef Equal_seq(text, seq_len):\n sequences = []\n if len(text) > seq_len:\n for i in range(seq_len, (len(text)+1)):\n seq = text[i-seq_len:i]\n sequences.append(seq)\n else:\n sequences = [['_PAD']*(seq_len-len(text)) + text ]\n \n return sequences \n \n\n\n\ndef get_batches(arr_x, arr_y, batch_size):\n \n # iterate through the arrays\n prv = 0\n for n in range(batch_size, arr_x.shape[0], batch_size):\n x = arr_x[prv:n,:]\n y = arr_y[prv:n,:]\n prv = n\n yield x, y\n \n \nclass WordLSTM(nn.Module):\n \n def __init__(self, n_hidden=256, n_layers=4, drop_prob=0.3, lr=0.001):\n super().__init__()\n\n self.drop_prob = drop_prob\n self.n_layers = n_layers\n self.n_hidden = n_hidden\n self.lr = lr\n \n self.emb_layer = nn.Embedding(vocab_size, 200)\n\n 
## define the LSTM\n self.lstm = nn.LSTM(200, n_hidden, n_layers, \n dropout=drop_prob, batch_first=True)\n \n ## define a dropout layer\n self.dropout = nn.Dropout(drop_prob)\n \n ## define the fully-connected layer\n self.fc = nn.Linear(n_hidden, vocab_size) \n \n def forward(self, x, hidden):\n ''' Forward pass through the network. \n These inputs are x, and the hidden/cell state `hidden`. '''\n\n ## pass input through embedding layer\n embedded = self.emb_layer(x) \n \n ## Get the outputs and the new hidden state from the lstm\n lstm_output, hidden = self.lstm(embedded, hidden)\n \n ## pass through a dropout layer\n out = self.dropout(lstm_output)\n \n #out = out.contiguous().view(-1, self.n_hidden) \n out = out.reshape(-1, self.n_hidden) \n\n ## put \"out\" through the fully-connected layer\n out = self.fc(out)\n\n # return the final output and the hidden state\n return out, hidden\n \n \n def init_hidden(self, batch_size):\n ''' initializes hidden state '''\n # Create two new tensors with sizes n_layers x batch_size x n_hidden,\n # initialized to zero, for hidden state and cell state of LSTM\n weight = next(self.parameters()).data\n\n # if GPU is available\n if (torch.cuda.is_available()):\n hidden = (weight.new(self.n_layers, batch_size, self.n_hidden).zero_().cuda(),\n weight.new(self.n_layers, batch_size, self.n_hidden).zero_().cuda())\n \n # if GPU is not available\n else:\n hidden = (weight.new(self.n_layers, batch_size, self.n_hidden).zero_(),\n weight.new(self.n_layers, batch_size, self.n_hidden).zero_())\n \n return hidden \n \ndef train(net, epochs, batch_size, lr, clip, print_every,XX,YY):\n \n # optimizer\n opt = torch.optim.Adam(net.parameters(), lr=lr)\n \n # loss\n criterion = nn.CrossEntropyLoss()\n \n # push model to GPU\n net.cuda()\n \n counter = 0\n\n net.train()\n\n for e in range(epochs):\n\n # initialize hidden state\n h = net.init_hidden(batch_size)\n \n for x, y in get_batches(XX, YY, batch_size):\n counter+= 1\n \n # convert numpy arrays to PyTorch arrays\n inputs, targets = torch.from_numpy(x), torch.from_numpy(y)\n \n # push tensors to GPU\n inputs, targets = inputs.cuda(), targets.cuda()\n\n # detach hidden states\n h = tuple([each.data for each in h])\n\n # zero accumulated gradients\n net.zero_grad()\n \n # get the output from the model\n output, h = net(inputs, h)\n \n # calculate the loss and perform backprop\n loss = criterion(output, targets.view(-1))\n\n # back-propagate error\n loss.backward()\n\n # `clip_grad_norm` helps prevent the exploding gradient problem in RNNs / LSTMs.\n nn.utils.clip_grad_norm_(net.parameters(), clip)\n\n # update weigths\n opt.step() \n \n if counter % print_every == 0:\n \n print(\"Epoch: {}/{}...\".format(e+1, epochs),\n \"Step: {}...\".format(counter)) \n \ndef predict(net, tkn, h=None, word2idx_Inp = None, idx2word_Inp =None ): \n # tensor inputs\n x = np.array([[word2idx_Inp[tkn]]])\n inputs = torch.from_numpy(x)\n \n # push to GPU\n inputs = inputs.cuda()\n\n # detach hidden state from history\n h = tuple([each.data for each in h])\n\n # get the output of the model\n out, h = net(inputs, h)\n\n # get the token probabilities\n p = F.softmax(out, dim=1).data\n\n p = p.cpu()\n\n p = p.numpy()\n p = p.reshape(p.shape[1],)\n\n # get indices of top 3 values\n top_n_idx = p.argsort()[-3:][::-1]\n\n # randomly select one of the three indices\n sampled_token_index = top_n_idx[random.sample([0,1,2],1)[0]]\n\n # return the encoded value of the predicted char and the hidden state\n return idx2word_Inp[sampled_token_index], 
h\n\n\n# function to generate text\ndef sample(net, size, prime=\"<s>\",word2idx_Inp = None, idx2word_Inp =None ):\n \n # push to GPU\n net.cuda()\n \n net.eval()\n\n # batch size is 1\n h = net.init_hidden(1)\n\n toks = prime.split()\n\n # predict next token\n for t in prime.split():\n token, h = predict(net, t, h,word2idx_Inp,idx2word_Inp)\n \n toks.append(token)\n\n # predict subsequent tokens\n if size == '</s>':\n while(token!='</s>'):\n token, h = predict(net, toks[-1], h,word2idx_Inp,idx2word_Inp)\n toks.append(token)\n else: \n for i in range(size-1):\n token, h = predict(net, toks[-1], h,word2idx_Inp,idx2word_Inp)\n toks.append(token)\n\n return ' '.join(toks) \n\n\n\n\n\ndef Testing(net, batch_size,Test_X,Test_Y):\n net.eval()\n criterion = nn.CrossEntropyLoss()\n # initialize hidden state\n h = net.init_hidden(batch_size)\n test_loss = 0.\n \n with torch.no_grad():\n for x, y in get_batches(Test_X, Test_Y, batch_size):\n # convert numpy arrays to PyTorch arrays\n inputs, targets = torch.from_numpy(x), torch.from_numpy(y) \n # push tensors to GPU\n inputs, targets = inputs.cuda(), targets.cuda() \n # detach hidden states\n h = tuple([each.data for each in h])\n # get the output from the model\n output, h = net(inputs, h) \n test_loss += criterion(output, targets.view(-1)).item()\n\n test_loss = test_loss / ((len(Test_X) // batch_size)+1)\n print('-' * 40)\n print('Test loss {:5.2f} ------ Test perplexity {:8.2f}'.format(test_loss, math.exp(test_loss)))\n print('-' * 40)\n\n\n \nclass WordLSTM_with_Glove(nn.Module): \n \n def __init__(self, n_hidden=256, n_layers=4, drop_prob=0.3, lr=0.001):\n super().__init__()\n\n self.drop_prob = drop_prob\n self.n_layers = n_layers\n self.n_hidden = n_hidden\n self.lr = lr\n \n self.emb_layer = nn.Embedding(vocab_size_Q6,100, padding_idx=0) \n self.emb_layer.weight.data.copy_(torch.from_numpy(embedding_matrix))\n self.emb_layer.weight.requires_grad = False ## freeze embeddings \n '''\n self.emb_layer = nn.Embedding(vocab_size_Q6,100) \n self.emb_layer.weight = nn.Parameter(torch.from_numpy(embedding_matrix).float())\n '''\n ## define the LSTM\n self.lstm = nn.LSTM(100, n_hidden, n_layers, \n dropout=drop_prob, batch_first=True)\n \n ## define a dropout layer\n self.dropout = nn.Dropout(drop_prob)\n \n ## define the fully-connected layer\n self.fc = nn.Linear(n_hidden, vocab_size_Q6) \n \n def forward(self, x, hidden):\n ''' Forward pass through the network. \n These inputs are x, and the hidden/cell state `hidden`. 
'''\n\n ## pass input through embedding layer\n embedded = self.emb_layer(x) \n \n ## Get the outputs and the new hidden state from the lstm\n lstm_output, hidden = self.lstm(embedded, hidden)\n \n ## pass through a dropout layer\n out = self.dropout(lstm_output)\n \n #out = out.contiguous().view(-1, self.n_hidden) \n out = out.reshape(-1, self.n_hidden) \n\n ## put \"out\" through the fully-connected layer\n out = self.fc(out)\n\n # return the final output and the hidden state\n return out, hidden\n \n \n def init_hidden(self, batch_size):\n ''' initializes hidden state '''\n # Create two new tensors with sizes n_layers x batch_size x n_hidden,\n # initialized to zero, for hidden state and cell state of LSTM\n weight = next(self.parameters()).data\n\n # if GPU is available\n if (torch.cuda.is_available()):\n hidden = (weight.new(self.n_layers, batch_size, self.n_hidden).zero_().cuda(),\n weight.new(self.n_layers, batch_size, self.n_hidden).zero_().cuda())\n \n # if GPU is not available\n else:\n hidden = (weight.new(self.n_layers, batch_size, self.n_hidden).zero_(),\n weight.new(self.n_layers, batch_size, self.n_hidden).zero_())\n \n return hidden \n \n \n \n\n\n# Data ###########================-------------------\nwith open('prideAndPrejudice.txt') as f:\n Pride_Text = [line.rstrip() for line in f]\n\n\n\n# Q2.1 ###########================-------------------\n\n# sentence segmenting + lower casing + Tokenization + Padding using function NLP_PreProcessing\nPreprocessed_Pride_Text = []\nfor t in range(len(Pride_Text)):\n Preprocessed_Pride_Text = Preprocessed_Pride_Text + NLP_PreProcessing(Pride_Text[t]) \n\nLength_of_Sequences = 5\n\nPride_Text_Equal_seqs_L5 = sum([Equal_seq(i,Length_of_Sequences) for i in Preprocessed_Pride_Text], [])\n\ndel t,f\n\n# Create Vocab\nwords = Counter() \nfor i, sentence in enumerate(Preprocessed_Pride_Text):\n for word in sentence: \n words.update([word]) \nwords = {k:v for k,v in words.items() if v>1} # Removing the words that only appear once\ndel i,sentence,word\nwords = sorted(words, key=words.get, reverse=True) # Sorting the words\nwords = ['_PAD','_UNK'] + words\nword2idx = {o:i for i,o in enumerate(words)}\nidx2word = {i:o for i,o in enumerate(words)}\n# Looking up the mapping dictionary and assigning the index to the respective words\nPride_Text_Equal_seqs_INDICES_L5 =[]\nfor i, sentence in enumerate(Pride_Text_Equal_seqs_L5):\n Pride_Text_Equal_seqs_INDICES_L5.append([word2idx[word] if word in word2idx else word2idx['_UNK'] for word in sentence])\ndel i, sentence\n\n\nX = []\nY = []\nfor S in Pride_Text_Equal_seqs_INDICES_L5:\n X.append(S[:-1])\n Y.append(S[1:])\n\nx_int_L5 = np.array(X)\ny_int_L5 = np.array(Y)\n\n\nvocab_size = len(word2idx)\n\n\n# Train Or Load LSTM\nDo_want_To_Train = 0\nbatch_size = 320\nepochs=20\nlr=0.001\nif Do_want_To_Train == 1:\n net1 = WordLSTM() # instantiate the model\n net1.cuda() # push the model to GPU\n train(net1, epochs, batch_size, lr, 1, 50,x_int_L5,y_int_L5) # train the model\n torch.save(net1, 'Q2_Part_1_Network.pt')\nelse:\n net1 = torch.load('Q2_Part_1_Network.pt')\n net1.eval()\n \n\n\n\nprint(net1)\n\n# Generate text\nfor i in range(10):\n print('=======================================')\n print(\"- Example \"+str(i+1)+\": \",sample(net1, size='</s>' , prime=\"<s>\", word2idx_Inp = word2idx, idx2word_Inp =idx2word ),'\\n')\n\n\n\ndel X,Y,i,S,Do_want_To_Train\n\n\n\nprint('=' * 60)\n\n\n\n\n# Q2.2 ###########================-------------------\n\nwith open('test_1.txt') as f:\n test_1 = [line.rstrip() for 
line in f]\n\n# sentence segmenting + lower casing + Tokenization + Padding using function NLP_PreProcessing_Test\nTest_1_Preprocessed_Pride_Text = []\nfor t in range(len(test_1)):\n Test_1_Preprocessed_Pride_Text = Test_1_Preprocessed_Pride_Text + NLP_PreProcessing_Test((test_1[t])[4:-5]) \n\nTest_1_Pride_Text_Equal_seqs = sum([Equal_seq(i,Length_of_Sequences) for i in Test_1_Preprocessed_Pride_Text], []) \n\ndel t,f\n# Looking up the mapping dictionary and assigning the index to the respective words\nTest_1_Pride_Text_Equal_seqs_INDICES =[]\nfor i, sentence in enumerate(Test_1_Pride_Text_Equal_seqs):\n Test_1_Pride_Text_Equal_seqs_INDICES.append([word2idx[word] if word in word2idx else word2idx['_UNK'] for word in sentence])\ndel i, sentence\n\n\nTest_1_X = []\nTest_1_Y = []\nfor S in Test_1_Pride_Text_Equal_seqs_INDICES:\n Test_1_X.append(S[:-1])\n Test_1_Y.append(S[1:])\n\nTest_1_x_int = np.array(Test_1_X)\nTest_1_y_int = np.array(Test_1_Y)\n\ndel Test_1_X,Test_1_Y,S\n# Calculate Perplexity\nTesting(net1, batch_size ,Test_1_x_int,Test_1_y_int) \n\n\ndel Pride_Text,Length_of_Sequences\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" ]
[ [ "torch.nn.CrossEntropyLoss", "torch.nn.Dropout", "torch.nn.functional.softmax", "torch.load", "torch.nn.LSTM", "torch.from_numpy", "torch.nn.Embedding", "torch.nn.Linear", "torch.no_grad", "torch.cuda.is_available", "numpy.array", "torch.save" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
ChenRocks/Distill-BERT-Textgen
[ "a3b0b22ce16febc4d3ffdbd8791ea3374110a892" ]
[ "dump_teacher_hiddens.py" ]
[ "\"\"\"\nCopyright (c) Microsoft Corporation.\nLicensed under the MIT license.\n\nprecompute hidden states of CMLM teacher to speedup KD training\n\"\"\"\nimport argparse\nimport io\nimport os\nimport shelve\n\nimport numpy as np\nimport torch\nfrom torch.utils.data import Dataset, DataLoader\nfrom tqdm import tqdm\nfrom pytorch_pretrained_bert import BertTokenizer\nfrom toolz.sandbox import unzip\n\nfrom cmlm.model import BertForSeq2seq\nfrom cmlm.data import convert_token_to_bert, CLS, SEP, MASK\n\n\ndef tensor_dumps(tensor):\n with io.BytesIO() as writer:\n np.save(writer, tensor.cpu().numpy().astype(np.float16),\n allow_pickle=False)\n dump = writer.getvalue()\n return dump\n\n\ndef gather_hiddens(hiddens, masks):\n outputs = []\n for hid, mask in zip(hiddens.split(1, dim=1), masks.split(1, dim=1)):\n if mask.sum().item() == 0:\n continue\n mask = mask.unsqueeze(-1).expand_as(hid)\n outputs.append(hid.masked_select(mask))\n output = torch.stack(outputs, dim=0)\n return output\n\n\nclass BertSampleDataset(Dataset):\n def __init__(self, corpus_path, tokenizer, num_samples=7):\n self.db = shelve.open(corpus_path, 'r')\n self.ids = []\n for i, ex in self.db.items():\n if len(ex['src']) + len(ex['tgt']) + 3 <= 512:\n self.ids.append(i)\n self.toker = tokenizer\n self.num_samples = num_samples\n\n def __len__(self):\n return len(self.ids)\n\n def __getitem__(self, i):\n id_ = self.ids[i]\n example = self.db[id_]\n features = convert_example(example['src'], example['tgt'],\n self.toker, self.num_samples)\n return (id_, ) + features\n\n\ndef convert_example(src, tgt, toker, num_samples):\n src = [convert_token_to_bert(tok) for tok in src]\n tgt = [convert_token_to_bert(tok) for tok in tgt] + [SEP]\n\n # build the random masks\n tgt_len = len(tgt)\n if tgt_len <= num_samples:\n masks = torch.eye(tgt_len).byte()\n num_samples = tgt_len\n else:\n mask_inds = [list(range(i, tgt_len, num_samples))\n for i in range(num_samples)]\n masks = torch.zeros(num_samples, tgt_len).byte()\n for i, indices in enumerate(mask_inds):\n for j in indices:\n masks.data[i, j] = 1\n assert (masks.sum(dim=0) != torch.ones(tgt_len).long()).sum().item() == 0\n assert masks.sum().item() == tgt_len\n masks = torch.cat([torch.zeros(num_samples, len(src)+2).byte(), masks],\n dim=1)\n\n # make BERT inputs\n input_ids = toker.convert_tokens_to_ids([CLS] + src + [SEP] + tgt)\n mask_id = toker.convert_tokens_to_ids([MASK])[0]\n input_ids = torch.tensor([input_ids for _ in range(num_samples)])\n input_ids.data.masked_fill_(masks, mask_id)\n token_ids = torch.tensor([[0] * (len(src) + 2) + [1] * len(tgt)\n for _ in range(num_samples)])\n return input_ids, token_ids, masks\n\n\ndef batch_features(features):\n ids, all_input_ids, all_token_ids, all_masks = map(list, unzip(features))\n batch_size = sum(input_ids.size(0) for input_ids in all_input_ids)\n max_len = max(input_ids.size(1) for input_ids in all_input_ids)\n input_ids = torch.zeros(batch_size, max_len).long()\n token_ids = torch.zeros(batch_size, max_len).long()\n attn_mask = torch.zeros(batch_size, max_len).long()\n i = 0\n for inp, tok in zip(all_input_ids, all_token_ids):\n block, len_ = inp.size()\n input_ids.data[i: i+block, :len_] = inp.data\n token_ids.data[i: i+block, :len_] = tok.data\n attn_mask.data[i: i+block, :len_].fill_(1)\n i += block\n return ids, input_ids, token_ids, attn_mask, all_masks\n\n\ndef process_batch(batch, bert, toker, num_samples=7):\n input_ids, token_ids, attn_mask, all_masks = batch\n input_ids = input_ids.cuda()\n token_ids = 
token_ids.cuda()\n attn_mask = attn_mask.cuda()\n hiddens, _ = bert.bert(input_ids, token_ids, attn_mask,\n output_all_encoded_layers=False)\n hiddens = bert.cls.predictions.transform(hiddens)\n i = 0\n outputs = []\n for masks in all_masks:\n block, len_ = masks.size()\n hids = hiddens[i:i+block, :len_, :]\n masks = masks.cuda()\n outputs.append(gather_hiddens(hids, masks))\n i += block\n return outputs\n\n\ndef build_db_batched(corpus, out_db, bert, toker, batch_size=8):\n dataset = BertSampleDataset(corpus, toker)\n loader = DataLoader(dataset, batch_size=batch_size,\n num_workers=4, collate_fn=batch_features)\n with tqdm(desc='computing BERT features', total=len(dataset)) as pbar:\n for ids, *batch in loader:\n outputs = process_batch(batch, bert, toker)\n for id_, output in zip(ids, outputs):\n out_db[id_] = tensor_dumps(output)\n pbar.update(len(ids))\n\n\ndef main(opts):\n # load BERT\n state_dict = torch.load(opts.ckpt)\n vsize = state_dict['cls.predictions.decoder.weight'].size(0)\n bert = BertForSeq2seq.from_pretrained(opts.bert).eval().half().cuda()\n bert.update_output_layer_by_size(vsize)\n bert.load_state_dict(state_dict)\n toker = BertTokenizer.from_pretrained(opts.bert,\n do_lower_case='uncased' in opts.bert)\n\n # save the final projection layer\n linear = torch.nn.Linear(bert.config.hidden_size, bert.config.vocab_size)\n linear.weight.data = state_dict['cls.predictions.decoder.weight']\n linear.bias.data = state_dict['cls.predictions.bias']\n os.makedirs(opts.output)\n torch.save(linear, f'{opts.output}/linear.pt')\n\n # create DB\n with shelve.open(f'{opts.output}/db') as out_db, \\\n torch.no_grad():\n build_db_batched(opts.db, out_db, bert, toker)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--bert', required=True,\n choices=['bert-base-uncased',\n 'bert-base-multilingual-cased'],\n help='BERT model')\n parser.add_argument('--ckpt', required=True, help='BERT checkpoint')\n parser.add_argument('--db', required=True, help='dataset to compute')\n parser.add_argument('--output', required=True, help='path to dump output')\n args = parser.parse_args()\n\n main(args)\n" ]
[ [ "torch.ones", "torch.load", "torch.zeros", "torch.utils.data.DataLoader", "torch.eye", "torch.nn.Linear", "torch.no_grad", "torch.stack", "torch.save" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
mikimaus78/ml_monorepo
[ "b2c2627ff0e86e27f6829170d0dac168d8e5783b", "b2c2627ff0e86e27f6829170d0dac168d8e5783b", "b2c2627ff0e86e27f6829170d0dac168d8e5783b", "b2c2627ff0e86e27f6829170d0dac168d8e5783b", "b2c2627ff0e86e27f6829170d0dac168d8e5783b" ]
[ "trading-with-python/util/trendy.py", "BiBloSA/exp_SQuAD_sim/src/nn_utils/baselines/block_attention.py", "ufcnn-keras/models/a3c/game_ac_network_v2.py", "pymf/pymf/svd.py", "pymf/pymf/cmde.py" ]
[ "import numpy as np\nfrom filter import movingaverage\n\ndef gentrends(x, window=1/3.0, charts=True):\n \"\"\"\n Returns a Pandas dataframe with support and resistance lines.\n\n :param x: One-dimensional data set\n :param window: How long the trendlines should be. If window < 1, then it\n will be taken as a percentage of the size of the data\n :param charts: Boolean value saying whether to print chart to screen\n\n \"\"\"\n import numpy as np\n import pandas.io.data as pd\n\n x = np.array(x)\n\n if window < 1:\n window = int(window * len(x))\n\n max1 = np.where(x == max(x))[0][0] # find the index of the abs max\n min1 = np.where(x == min(x))[0][0] # find the index of the abs min\n\n # First the max\n if max1 + window > len(x):\n max2 = max(x[0:(max1 - window)])\n else:\n max2 = max(x[(max1 + window):])\n\n # Now the min\n if min1 - window < 0:\n min2 = min(x[(min1 + window):])\n else:\n min2 = min(x[0:(min1 - window)])\n\n # Now find the indices of the secondary extrema\n max2 = np.where(x == max2)[0][0] # find the index of the 2nd max\n min2 = np.where(x == min2)[0][0] # find the index of the 2nd min\n\n # Create & extend the lines\n maxslope = (x[max1] - x[max2]) / (max1 - max2) # slope between max points\n minslope = (x[min1] - x[min2]) / (min1 - min2) # slope between min points\n a_max = x[max1] - (maxslope * max1) # y-intercept for max trendline\n a_min = x[min1] - (minslope * min1) # y-intercept for min trendline\n b_max = x[max1] + (maxslope * (len(x) - max1)) # extend to last data pt\n b_min = x[min1] + (minslope * (len(x) - min1)) # extend to last data point\n maxline = np.linspace(a_max, b_max, len(x)) # Y values between max's\n minline = np.linspace(a_min, b_min, len(x)) # Y values between min's\n\n # OUTPUT\n trends = np.transpose(np.array((x, maxline, minline)))\n trends = pd.DataFrame(trends, index=np.arange(0, len(x)),\n columns=['Data', 'Max Line', 'Min Line'])\n\n if charts is True:\n from matplotlib.pyplot import plot, grid, show, figure\n figure()\n plot(trends)\n grid()\n show()\n\n return trends, maxslope, minslope\n\ndef segtrends(x, segments=2, charts=True, window=7):\n \"\"\"\n Turn minitrends to iterative process more easily adaptable to\n implementation in simple trading systems; allows backtesting functionality.\n\n :param x: One-dimensional data set\n :param window: How long the trendlines should be. 
If window < 1, then it\n will be taken as a percentage of the size of the data\n :param charts: Boolean value saying whether to print chart to screen\n \"\"\"\n\n import numpy as np\n y = np.array(x)\n n=len(y)\n movy = movingaverage(y, window)\n # Implement trendlines and Find the indexes of these maxima in the data\n segments = int(segments)\n maxima = np.ones(segments)\n minima = np.ones(segments) \n x_maxima = np.ones(segments)\n x_minima = np.ones(segments)\n segsize = int(len(y)/segments)\n for i in range(1, segments+1):\n ind2 = i*segsize\n ind1 = ind2 - segsize\n seg = y[ind1:ind2]\n maxima[i-1] = max(seg)\n minima[i-1] = min(seg)\n x_maxima[i-1] = ind1 + (np.where(seg == maxima[i-1])[0][0])\n x_minima[i-1] = ind1 + (np.where(seg == minima[i-1])[0][0])\n\n if charts:\n import matplotlib.pyplot as plt\n plt.plot(y)\n plt.grid(True)\n\n for i in range(0, segments-1):\n maxslope = (maxima[i+1] - maxima[i]) / (x_maxima[i+1] - x_maxima[i])\n a_max = maxima[i] - (maxslope * x_maxima[i])\n b_max = maxima[i] + (maxslope * (len(y) - x_maxima[i]))\n maxline = np.linspace(a_max, b_max, len(y))\n\n minslope = (minima[i+1] - minima[i]) / (x_minima[i+1] - x_minima[i])\n a_min = minima[i] - (minslope * x_minima[i])\n b_min = minima[i] + (minslope * (len(y) - x_minima[i]))\n minline = np.linspace(a_min, b_min, len(y))\n\n if charts:\n #plt.plot(maxline, 'g')\n #plt.plot(minline, 'r')\n pass\n\n if charts:\n plt.plot(range(n), movy, 'b')\n plt.plot(x_maxima, maxima, 'g')\n plt.plot(x_minima, minima, 'r')\n plt.show()\n\n # OUTPUT\n return x_maxima, maxima, x_minima, minima\n\ndef minitrends(x, window=20, charts=True):\n \"\"\"\n Turn minitrends to iterative process more easily adaptable to\n implementation in simple trading systems; allows backtesting functionality.\n\n :param x: One-dimensional data set\n :param window: How long the trendlines should be. 
If window < 1, then it\n will be taken as a percentage of the size of the data\n :param charts: Boolean value saying whether to print chart to screen\n \"\"\"\n\n import numpy as np\n\n y = np.array(x)\n if window < 1: # if window is given as fraction of data length\n window = float(window)\n window = int(window * len(y))\n x = np.arange(0, len(y))\n dy = y[window:] - y[:-window]\n crit = dy[:-1] * dy[1:] < 0\n\n # Find whether max's or min's\n maxi = (y[x[crit]] - y[x[crit] + window] > 0) & \\\n (y[x[crit]] - y[x[crit] - window] > 0) * 1\n mini = (y[x[crit]] - y[x[crit] + window] < 0) & \\\n (y[x[crit]] - y[x[crit] - window] < 0) * 1\n maxi = maxi.astype(float)\n mini = mini.astype(float)\n maxi[maxi == 0] = np.nan\n mini[mini == 0] = np.nan\n xmax = x[crit] * maxi\n xmax = xmax[~np.isnan(xmax)]\n xmax = xmax.astype(int)\n xmin = x[crit] * mini\n xmin = xmin[~np.isnan(xmin)]\n xmin = xmin.astype(int)\n\n # See if better max or min in region\n yMax = np.array([])\n xMax = np.array([])\n for i in xmax:\n indx = np.where(xmax == i)[0][0] + 1\n try:\n Y = y[i:xmax[indx]]\n yMax = np.append(yMax, Y.max())\n xMax = np.append(xMax, np.where(y == yMax[-1])[0][0])\n except:\n pass\n yMin = np.array([])\n xMin = np.array([])\n for i in xmin:\n indx = np.where(xmin == i)[0][0] + 1\n try:\n Y = y[i:xmin[indx]]\n yMin = np.append(yMin, Y.min())\n xMin = np.append(xMin, np.where(y == yMin[-1])[0][0])\n except:\n pass\n if y[-1] > yMax[-1]:\n yMax = np.append(yMax, y[-1])\n xMax = np.append(xMax, x[-1])\n if y[0] not in yMax:\n yMax = np.insert(yMax, 0, y[0])\n xMax = np.insert(xMax, 0, x[0])\n if y[-1] < yMin[-1]:\n yMin = np.append(yMin, y[-1])\n xMin = np.append(xMin, x[-1])\n if y[0] not in yMin:\n yMin = np.insert(yMin, 0, y[0])\n xMin = np.insert(xMin, 0, x[0])\n\n # Plot results if desired\n if charts is True:\n from matplotlib.pyplot import plot, show, grid\n plot(x, y)\n plot(xMax, yMax, '-o')\n plot(xMin, yMin, '-o')\n grid(True)\n show()\n # Return arrays of critical points\n return xMax, yMax, xMin, yMin\n\ndef iterlines(x, window=30, charts=True):\n \"\"\"\n Turn minitrends to iterative process more easily adaptable to\n implementation in simple trading systems; allows backtesting functionality.\n\n :param x: One-dimensional data set\n :param window: How long the trendlines should be. 
If window < 1, then it\n will be taken as a percentage of the size of the data\n :param charts: Boolean value saying whether to print chart to screen\n \"\"\"\n\n import numpy as np\n\n x = np.array(x)\n n = len(x)\n if window < 1:\n window = int(window * n)\n sigs = np.zeros(n, dtype=float)\n\n i = window\n while i != n:\n if x[i] > max(x[i-window:i]): sigs[i] = 1\n elif x[i] < min(x[i-window:i]): sigs[i] = -1\n i += 1\n\n xmin = np.where(sigs == -1.0)[0]\n xmax = np.where(sigs == 1.0)[0]\n ymin = x[xmin]\n ymax = x[xmax]\n if charts is True:\n from matplotlib.pyplot import plot, grid, show\n plot(x)\n plot(xmin, ymin, 'ro')\n plot(xmax, ymax, 'go')\n grid(True)\n show()\n\n return sigs\n", "import tensorflow as tf\n\nfrom src.nn_utils.general import exp_mask_for_high_rank, mask_for_high_rank\nfrom src.nn_utils.integration_func import directional_attention_with_dense\nfrom src.nn_utils.nn import bn_dense_layer, linear\n\n\ndef bi_directional_simple_block_attention(\n rep_tensor, rep_mask, block_len=5, scope=None,\n keep_prob=1., is_train=None, wd=0., activation='elu', hn=None):\n with tf.variable_scope(scope or 'bi_directional_simple_block_attn'):\n\n fw_attn_res = simple_block_attention(\n rep_tensor, rep_mask, block_len, \"forward_attn\", \"forward\",\n keep_prob, is_train, wd, activation, hn)\n bw_attn_res = simple_block_attention(\n rep_tensor, rep_mask, block_len, \"backward_attn\", \"backward\",\n keep_prob, is_train, wd, activation, hn)\n attn_res = tf.concat([fw_attn_res, bw_attn_res], -1)\n return attn_res\n\n\ndef simple_block_attention(\n rep_tensor, rep_mask, block_len=5, scope=None, direction=None,\n keep_prob=1., is_train=None, wd=0., activation='elu', hn=None):\n assert direction is not None\n\n def scaled_tanh(x, scale=5.):\n return scale * tf.nn.tanh(1. / scale * x)\n\n bs, sl, vec = tf.shape(rep_tensor)[0], tf.shape(rep_tensor)[1], tf.shape(rep_tensor)[2]\n ivec = hn or rep_tensor.get_shape().as_list()[2]\n input_dim = rep_tensor.get_shape().as_list()[2]\n with tf.variable_scope(scope or 'block_simple'):\n # @1. 
split sequence\n with tf.variable_scope('split_seq'):\n block_num = tf.cast(tf.ceil(tf.divide(tf.cast(sl, tf.float32), tf.cast(block_len, tf.float32))), tf.int32)\n comp_len = block_num * block_len - sl\n\n rep_tensor_comp = tf.concat([rep_tensor, tf.zeros([bs, comp_len, input_dim], tf.float32)], 1)\n rep_mask_comp = tf.concat([rep_mask, tf.cast(tf.zeros([bs, comp_len], tf.int32), tf.bool)], 1)\n\n rep_tensor_split = tf.reshape(rep_tensor_comp, [bs, block_num, block_len, input_dim]) # bs,bn,bl,d\n rep_mask_split = tf.reshape(rep_mask_comp, [bs, block_num, block_len]) # bs,bn,bl\n\n # non-linear\n rep_map = bn_dense_layer(rep_tensor_split, ivec, True, 0., 'bn_dense_map', activation,\n False, wd, keep_prob, is_train) # bs,bn,bl,vec\n rep_map_tile = tf.tile(tf.expand_dims(rep_map, 2), [1, 1, block_len, 1, 1]) # bs,bn,bl,bl,vec\n # rep_map_dp = dropout(rep_map, keep_prob, is_train)\n bn = block_num\n bl = block_len\n\n with tf.variable_scope('self_attention'):\n # @2.self-attention in block\n # mask generation\n sl_indices = tf.range(block_len, dtype=tf.int32)\n sl_col, sl_row = tf.meshgrid(sl_indices, sl_indices)\n if direction == 'forward':\n direct_mask = tf.greater(sl_row, sl_col) # bl,bl\n else:\n direct_mask = tf.greater(sl_col, sl_row) # bl,bl\n direct_mask_tile = tf.tile(\n tf.expand_dims(tf.expand_dims(direct_mask, 0), 0), [bs, bn, 1, 1]) # bs,bn,bl,bl\n rep_mask_tile_1 = tf.tile(tf.expand_dims(rep_mask_split, 2), [1, 1, bl, 1]) # bs,bn,bl,bl\n rep_mask_tile_2 = tf.tile(tf.expand_dims(rep_mask_split, 3), [1, 1, 1, bl]) # bs,bn,bl,bl\n rep_mask_tile = tf.logical_and(rep_mask_tile_1, rep_mask_tile_2)\n attn_mask = tf.logical_and(direct_mask_tile, rep_mask_tile, name='attn_mask') # bs,bn,bl,bl\n\n # attention\n f_bias = tf.get_variable('f_bias', [ivec], tf.float32, tf.constant_initializer(0.))\n dependent_head = linear(\n rep_map, 2 * ivec, False, 0., 'linear_dependent_head', False, wd, keep_prob, is_train) # bs,bn,bl,2vec\n dependent, head = tf.split(dependent_head, 2, 3)\n dependent_etd = tf.expand_dims(dependent, 2) # bs,bn,1,bl,vec\n head_etd = tf.expand_dims(head, 3) # bs,bn,bl,1,vec\n logits = scaled_tanh(dependent_etd + head_etd + f_bias, 5.0) # bs,bn,bl,bl,vec\n logits_masked = exp_mask_for_high_rank(logits, attn_mask)\n attn_score = tf.nn.softmax(logits_masked, 3) # bs,bn,bl,bl,vec\n attn_score = mask_for_high_rank(attn_score, attn_mask) # bs,bn,bl,bl,vec\n self_attn_result = tf.reduce_sum(attn_score * rep_map_tile, 3) # bs,bn,bl,vec\n\n with tf.variable_scope('source2token_self_attn'):\n inter_block_logits = bn_dense_layer(self_attn_result, ivec, True, 0., 'bn_dense_map', 'linear',\n False, wd, keep_prob, is_train) # bs,bn,bl,vec\n inter_block_logits_masked = exp_mask_for_high_rank(inter_block_logits, rep_mask_split) # bs,bn,bl,vec\n inter_block_soft = tf.nn.softmax(inter_block_logits_masked, 2) # bs,bn,bl,vec\n inter_block_attn_output = tf.reduce_sum(self_attn_result * inter_block_soft, 2) # bs,bn,vec\n\n with tf.variable_scope('self_attn_inter_block'):\n inter_block_attn_output_mask = tf.cast(tf.ones([bs, bn], tf.int32), tf.bool)\n block_ct_res = directional_attention_with_dense(\n inter_block_attn_output, inter_block_attn_output_mask, direction, 'disa',\n keep_prob, is_train, wd, activation\n ) # [bs,bn,vec]\n\n block_ct_res_tile = tf.tile(tf.expand_dims(block_ct_res, 2), [1, 1, bl, 1])#[bs,bn,vec]->[bs,bn,bl,vec]\n\n with tf.variable_scope('combination'):\n # input:1.rep_map[bs,bn,bl,vec]; 2.self_attn_result[bs,bn,bl,vec]; 3.rnn_res_tile[bs,bn,bl,vec]\n rep_tensor_with_ct = 
tf.concat([rep_map, self_attn_result, block_ct_res_tile], -1) # [bs,bn,bl,3vec]\n new_context_and_gate = linear(rep_tensor_with_ct, 2 * ivec, True, 0., 'linear_new_context_and_gate',\n False, wd, keep_prob, is_train) # [bs,bn,bl,2vec]\n new_context, gate = tf.split(new_context_and_gate, 2, 3) # bs,bn,bl,vec\n if activation == \"relu\":\n new_context_act = tf.nn.relu(new_context)\n elif activation == \"elu\":\n new_context_act = tf.nn.elu(new_context)\n elif activation == \"linear\":\n new_context_act = tf.identity(new_context)\n else:\n raise RuntimeError\n gate_sig = tf.nn.sigmoid(gate)\n combination_res = gate_sig * new_context_act + (1 - gate_sig) * rep_map # bs,bn,bl,vec\n\n with tf.variable_scope('restore_original_length'):\n combination_res_reshape = tf.reshape(combination_res, [bs, bn * bl, ivec]) # bs,bn*bl,vec\n output = combination_res_reshape[:, :sl, :]\n return output", "# -*- coding: utf-8 -*-\nimport tensorflow as tf\nimport numpy as np\nfrom custom_lstm import CustomBasicLSTMCell\nfrom constants import FEATURES_LIST, SEQUENCE_LENGTH\n\n# Actor-Critic Network Base Class\n# (Policy network and Value network)\n\nclass GameACNetwork(object):\n def __init__(self,\n action_size,\n device=\"/cpu:0\"):\n self._device = device\n self._action_size = action_size\n\n def prepare_loss(self, entropy_beta):\n with tf.device(self._device):\n # taken action (input for policy)\n self.a = tf.placeholder(\"float\", [None, self._action_size])\n \n # temporary difference (R-V) (input for policy)\n self.td = tf.placeholder(\"float\", [None])\n \n # policy entropy\n entropy = -tf.reduce_sum(self.pi * tf.log(self.pi), reduction_indices=1)\n\n ##self.pi = tf.Print(self.pi, [self.pi], message=\"This is self.pi: \", summarize=40)\n ##self.a = tf.Print(self.a, [self.a], message=\"This is self.a: \", summarize=40)\n \n # policy loss (output) (add minus, because this is for gradient ascent)\n policy_loss = - tf.reduce_sum( tf.reduce_sum( tf.mul( tf.log(self.pi), self.a ), reduction_indices=1 ) * self.td + entropy * entropy_beta )\n\n # R (input for value)\n self.r = tf.placeholder(\"float\", [None])\n \n # value loss (output)\n # (Learning rate for Critic is half of Actor's, so multiply by 0.5)\n ##print(\"HHH\",self.r.get_shape())\n ##print(\"HHH\",self.v.get_shape())\n ##self.r = tf.Print(self.r, [self.r], message=\"This is self.r: \", summarize=40)\n ##self.v = tf.Print(self.v, [self.v], message=\"This is self.v: \", summarize=40)\n value_loss = 0.5 * tf.nn.l2_loss(self.r - self.v)\n\n # gradienet of policy and value are summed up\n self.total_loss = policy_loss + value_loss\n\n def run_policy_and_value(self, sess, s_t):\n raise NotImplementedError()\n \n def run_policy(self, sess, s_t):\n raise NotImplementedError()\n\n def run_value(self, sess, s_t):\n raise NotImplementedError() \n\n def get_vars(self):\n raise NotImplementedError()\n\n def sync_from(self, src_netowrk, name=None):\n src_vars = src_netowrk.get_vars()\n dst_vars = self.get_vars()\n\n sync_ops = []\n\n with tf.device(self._device):\n with tf.op_scope([], name, \"GameACNetwork\") as name:\n for(src_var, dst_var) in zip(src_vars, dst_vars):\n sync_op = tf.assign(dst_var, src_var)\n sync_ops.append(sync_op)\n\n return tf.group(*sync_ops, name=name)\n\n # weight initialization based on muupan's code\n # https://github.com/muupan/async-rl/blob/master/a3c_ale.py\n def _fc_weight_variable(self, shape):\n \"\"\"\n shape[0] ... number of input channels \n shape[1] ... 
number of nodes \n \"\"\"\n input_channels = shape[0]\n d = 1.0 / np.sqrt(input_channels)\n initial = tf.random_uniform(shape, minval=-d, maxval=d)\n return tf.Variable(initial)\n\n def _fc_bias_variable(self, shape, input_channels):\n \"\"\"\n shape[0] ... number of nodes \n input channels ... number of input channels \n \"\"\"\n d = 1.0 / np.sqrt(input_channels)\n initial = tf.random_uniform(shape, minval=-d, maxval=d)\n return tf.Variable(initial) \n\n def _conv_weight_variable(self, shape):\n \"\"\" shape: \n shape[0] w ... width of a filter\n shape[1] h ... height of a filter\n shape[2] ... number of input channels\n shape[3] ... number of filters in output\n \"\"\"\n w = shape[0] #width of a filter\n h = shape[1] #height of a filter \n input_channels = shape[2] #number of input channels\n\n d = 1.0 / np.sqrt(input_channels * w * h)\n initial = tf.random_uniform(shape, minval=-d, maxval=d)\n return tf.Variable(initial)\n\n def _conv_bias_variable(self, shape, w, h, input_channels):\n \"\"\" shape: \n shape[0] ... number of output channels\n w ... width of a filter\n h ... height of a filter\n input_channels ... number of input channels\n \"\"\"\n d = 1.0 / np.sqrt(input_channels * w * h)\n initial = tf.random_uniform(shape, minval=-d, maxval=d)\n return tf.Variable(initial)\n\n def _conv2d(self, x, W, stride):\n return tf.nn.conv2d(x, W, strides = [1, stride, stride, 1], padding = \"VALID\")\n\n# Actor-Critic FF Network\n\nclass GameACFFNetwork(GameACNetwork):\n def __init__(self,\n action_size,\n device=\"/cpu:0\"):\n GameACNetwork.__init__(self, action_size, device)\n print(\"Initializing Conv FF Network \")\n \n with tf.device(self._device):\n\n # 8 ... length of a filter\n # 2nd dim is 1 since we have a one dimensional input\n # 16 filters in total\n self.W_conv1 = self._conv_weight_variable([8, 1, len(FEATURES_LIST), 16]) # stride=4\n self.b_conv1 = self._conv_bias_variable([16], 8, 1, len(FEATURES_LIST))\n\n # 32 filters in total\n # with a size of 1x1 - does this make sense?\n self.W_conv2 = self._conv_weight_variable([1, 1, 16, 32]) # stride=2\n self.b_conv2 = self._conv_bias_variable([32], 1, 1, 16)\n\n self.W_fc1 = self._fc_weight_variable([2592, 256])\n self.b_fc1 = self._fc_bias_variable([256], 2592 )\n\n # 256 must be larger than SEQUENCE_LENGTH\n # weight for policy output layer\n self.W_fc2 = self._fc_weight_variable([256, action_size])\n self.b_fc2 = self._fc_bias_variable([action_size], 256)\n\n # weight for value output layer\n self.W_fc3 = self._fc_weight_variable([256, 1])\n self.b_fc3 = self._fc_bias_variable([1], 256)\n\n self.s = tf.placeholder(\"float\", [None, SEQUENCE_LENGTH, 1, len(FEATURES_LIST)])\n\n h_conv1 = tf.nn.relu(self._conv2d(self.s, self.W_conv1, 1) + self.b_conv1)\n h_conv2 = tf.nn.relu(self._conv2d(h_conv1, self.W_conv2, 2) + self.b_conv2)\n\n h_conv2_flat = tf.reshape(h_conv2, [-1, 2592])\n h_fc1 = tf.nn.relu(tf.matmul(h_conv2_flat, self.W_fc1) + self.b_fc1)\n\n # policy (output)\n self.pi = tf.nn.softmax(tf.matmul(h_fc1, self.W_fc2) + self.b_fc2)\n # value (output)\n v_ = tf.matmul(h_fc1, self.W_fc3) + self.b_fc3\n self.v = tf.reshape( v_, [-1] )\n #print(\"SHAPE \", self.v.get_shape())\n\n def run_policy_and_value(self, sess, s_t):\n pi_out, v_out = sess.run( [self.pi, self.v], feed_dict = {self.s : [s_t]} )\n return (pi_out[0], v_out[0])\n\n def run_policy(self, sess, s_t):\n pi_out = sess.run( self.pi, feed_dict = {self.s : [s_t]} )\n return pi_out[0]\n\n def run_value(self, sess, s_t):\n v_out = sess.run( self.v, feed_dict = {self.s : 
[s_t]} )\n return v_out[0]\n\n def get_vars(self):\n return [self.W_conv1, self.b_conv1,\n self.W_conv2, self.b_conv2,\n self.W_fc1, self.b_fc1,\n self.W_fc2, self.b_fc2,\n self.W_fc3, self.b_fc3]\n\n# ActorCritic dilated Conv network\n\nclass GameACDilatedNetwork(GameACFFNetwork):\n\n def __init__(self,\n action_size,\n device=\"/cpu:0\"):\n print(\"Initializing Dilated Conv Network\")\n GameACNetwork.__init__(self, action_size, device)\n\n \n with tf.device(self._device):\n filter_length = 5\n\n\n\n # ===============================128 FILTERS============Developer_tests==================================\n\n self.W_dilconv1 = self._conv_weight_variable([filter_length, 1, len(FEATURES_LIST), 64]) # stride=4\n self.b_dilconv1 = self._conv_bias_variable([64], filter_length, 1, len(FEATURES_LIST))\n\n # 64 filters in total\n # with a size of 1x1 - does this make sense?\n self.W_dilconv2 = self._conv_weight_variable([filter_length, 1, 64, 128]) # stride=2\n self.b_dilconv2 = self._conv_bias_variable([128], filter_length, 1, 64)\n\n self.W_dilconv3 = self._conv_weight_variable([filter_length, 1, 128, 128]) # stride=2\n self.b_dilconv3 = self._conv_bias_variable([128], filter_length, 1, 128)\n\n \n #self.W_fc1 = self._fc_weight_variable([64896, 256]) # When using only 2 dilated levels\n #self.b_fc1 = self._fc_bias_variable([256], 64896 )\n\n self.W_fc1 = self._fc_weight_variable([SEQUENCE_LENGTH * 128, 1024]) # for 3 dilation levels\n self.b_fc1 = self._fc_bias_variable([1024], SEQUENCE_LENGTH * 128, )\n\n # 512 must be larger than SEQUENCE_LENGTH\n # weight for policy output layer\n self.W_fc2 = self._fc_weight_variable([1024, action_size])\n self.b_fc2 = self._fc_bias_variable([action_size], 1024)\n \n # end of replacement\n # weight for value output layer\n self.W_fc3 = self._fc_weight_variable([1024, 1])\n self.b_fc3 = self._fc_bias_variable([1], 1024)\n\n self.s = tf.placeholder(\"float\", [None, SEQUENCE_LENGTH, 1, len(FEATURES_LIST)])\n\n #h_dilconv1 = tf.nn.relu(self._conv2d(self.s, self.W_dilconv1, 1) + self.b_dilconv1)\n \n \n # 2**(layer - 1)\n dilation1 = 1\n dilation2 = 2 \n dilation3 = 4 \n\n #3**(layer - 1)\n #dilation1 = 1\n #dilation2 = 3 \n #dilation3 = 9\n \n #4**(layer - 1)\n #dilation1 = 1\n #dilation2 = 4 \n #dilation3 = 16\n\n\n filter_length1 = filter_length\n filter_length2 = filter_length\n filter_length3 = filter_length\n h_dilconv1 = tf.nn.relu(self._dilconv(self.s, self.W_dilconv1, self.b_dilconv1, filter_length1, dilation1))\n h_dilconv2 = tf.nn.relu(self._dilconv(h_dilconv1, self.W_dilconv2, self.b_dilconv2, filter_length2, dilation2))\n h_dilconv3 = tf.nn.relu(self._dilconv(h_dilconv2, self.W_dilconv3, self.b_dilconv3, filter_length3, dilation3))\n\n print(\"Dilated output shape: {}\".format(h_dilconv3.get_shape()))\n \n #h_conv2_flat = tf.reshape(h_dilconv2, [-1, 64896]) # when using 2 dilated levels\n h_conv2_flat = tf.reshape(h_dilconv3, [-1, SEQUENCE_LENGTH * 128])\n\n h_fc1 = tf.nn.relu(tf.matmul(h_conv2_flat, self.W_fc1) + self.b_fc1)\n\n # policy (output)\n self.pi = tf.nn.softmax(tf.matmul(h_fc1, self.W_fc2) + self.b_fc2)\n # value (output)\n v_ = tf.matmul(h_fc1, self.W_fc3) + self.b_fc3\n self.v = tf.reshape( v_, [-1] )\n print(\"SHAPE \", self.v.get_shape())\n\n\n \n \n # ====================64 FILTERS======================Developer_tests=========== \n \n '''\n self.W_dilconv1 = self._conv_weight_variable([filter_length, 1, len(FEATURES_LIST), 32]) # stride=4\n self.b_dilconv1 = self._conv_bias_variable([32], filter_length, 1, len(FEATURES_LIST))\n\n # 
64 filters in total\n # with a size of 1x1 - does this make sense?\n self.W_dilconv2 = self._conv_weight_variable([filter_length, 1, 32, 64]) # stride=2\n self.b_dilconv2 = self._conv_bias_variable([64], filter_length, 1, 32)\n\n self.W_dilconv3 = self._conv_weight_variable([filter_length, 1, 64, 64]) # stride=2\n self.b_dilconv3 = self._conv_bias_variable([64], filter_length, 1, 64)\n\n \n #self.W_fc1 = self._fc_weight_variable([64896, 256]) # When using only 2 dilated levels\n #self.b_fc1 = self._fc_bias_variable([256], 64896 )\n\n self.W_fc1 = self._fc_weight_variable([SEQUENCE_LENGTH * 64, 512]) # for 3 dilation levels\n self.b_fc1 = self._fc_bias_variable([512], SEQUENCE_LENGTH * 64, )\n\n # 512 must be larger than SEQUENCE_LENGTH\n # weight for policy output layer\n self.W_fc2 = self._fc_weight_variable([512, action_size])\n self.b_fc2 = self._fc_bias_variable([action_size], 512)\n \n # end of replacement\n # weight for value output layer\n self.W_fc3 = self._fc_weight_variable([512, 1])\n self.b_fc3 = self._fc_bias_variable([1], 512)\n\n self.s = tf.placeholder(\"float\", [None, SEQUENCE_LENGTH, 1, len(FEATURES_LIST)])\n\n #h_dilconv1 = tf.nn.relu(self._conv2d(self.s, self.W_dilconv1, 1) + self.b_dilconv1)\n \n # 2**(layer - 1)\n #dilation1 = 1\n #dilation2 = 2 \n #dilation3 = 4 \n\n #3**(layer - 1)\n #dilation1 = 1\n #dilation2 = 3 \n #dilation3 = 9\n \n #4**(layer - 1)\n #dilation1 = 1\n #dilation2 = 4 \n #dilation3 = 16\n\n filter_length1 = filter_length\n filter_length2 = filter_length\n filter_length3 = filter_length\n h_dilconv1 = tf.nn.relu(self._dilconv(self.s, self.W_dilconv1, self.b_dilconv1, filter_length1, dilation1))\n h_dilconv2 = tf.nn.relu(self._dilconv(h_dilconv1, self.W_dilconv2, self.b_dilconv2, filter_length2, dilation2))\n h_dilconv3 = tf.nn.relu(self._dilconv(h_dilconv2, self.W_dilconv3, self.b_dilconv3, filter_length3, dilation3))\n\n print(\"Dilated output shape: {}\".format(h_dilconv3.get_shape()))\n \n #h_conv2_flat = tf.reshape(h_dilconv2, [-1, 64896]) # when using 2 dilated levels\n h_conv2_flat = tf.reshape(h_dilconv3, [-1, SEQUENCE_LENGTH * 64])\n\n h_fc1 = tf.nn.relu(tf.matmul(h_conv2_flat, self.W_fc1) + self.b_fc1)\n\n # policy (output)\n self.pi = tf.nn.softmax(tf.matmul(h_fc1, self.W_fc2) + self.b_fc2)\n # value (output)\n v_ = tf.matmul(h_fc1, self.W_fc3) + self.b_fc3\n self.v = tf.reshape( v_, [-1] )\n print(\"SHAPE \", self.v.get_shape())\n\n '''\n \n # ====================32 FILTERS==================Developer_tests===============\n\n\n ''' \n\n self.W_dilconv1 = self._conv_weight_variable([filter_length, 1, len(FEATURES_LIST), 16]) # stride=4\n self.b_dilconv1 = self._conv_bias_variable([16], filter_length, 1, len(FEATURES_LIST))\n\n # 32 filters in total\n # with a size of 1x1 - does this make sense?\n self.W_dilconv2 = self._conv_weight_variable([filter_length, 1, 16, 32]) # stride=2\n self.b_dilconv2 = self._conv_bias_variable([32], filter_length, 1, 16)\n\n self.W_dilconv3 = self._conv_weight_variable([filter_length, 1, 32, 32]) # stride=2\n self.b_dilconv3 = self._conv_bias_variable([32], filter_length, 1, 32)\n\n #self.W_fc1 = self._fc_weight_variable([64896, 256]) # When using only 2 dilated levels\n #self.b_fc1 = self._fc_bias_variable([256], 64896 )\n\n self.W_fc1 = self._fc_weight_variable([SEQUENCE_LENGTH * 32, 256]) # for 3 dilation levels\n self.b_fc1 = self._fc_bias_variable([256], SEQUENCE_LENGTH * 32, )\n\n # 256 must be larger than SEQUENCE_LENGTH\n # weight for policy output layer\n self.W_fc2 = self._fc_weight_variable([256, 
action_size])\n            self.b_fc2 = self._fc_bias_variable([action_size], 256)\n            \n            # end of replacement\n            # weight for value output layer\n            self.W_fc3 = self._fc_weight_variable([256, 1])\n            self.b_fc3 = self._fc_bias_variable([1], 256)\n\n            self.s = tf.placeholder(\"float\", [None, SEQUENCE_LENGTH, 1, len(FEATURES_LIST)])\n\n            \n\n            #h_dilconv1 = tf.nn.relu(self._conv2d(self.s, self.W_dilconv1, 1) + self.b_dilconv1)\n\n            # Receptive Field = [2**(layer + 1) - 1 ] x [2**(layer + 1) - 1]\n            # Dilation factor = 2**(layer -1), where 2 is the base\n\n            # 2**(layer - 1)\n            #dilation1 = 1\n            #dilation2 = 2 \n            #dilation3 = 4 \n\n            #3**(layer - 1)\n            #dilation1 = 1\n            #dilation2 = 3 \n            #dilation3 = 9\n            \n            #4**(layer - 1)\n            #dilation1 = 1\n            #dilation2 = 4 \n            #dilation3 = 16\n\n            filter_length1 = filter_length\n            filter_length2 = filter_length\n            filter_length3 = filter_length\n            h_dilconv1 = tf.nn.relu(self._dilconv(self.s, self.W_dilconv1, self.b_dilconv1, filter_length1, dilation1))\n            h_dilconv2 = tf.nn.relu(self._dilconv(h_dilconv1, self.W_dilconv2, self.b_dilconv2, filter_length2, dilation2))\n            h_dilconv3 = tf.nn.relu(self._dilconv(h_dilconv2, self.W_dilconv3, self.b_dilconv3, filter_length3, dilation3))\n\n            print(\"Dilated output shape: {}\".format(h_dilconv3.get_shape()))\n            \n            #h_conv2_flat = tf.reshape(h_dilconv2, [-1, 64896]) # when using 2 dilated levels\n            h_conv2_flat = tf.reshape(h_dilconv3, [-1, SEQUENCE_LENGTH * 32])\n\n            h_fc1 = tf.nn.relu(tf.matmul(h_conv2_flat, self.W_fc1) + self.b_fc1)\n\n            # policy (output)\n            self.pi = tf.nn.softmax(tf.matmul(h_fc1, self.W_fc2) + self.b_fc2)\n            # value (output)\n            v_ = tf.matmul(h_fc1, self.W_fc3) + self.b_fc3\n            self.v = tf.reshape( v_, [-1] )\n            print(\"SHAPE \", self.v.get_shape())\n\n            ''' \n\n    def get_vars(self):\n        # all three dilated layers must be listed, otherwise W_dilconv3/b_dilconv3\n        # would never be synced with the shared network or updated by the optimizer\n        return [self.W_dilconv1, self.b_dilconv1,\n                self.W_dilconv2, self.b_dilconv2,\n                self.W_dilconv3, self.b_dilconv3,\n                self.W_fc1, self.b_fc1,\n                self.W_fc2, self.b_fc2,\n                self.W_fc3, self.b_fc3]\n\n    def _dilconv(self, x, w, b, filter_length, dilation):\n        print(\"Tensor before padding: {}\".format(x))\n        padding = [[0, 0], [dilation * (filter_length - 1), 0], [0, 0], [0, 0]] \n        x = tf.pad(x, padding, \"CONSTANT\") # left-pad the time axis only, so the convolution stays causal\n        print(\"Tensor after padding: {}\".format(x))\n\n        if dilation == 1:\n            x = tf.nn.conv2d(x, w, [1, 1, 1, 1], padding='VALID')\n        else:\n            print(\"x.shape\", x.get_shape())\n            print(\"w.shape\", w.get_shape())\n            x = tf.nn.atrous_conv2d(x, w, dilation, padding='VALID')\n        print(\"Tensor after (dil)conv: {}\".format(x))\n\n        return x + b\n\n# Actor-Critic LSTM Network\n\nclass GameACLSTMNetwork(GameACNetwork):\n    def __init__(self,\n                 action_size,\n                 thread_index, # -1 for global\n                 device=\"/cpu:0\" ):\n        GameACNetwork.__init__(self, action_size, device)    \n        print(\"Initializing LSTM Network \")\n\n        with tf.device(self._device):\n            self.W_conv1 = self._conv_weight_variable([8, 1, len(FEATURES_LIST), 16])  # stride=4\n            self.b_conv1 = self._conv_bias_variable([16], 8, 1, len(FEATURES_LIST))\n\n            self.W_conv2 = self._conv_weight_variable([1, 1, 16, 32]) # stride=2\n            self.b_conv2 = self._conv_bias_variable([32], 1, 1, 16)\n\n            self.W_fc1 = self._fc_weight_variable([2592, 256])\n            self.b_fc1 = self._fc_bias_variable([256], 2592 )\n\n            # lstm\n            self.lstm = CustomBasicLSTMCell(256)\n\n            # 256 must be larger than SEQUENCE_LENGTH\n            # weight for policy output layer\n            self.W_fc2 = self._fc_weight_variable([256, action_size])\n            self.b_fc2 = self._fc_bias_variable([action_size], 256)\n\n            # weight for value output layer\n            self.W_fc3 = self._fc_weight_variable([256, 1])\n            self.b_fc3 = 
self._fc_bias_variable([1], 256)\n\n # state (input)\n #self.s = tf.placeholder(\"float\", [None, 84, 84, 4])\n self.s = tf.placeholder(\"float\", [None, SEQUENCE_LENGTH, 1, len(FEATURES_LIST)])\n \n h_conv1 = tf.nn.relu(self._conv2d(self.s, self.W_conv1, 1) + self.b_conv1)\n h_conv2 = tf.nn.relu(self._conv2d(h_conv1, self.W_conv2, 2) + self.b_conv2)\n\n h_conv2_flat = tf.reshape(h_conv2, [-1, 2592])\n h_fc1 = tf.nn.relu(tf.matmul(h_conv2_flat, self.W_fc1) + self.b_fc1)\n # h_fc1 shape=(5,256)\n ##h_fc1 = tf.Print(h_fc1, [h_fc1], message=\"NN This is h_fc1: \", summarize=40)\n\n h_fc1_reshaped = tf.reshape(h_fc1, [1,-1,256])\n # h_fc_reshaped = (1,5,256)\n\n self.step_size = tf.placeholder(tf.float32, [1])\n\n self.initial_lstm_state = tf.placeholder(tf.float32, [1, self.lstm.state_size])\n \n scope = \"net_\" + str(thread_index)\n\n # time_major = False, so output shape is [batch_size, max_time, cell.output_size]\n lstm_outputs, self.lstm_state = tf.nn.dynamic_rnn(self.lstm,\n h_fc1_reshaped,\n initial_state = self.initial_lstm_state,\n sequence_length = self.step_size,\n time_major = False,\n scope = scope)\n\n # lstm_outputs: (1,5,256), (1,1,256)\n \n lstm_outputs = tf.reshape(lstm_outputs, [-1,256])\n\n # policy (output)\n self.pi = tf.nn.softmax(tf.matmul(lstm_outputs, self.W_fc2) + self.b_fc2)\n ##self.pi = tf.Print(self.pi, [self.pi], message=\"NN This is self.pi: \", summarize=40)\n \n # value (output)\n v_ = tf.matmul(lstm_outputs, self.W_fc3) + self.b_fc3\n ##v_ = tf.Print(v_, [v_], message=\"NN This is v_ \", summarize=40)\n self.v = tf.reshape( v_, [-1] )\n ##self.v = tf.Print(self.v, [self.v], message=\"NN This is self.v: \", summarize=40)\n\n # in OK tensorflow/core/kernels/logging_ops.cc:79] NN This is self.v: [-0.036351625]\n #I tensorflow/core/kernels/logging_ops.cc:79] NN This is self.pi: [0.49193981 0.50806022]\n #I tensorflow/core/kernels/logging_ops.cc:79] NN This is self.v: [-0.03456594]\n\n self.reset_state()\n print(\"Initializing Network finished\")\n \n def reset_state(self):\n self.lstm_state_out = np.zeros([1, self.lstm.state_size])\n\n def run_policy_and_value(self, sess, s_t):\n pi_out, v_out, self.lstm_state_out = sess.run( [self.pi, self.v, self.lstm_state],\n feed_dict = {self.s : [s_t],\n self.initial_lstm_state : self.lstm_state_out,\n self.step_size : [1]} )\n # pi_out: (1,3), v_out: (1)\n return (pi_out[0], v_out[0])\n\n def run_policy(self, sess, s_t):\n pi_out, self.lstm_state_out = sess.run( [self.pi, self.lstm_state],\n feed_dict = {self.s : [s_t],\n self.initial_lstm_state : self.lstm_state_out,\n self.step_size : [1]} )\n \n return pi_out[0]\n\n def run_value(self, sess, s_t):\n prev_lstm_state_out = self.lstm_state_out\n v_out, _ = sess.run( [self.v, self.lstm_state],\n feed_dict = {self.s : [s_t],\n self.initial_lstm_state : self.lstm_state_out,\n self.step_size : [1]} )\n \n # roll back lstm state\n self.lstm_state_out = prev_lstm_state_out\n return v_out[0]\n\n def get_vars(self):\n return [self.W_conv1, self.b_conv1,\n self.W_conv2, self.b_conv2,\n self.W_fc1, self.b_fc1,\n self.lstm.matrix, self.lstm.bias,\n self.W_fc2, self.b_fc2,\n self.W_fc3, self.b_fc3]\n\n\n", "# Authors: Christian Thurau\n# License: BSD 3 Clause\n\"\"\" \nPyMF Singular Value Decomposition.\n\n SVD : Class for Singular Value Decomposition\n pinv() : Compute the pseudoinverse of a Matrix\n \n\"\"\"\nfrom numpy.linalg import eigh\nimport time\nimport scipy.sparse\nimport numpy as np\n\nfrom base import PyMFBase3, eighk\n\ntry:\n import scipy.sparse.linalg.eigen.arpack as 
linalg\nexcept (ImportError, AttributeError):\n import scipy.sparse.linalg as linalg\n\n\ndef pinv(A, k=-1, eps= np.finfo(float).eps): \n # Compute Pseudoinverse of a matrix \n svd_mdl = SVD(A, k=k)\n svd_mdl.factorize()\n \n S = svd_mdl.S\n Sdiag = S.diagonal()\n Sdiag = np.where(Sdiag>eps, 1.0/Sdiag, 0.0)\n \n for i in range(S.shape[0]):\n S[i,i] = Sdiag[i]\n\n if scipy.sparse.issparse(A): \n A_p = svd_mdl.V.transpose() * (S * svd_mdl.U.transpose())\n else: \n A_p = np.dot(svd_mdl.V.T, np.core.multiply(np.diag(S)[:,np.newaxis], svd_mdl.U.T))\n\n return A_p\n\n\nclass SVD(PyMFBase3): \n \"\"\" \n SVD(data, show_progress=False)\n \n \n Singular Value Decomposition. Factorize a data matrix into three matrices s.t.\n F = | data - USV| is minimal. U and V correspond to eigenvectors of the matrices\n data*data.T and data.T*data.\n \n Parameters\n ----------\n data : array_like [data_dimension x num_samples]\n the input data\n \n Attributes\n ----------\n U,S,V : submatrices s.t. data = USV \n \n Example\n -------\n >>> import numpy as np\n >>> data = np.array([[1.0, 0.0, 2.0], [0.0, 1.0, 1.0]])\n >>> svd_mdl = SVD(data) \n >>> svd_mdl.factorize()\n \"\"\"\n\n def _compute_S(self, values):\n \"\"\"\n \"\"\"\n self.S = np.diag(np.sqrt(values))\n \n # and the inverse of it\n S_inv = np.diag(np.sqrt(values)**-1.0)\n return S_inv\n\n \n def factorize(self): \n\n def _right_svd(): \n AA = np.dot(self.data[:,:], self.data[:,:].T)\n # argsort sorts in ascending order -> access is backwards\n values, self.U = eighk(AA, k=self._k)\n\n # compute S\n self.S = np.diag(np.sqrt(values))\n \n # and the inverse of it\n S_inv = self._compute_S(values)\n \n # compute V from it\n self.V = np.dot(S_inv, np.dot(self.U[:,:].T, self.data[:,:])) \n \n \n def _left_svd():\n AA = np.dot(self.data[:,:].T, self.data[:,:])\n \n values, Vtmp = eighk(AA, k=self._k)\n self.V = Vtmp.T \n\n # and the inverse of it\n S_inv = self._compute_S(values)\n\n self.U = np.dot(np.dot(self.data[:,:], self.V.T), S_inv) \n \n def _sparse_right_svd():\n ## for some reasons arpack does not allow computation of rank(A) eigenvectors (??) 
#\n            AA = self.data*self.data.transpose()\n            \n            if self.data.shape[0] > 1: \n                # only compute a few eigenvectors ...\n                if self._k > 0 and self._k < self.data.shape[0]-1:\n                    k = self._k\n                else:\n                    k = self.data.shape[0]-1\n                values, u_vectors = linalg.eigsh(AA,k=k)\n            else: \n                values, u_vectors = eigh(AA.todense())\n                \n            # get rid of negative/too low eigenvalues   \n            s = np.where(values > self._EPS)[0]\n            u_vectors = u_vectors[:, s] \n            values = values[s]\n            \n            # sort eigenvectors according to largest value\n            # argsort sorts in ascending order -> access is backwards\n            idx = np.argsort(values)[::-1]\n            values = values[idx]            \n                                    \n            self.U = scipy.sparse.csc_matrix(u_vectors[:,idx])\n                    \n            # compute S\n            tmp_val = np.sqrt(values)            \n            l = len(idx)            \n            self.S = scipy.sparse.spdiags(tmp_val, 0, l, l,format='csc') \n            \n            # and the inverse of it                        \n            S_inv = scipy.sparse.spdiags(1.0/tmp_val, 0, l, l,format='csc')          \n                    \n            # compute V from it\n            self.V = self.U.transpose() * self.data\n            self.V = S_inv * self.V\n   \n        def _sparse_left_svd():        \n            # for some reason arpack does not allow computation of rank(A) eigenvectors (??)\n            AA = self.data.transpose()*self.data\n        \n            if self.data.shape[1] > 1:                 \n                # do not compute full rank if desired\n                if self._k > 0 and self._k < AA.shape[1]-1:\n                    k = self._k\n                else:\n                    k = self.data.shape[1]-1\n                    \n                values, v_vectors = linalg.eigsh(AA,k=k)            \n            else:                \n                values, v_vectors = eigh(AA.todense())    \n            \n            # get rid of negative/too low eigenvalues\n            s = np.where(values > self._EPS)[0]\n            v_vectors = v_vectors[:, s] \n            values = values[s]\n            \n            # sort eigenvectors according to largest value\n            idx = np.argsort(values)[::-1]                    \n            values = values[idx]\n            \n            # argsort sorts in ascending order -> access is backwards            \n            self.V = scipy.sparse.csc_matrix(v_vectors[:,idx])  \n            \n            # compute S\n            tmp_val = np.sqrt(values)            \n            l = len(idx)    \n            self.S = scipy.sparse.spdiags(tmp_val, 0, l, l,format='csc')            \n            \n            # and the inverse of it                         \n            S_inv = scipy.sparse.spdiags(1.0/tmp_val, 0, l, l,format='csc')                      \n            \n            self.U = self.data * self.V * S_inv            \n            self.V = self.V.transpose()           \n        \n        if self._rows >= self._cols:\n            if scipy.sparse.issparse(self.data):                \n                _sparse_left_svd()\n            else:            \n                _left_svd()\n        else:\n            if scipy.sparse.issparse(self.data):\n                _sparse_right_svd()\n            else:            \n                _right_svd()\n\ndef _test():\n    import doctest\n    doctest.testmod()\n \nif __name__ == \"__main__\":\n    _test()\n", "# Authors: Christian Thurau\n# License: BSD 3 Clause\n\"\"\"\nPyMF Compact Matrix Decomposition [1]\n\n    CMD(CUR):  Class for Compact Matrix Decomposition\n\n[1] Sun, J., Xie, Y., Zhang, H. and Faloutsos, C. (2007), Less is More: Compact \nMatrix Decomposition for Large Sparse Graphs, in Proc. SIAM Int. Conf. on Data \nMining. \n\"\"\"\nimport numpy as np\nfrom cur import CUR\n\n__all__ = [\"CMD\"]\n\nclass CMD(CUR):\n    \"\"\" \n    CMD(data, rrank=0, crank=0)\n    \n    \n    Compact Matrix Decomposition. Factorize a data matrix into three matrices s.t.\n    F = | data - USV| is minimal. CMD randomly selects rows and columns from\n    data for building U and V, respectively. \n    \n    Parameters\n    ----------\n    data : array_like [data_dimension x num_samples]\n        the input data\n    rrank: int, optional \n        Number of rows to sample from data. Duplicate entries are eliminated s.t.\n        the resulting rank might be lower.\n        4 (default)\n    crank: int, optional\n        Number of columns to sample from data. Duplicate entries are eliminated s.t.\n        the resulting rank might be lower.\n        4 (default) \n    \n    Attributes\n    ----------\n    U,S,V : submatrices s.t. 
data = USV \n \n Example\n -------\n >>> import numpy as np\n >>> from cmde import CMD\n >>> data = np.array([[1.0, 0.0, 2.0], [0.0, 1.0, 1.0]])\n >>> cmd_mdl = CMD(data, rrank=1, crank=2) \n >>> cmd_mdl.factorize()\n \"\"\"\n \n def _cmdinit(self):\n nrids = np.unique(self._rid)\n ncids = np.unique(self._cid)\n \n self._rcnt = np.zeros(len(nrids)) \n self._ccnt = np.zeros(len(ncids))\n \n for i,idx in enumerate(nrids):\n self._rcnt[i] = len(np.where(self._rid == idx)[0])\n \n for i,idx in enumerate(ncids):\n self._ccnt[i] = len(np.where(self._cid == idx)[0])\n\n self._rid = np.int32(list(nrids))\n self._cid = np.int32(list(ncids))\n \n def factorize(self):\n \"\"\" Factorize s.t. CUR = data\n \n Updated Values\n --------------\n .C : updated values for C.\n .U : updated values for U.\n .R : updated values for R. \n \"\"\"\n \n [prow, pcol] = self.sample_probability()\n\n self._rid = self.sample(self._rrank, prow)\n self._cid = self.sample(self._crank, pcol)\n \n self._cmdinit()\n \n self.computeUCR()\n\ndef _test():\n import doctest\n doctest.testmod()\n \nif __name__ == \"__main__\":\n _test()\n" ]
[ [ "numpy.isnan", "numpy.ones", "matplotlib.pyplot.plot", "numpy.append", "numpy.insert", "matplotlib.pyplot.grid", "numpy.array", "numpy.zeros", "numpy.where", "matplotlib.pyplot.show", "matplotlib.pyplot.figure" ], [ "tensorflow.concat", "tensorflow.zeros", "tensorflow.reduce_sum", "tensorflow.cast", "tensorflow.greater", "tensorflow.nn.elu", "tensorflow.nn.sigmoid", "tensorflow.shape", "tensorflow.identity", "tensorflow.nn.tanh", "tensorflow.meshgrid", "tensorflow.split", "tensorflow.nn.relu", "tensorflow.nn.softmax", "tensorflow.range", "tensorflow.reshape", "tensorflow.expand_dims", "tensorflow.ones", "tensorflow.constant_initializer", "tensorflow.variable_scope", "tensorflow.logical_and" ], [ "tensorflow.device", "tensorflow.nn.dynamic_rnn", "tensorflow.matmul", "numpy.sqrt", "tensorflow.Variable", "tensorflow.reshape", "tensorflow.assign", "tensorflow.placeholder", "tensorflow.nn.l2_loss", "tensorflow.op_scope", "tensorflow.log", "tensorflow.pad", "tensorflow.group", "tensorflow.random_uniform", "tensorflow.nn.atrous_conv2d", "numpy.zeros", "tensorflow.nn.conv2d" ], [ "numpy.diag", "numpy.dot", "numpy.sqrt", "numpy.finfo", "numpy.argsort", "numpy.where", "scipy.sparse.linalg.eigsh" ], [ "numpy.where", "numpy.unique" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "1.12", "1.4", "1.13", "1.5", "1.7", "0.12", "1.0", "1.2" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
jbushago/GamestonkTerminal
[ "73a2b419664bf62bbdc59aa8402c8cd6a913a518", "ab4de1dd70fba866930150e440a03e461a6ca6a8", "73a2b419664bf62bbdc59aa8402c8cd6a913a518", "ab4de1dd70fba866930150e440a03e461a6ca6a8" ]
[ "gamestonk_terminal/stocks/insider/openinsider_view.py", "gamestonk_terminal/stocks/insider/finviz_view.py", "gamestonk_terminal/stocks/due_diligence/finnhub_model.py", "gamestonk_terminal/common/quantitative_analysis/qa_model.py" ]
[ "import itertools\nimport logging\nimport os\nimport textwrap\nfrom typing import List\n\nimport numpy as np\nimport pandas as pd\nimport requests\nfrom bs4 import BeautifulSoup\n\nfrom gamestonk_terminal.decorators import log_start_end\nfrom gamestonk_terminal.helper_funcs import (\n export_data,\n patch_pandas_text_adjustment,\n print_rich_table,\n)\nfrom gamestonk_terminal.rich_config import console\nfrom gamestonk_terminal.stocks.insider.openinsider_model import (\n get_open_insider_data,\n get_open_insider_link,\n)\nfrom gamestonk_terminal import rich_config\n\nlogger = logging.getLogger(__name__)\n\nd_open_insider = {\n \"lcb\": \"latest-cluster-buys\",\n \"lpsb\": \"latest-penny-stock-buys\",\n \"lit\": \"latest-insider-trading\",\n \"lip\": \"insider-purchases\",\n \"blip\": \"latest-insider-purchases-25k\",\n \"blop\": \"latest-officer-purchases-25k\",\n \"blcp\": \"latest-ceo-cfo-purchases-25k\",\n \"lis\": \"insider-sales\",\n \"blis\": \"latest-insider-sales-100k\",\n \"blos\": \"latest-officer-sales-100k\",\n \"blcs\": \"latest-ceo-cfo-sales-100k\",\n \"topt\": \"top-officer-purchases-of-the-day\",\n \"toppw\": \"top-officer-purchases-of-the-week\",\n \"toppm\": \"top-officer-purchases-of-the-month\",\n \"tipt\": \"top-insider-purchases-of-the-day\",\n \"tippw\": \"top-insider-purchases-of-the-week\",\n \"tippm\": \"top-insider-purchases-of-the-month\",\n \"tist\": \"top-insider-sales-of-the-day\",\n \"tispw\": \"top-insider-sales-of-the-week\",\n \"tispm\": \"top-insider-sales-of-the-month\",\n}\n\nd_notes = {\n \"A\": \"A: Amended filing\",\n \"D\": \"D: Derivative transaction in filing (usually option exercise)\",\n \"E\": \"E: Error detected in filing\",\n \"M\": \"M: Multiple transactions in filing; earliest reported transaction date & weighted average transaction price\",\n}\n\nd_trade_types = {\n \"S - Sale\": \"[red]S - Sale: Sale of securities on an exchange or to another person[/red]\",\n \"S - Sale+OE\": \"[yellow]S - Sale+OE: Sale of securities \"\n \"on an exchange or to another person (after option exercise)[/yellow]\",\n \"F - Tax\": \"[magenta]F - Tax: Payment of exercise price or \"\n \"tax liability using portion of securities received from the company[/magenta]\",\n \"P - Purchase\": \"[green]P - Purchase: Purchase of securities on \"\n \"an exchange or from another person[/green]\",\n}\n\n\ndef lambda_red_highlight(values) -> List[str]:\n \"\"\"Red highlight\n\n Parameters\n ----------\n values : List[str]\n dataframe values to color\n\n Returns\n ----------\n List[str]\n colored dataframes values\n \"\"\"\n return [f\"[red]{val}[/red]\" for val in values]\n\n\ndef lambda_yellow_highlight(values) -> List[str]:\n \"\"\"Yellow highlight\n\n Parameters\n ----------\n values : List[str]\n dataframe values to color\n\n Returns\n ----------\n List[str]\n colored dataframes values\n \"\"\"\n return [f\"[yellow]{val}[/yellow]\" for val in values]\n\n\ndef lambda_magenta_highlight(values):\n \"\"\"Magenta highlight\n\n Parameters\n ----------\n values : List[str]\n dataframe values to color\n\n Returns\n ----------\n List[str]\n colored dataframes values\n \"\"\"\n return [f\"[magenta]{val}[/magenta]\" for val in values]\n\n\ndef lambda_green_highlight(values):\n \"\"\"Green highlight\n\n Parameters\n ----------\n values : List[str]\n dataframe values to color\n\n Returns\n ----------\n List[str]\n colored dataframes values\n \"\"\"\n return [f\"[green]{val}[/green]\" for val in values]\n\n\n@log_start_end(log=logger)\ndef print_insider_data(type_insider: str, 
limit: int = 10, export: str = \"\"):\n \"\"\"Print insider data\n\n Parameters\n ----------\n type_insider: str\n Insider type of data\n limit: int\n Limit of data rows to display\n export: str\n Export data format\n \"\"\"\n response = requests.get(f\"http://openinsider.com/{d_open_insider[type_insider]}\")\n soup = BeautifulSoup(response.text, \"html.parser\")\n table = soup.find(\"table\", {\"class\": \"tinytable\"})\n\n if not table:\n console.print(\"No insider information found\", \"\\n\")\n return\n\n table_rows = table.find_all(\"tr\")\n\n res = []\n for tr in table_rows:\n td = tr.find_all(\"td\")\n row = [tr.text.strip() for tr in td if tr.text.strip()]\n res.append(row)\n\n df = pd.DataFrame(res).dropna().head(n=limit)\n columns = [\n \"X\",\n \"Filing Date\",\n \"Trade Date\",\n \"Ticker\",\n \"Company Name\",\n \"Industry\" if type_insider == \"lcb\" else \"Insider Name\",\n \"Title\",\n \"Trade Type\",\n \"Price\",\n \"Qty\",\n \"Owned\",\n \"Diff Own\",\n \"Value\",\n ]\n\n if df.shape[1] == 13:\n df.columns = columns\n else:\n df.columns = columns[1:]\n\n df[\"Filing Date\"] = df[\"Filing Date\"].apply(\n lambda x: \"\\n\".join(textwrap.wrap(x, width=10)) if isinstance(x, str) else x\n )\n df[\"Company Name\"] = df[\"Company Name\"].apply(\n lambda x: \"\\n\".join(textwrap.wrap(x, width=20)) if isinstance(x, str) else x\n )\n df[\"Title\"] = df[\"Title\"].apply(\n lambda x: \"\\n\".join(textwrap.wrap(x, width=10)) if isinstance(x, str) else x\n )\n if type_insider == \"lcb\":\n df[\"Industry\"] = df[\"Industry\"].apply(\n lambda x: \"\\n\".join(textwrap.wrap(x, width=20)) if isinstance(x, str) else x\n )\n else:\n df[\"Insider Name\"] = df[\"Insider Name\"].apply(\n lambda x: \"\\n\".join(textwrap.wrap(x, width=20)) if isinstance(x, str) else x\n )\n\n print_rich_table(\n df,\n headers=[x.title() for x in df.columns],\n show_index=False,\n title=\"Insider Data\",\n )\n\n export_data(export, os.path.dirname(os.path.abspath(__file__)), type_insider, df)\n\n if df.shape[1] == 13:\n l_chars = [list(chars) for chars in df[\"X\"].values]\n l_uchars = np.unique(list(itertools.chain(*l_chars)))\n\n for char in l_uchars:\n console.print(d_notes[char])\n console.print(\"\")\n\n\n@log_start_end(log=logger)\ndef print_insider_filter(\n preset_loaded: str,\n ticker: str,\n limit: int = 10,\n links: bool = False,\n export: str = \"\",\n):\n \"\"\"Print insider filter based on loaded preset. 
[Source: OpenInsider]\n\n Parameters\n ----------\n preset_loaded : str\n Loaded preset filter\n ticker : str\n Stock ticker\n limit : int\n Limit of rows of data to display\n links : bool\n Flag to show hyperlinks\n export : str\n Format to export data\n \"\"\"\n if ticker:\n link = f\"http://openinsider.com/screener?s={ticker}\"\n else:\n link = get_open_insider_link(preset_loaded)\n\n if not link:\n console.print(\"\")\n return\n\n df_insider = get_open_insider_data(link, has_company_name=bool(not ticker))\n df_insider_orig = df_insider.copy()\n\n if df_insider.empty:\n console.print(\"No insider data found\\n\")\n return\n\n if links:\n df_insider = df_insider[[\"Ticker Link\", \"Insider Link\", \"Filing Link\"]].head(\n limit\n )\n else:\n df_insider = df_insider.drop(\n columns=[\"Filing Link\", \"Ticker Link\", \"Insider Link\"]\n ).head(limit)\n\n if rich_config.USE_COLOR and not links:\n if not df_insider[df_insider[\"Trade Type\"] == \"S - Sale\"].empty:\n df_insider[df_insider[\"Trade Type\"] == \"S - Sale\"] = df_insider[\n df_insider[\"Trade Type\"] == \"S - Sale\"\n ].apply(lambda_red_highlight)\n if not df_insider[df_insider[\"Trade Type\"] == \"S - Sale+OE\"].empty:\n df_insider[df_insider[\"Trade Type\"] == \"S - Sale+OE\"] = df_insider[\n df_insider[\"Trade Type\"] == \"S - Sale+OE\"\n ].apply(lambda_yellow_highlight)\n if not df_insider[df_insider[\"Trade Type\"] == \"F - Tax\"].empty:\n df_insider[df_insider[\"Trade Type\"] == \"F - Tax\"] = df_insider[\n df_insider[\"Trade Type\"] == \"F - Tax\"\n ].apply(lambda_magenta_highlight)\n if not df_insider[df_insider[\"Trade Type\"] == \"P - Purchase\"].empty:\n df_insider[df_insider[\"Trade Type\"] == \"P - Purchase\"] = df_insider[\n df_insider[\"Trade Type\"] == \"P - Purchase\"\n ].apply(lambda_green_highlight)\n\n patch_pandas_text_adjustment()\n pd.set_option(\"display.max_colwidth\", 0)\n pd.set_option(\"display.max_rows\", None)\n\n # needs to be done because table is too large :(\n df_insider = df_insider.drop(columns=[\"Filing Date\", \"Trade Type\"])\n\n else:\n # needs to be done because table is too large :(\n df_insider = df_insider.drop(columns=[\"Filing Date\"])\n\n console.print(\"\")\n print_rich_table(\n df_insider,\n headers=[x.title() for x in df_insider.columns],\n title=\"Insider filtered\",\n )\n\n if export:\n if preset_loaded:\n cmd = \"filter\"\n if ticker:\n cmd = \"lis\"\n\n export_data(export, os.path.dirname(os.path.abspath(__file__)), cmd, df_insider)\n\n if not links:\n l_chars = [list(chars) for chars in df_insider_orig[\"X\"].values]\n l_uchars = np.unique(list(itertools.chain(*l_chars)))\n console.print(\"\")\n for char in l_uchars:\n console.print(d_notes[char])\n\n l_tradetype = df_insider_orig[\"Trade Type\"].values\n l_utradetype = np.unique(l_tradetype)\n console.print(\"\")\n for tradetype in l_utradetype:\n console.print(d_trade_types[tradetype])\n\n console.print(\"\")\n", "\"\"\" Finviz View \"\"\"\n__docformat__ = \"numpy\"\n\nimport logging\nimport os\n\nimport pandas as pd\n\nfrom gamestonk_terminal.decorators import log_start_end\nfrom gamestonk_terminal.helper_funcs import export_data, print_rich_table\nfrom gamestonk_terminal.rich_config import console\nfrom gamestonk_terminal.stocks.insider import finviz_model\n\nlogger = logging.getLogger(__name__)\n\n\n@log_start_end(log=logger)\ndef last_insider_activity(ticker: str, num: int, export: str):\n \"\"\"Display insider activity for a given stock ticker. 
[Source: Finviz]\n\n Parameters\n ----------\n ticker : str\n Stock ticker\n num : int\n Number of latest insider activity to display\n export : str\n Export dataframe data to csv,json,xlsx file\n \"\"\"\n d_finviz_insider = finviz_model.get_last_insider_activity(ticker)\n df = pd.DataFrame.from_dict(d_finviz_insider)\n if df.empty:\n console.print(f\"[red]No insider information found for {ticker}.\\n[/red]\")\n return\n df.set_index(\"Date\", inplace=True)\n df = df[\n [\n \"Relationship\",\n \"Transaction\",\n \"#Shares\",\n \"Cost\",\n \"Value ($)\",\n \"#Shares Total\",\n \"Insider Trading\",\n \"SEC Form 4\",\n ]\n ]\n\n print_rich_table(\n df.head(num),\n headers=list(df.columns),\n show_index=True,\n title=\"Insider Activity\",\n )\n console.print(\"\")\n\n export_data(\n export,\n os.path.dirname(os.path.abspath(__file__)),\n \"lins\",\n df,\n )\n", "\"\"\" Finnhub Model \"\"\"\n__docformat__ = \"numpy\"\n\nimport logging\n\nimport pandas as pd\nimport requests\n\nfrom gamestonk_terminal import config_terminal as cfg\nfrom gamestonk_terminal.decorators import log_start_end\nfrom gamestonk_terminal.rich_config import console\n\nlogger = logging.getLogger(__name__)\n\n\n@log_start_end(log=logger)\ndef get_rating_over_time(ticker: str) -> pd.DataFrame:\n \"\"\"Get rating over time data. [Source: Finnhub]\n\n Parameters\n ----------\n ticker : str\n Ticker to get ratings from\n\n Returns\n -------\n pd.DataFrame\n Get dataframe with ratings\n \"\"\"\n response = requests.get(\n f\"https://finnhub.io/api/v1/stock/recommendation?symbol={ticker}&token={cfg.API_FINNHUB_KEY}\"\n )\n df = pd.DataFrame()\n\n if response.status_code == 200:\n if response.json():\n df = pd.DataFrame(response.json())\n else:\n console.print(\"No ratings over time found\", \"\\n\")\n elif response.status_code == 401:\n console.print(\"[red]Invalid API Key[/red]\\n\")\n elif response.status_code == 403:\n console.print(\"[red]API Key not authorized for Premium Feature[/red]\\n\")\n else:\n console.print(f\"Error in request: {response.json()['error']}\", \"\\n\")\n\n return df\n", "\"\"\"Quantitative Analysis Model\"\"\"\n__docformat__ = \"numpy\"\n\nimport logging\nimport warnings\nfrom typing import Any, Tuple, Union, List\nimport pandas as pd\nimport statsmodels.api as sm\nfrom statsmodels.tools.sm_exceptions import MissingDataError\nfrom statsmodels.tsa.seasonal import seasonal_decompose\nfrom statsmodels.tsa.stattools import adfuller, kpss\nfrom scipy import stats\nimport numpy as np\n\nfrom gamestonk_terminal.decorators import log_start_end\n\n# TODO : Since these are common/ they should be independent of 'stock' info.\n# df_stock should be replaced with a generic df and a column variable\n\n\nlogger = logging.getLogger(__name__)\n\n\n@log_start_end(log=logger)\ndef get_summary(df: pd.DataFrame) -> pd.DataFrame:\n \"\"\"Print summary statistics\n\n Parameters\n ----------\n df : pd.DataFrame\n Dataframe to get summary statistics for\n \"\"\"\n\n df_stats = df.describe(percentiles=[0.1, 0.25, 0.5, 0.75, 0.9])\n df_stats.loc[\"var\"] = df_stats.loc[\"std\"] ** 2\n\n return df_stats\n\n\n@log_start_end(log=logger)\ndef get_seasonal_decomposition(\n df: pd.DataFrame, multiplicative: bool\n) -> Tuple[Any, pd.DataFrame, pd.DataFrame]:\n \"\"\"Perform seasonal decomposition\n\n Parameters\n ----------\n df_stock : pd.DataFrame\n Dataframe of targeted data\n multiplicative : bool\n Boolean to indicate multiplication instead of addition\n\n Returns\n -------\n result: Any\n Result of statsmodels seasonal_decompose\n 
cycle: pd.DataFrame\n Filtered cycle\n trend: pd.DataFrame\n Filtered Trend\n \"\"\"\n seasonal_periods = 5\n # Hodrick-Prescott filter\n # See Ravn and Uhlig: http://home.uchicago.edu/~huhlig/papers/uhlig.ravn.res.2002.pdf\n lamb = 107360000000\n\n model = [\"additive\", \"multiplicative\"][multiplicative]\n\n result = seasonal_decompose(df, model=model, period=seasonal_periods)\n cycle, trend = sm.tsa.filters.hpfilter(\n result.trend[result.trend.notna().values], lamb=lamb\n )\n\n return result, pd.DataFrame(cycle), pd.DataFrame(trend)\n\n\n@log_start_end(log=logger)\ndef get_normality(data: pd.DataFrame) -> pd.DataFrame:\n \"\"\"\n Look at the distribution of returns and generate statistics on the relation to the normal curve.\n This function calculates skew and kurtosis (the third and fourth moments) and performs both\n a Jarque-Bera and Shapiro Wilk test to determine if data is normally distributed.\n\n Parameters\n ----------\n df : pd.DataFrame\n Dataframe of targeted data\n\n Returns\n -------\n pd.DataFrame\n Dataframe containing statistics of normality\n \"\"\"\n # Kurtosis\n # Measures height and sharpness of the central peak relative to that of a standard bell curve\n k, kpval = stats.kurtosistest(data)\n\n # Skewness\n # Measure of the asymmetry of the probability distribution of a random variable about its mean\n s, spval = stats.skewtest(data)\n\n # Jarque-Bera goodness of fit test on sample data\n # Tests if the sample data has the skewness and kurtosis matching a normal distribution\n jb, jbpval = stats.jarque_bera(data)\n\n # Shapiro\n # The Shapiro-Wilk test tests the null hypothesis that the data was drawn from a normal distribution.\n sh, shpval = stats.shapiro(data)\n\n # Kolmogorov-Smirnov\n # The one-sample test compares the underlying distribution F(x) of a sample against a given distribution G(x).\n # Comparing to normal here.\n ks, kspval = stats.kstest(data, \"norm\")\n\n l_statistic = [k, s, jb, sh, ks]\n l_pvalue = [kpval, spval, jbpval, shpval, kspval]\n\n return pd.DataFrame(\n [l_statistic, l_pvalue],\n columns=[\n \"Kurtosis\",\n \"Skewness\",\n \"Jarque-Bera\",\n \"Shapiro-Wilk\",\n \"Kolmogorov-Smirnov\",\n ],\n index=[\"Statistic\", \"p-value\"],\n )\n\n\n@log_start_end(log=logger)\ndef get_unitroot(df: pd.DataFrame, fuller_reg: str, kpss_reg: str) -> pd.DataFrame:\n \"\"\"Calculate test statistics for unit roots\n\n Parameters\n ----------\n df : pd.DataFrame\n DataFrame of target variable\n fuller_reg : str\n Type of regression of ADF test\n kpss_reg : str\n Type of regression for KPSS test\n\n Returns\n -------\n pd.DataFrame\n Dataframe with results of ADF test and KPSS test\n \"\"\"\n # The Augmented Dickey-Fuller test\n # Used to test for a unit root in a univariate process in the presence of serial correlation.\n try:\n result = adfuller(df, regression=fuller_reg)\n except MissingDataError:\n df = df.dropna(axis=0)\n result = adfuller(df, regression=fuller_reg)\n cols = [\"Test Statistic\", \"P-Value\", \"NLags\", \"Nobs\", \"ICBest\"]\n vals = [result[0], result[1], result[2], result[3], result[5]]\n data = pd.DataFrame(data=vals, index=cols, columns=[\"ADF\"])\n\n # Kwiatkowski-Phillips-Schmidt-Shin test\n # Test for level or trend stationarity\n # This test seems to produce an Interpolation Error which says\n # The test statistic is outside of the range of p-values available in the\n # look-up table. 
The actual p-value is greater than the p-value returned.\n # Wrap this in catch_warnings to prevent\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n res2 = kpss(df, regression=kpss_reg, nlags=\"auto\")\n vals2 = [res2[0], res2[1], res2[2], \"\", \"\"]\n data[\"KPSS\"] = vals2\n return data\n\n\ndef calculate_adjusted_var(\n kurtosis: float, skew: float, ndp: float, std: float, mean: float\n):\n \"\"\"Calculates VaR, which is adjusted for skew and kurtosis (Cornish-Fischer-Expansion)\n\n Parameters\n ----------\n kurtosis: float\n kurtosis of data\n skew: float\n skew of data\n ndp: float\n normal distribution percentage number (99% -> -2.326)\n std: float\n standard deviation of data\n mean: float\n mean of data\n\n Returns\n -------\n float\n Real adjusted VaR\n \"\"\"\n\n # Derived from Cornish-Fisher-Expansion\n # Formula for quantile from \"Finance Compact Plus\" by Zimmerman; Part 1, page 130-131\n # More material/resources:\n # - \"Numerical Methods and Optimization in Finance\" by Gilli, Maringer & Schumann;\n # - https://www.value-at-risk.net/the-cornish-fisher-expansion/;\n # - https://www.diva-portal.org/smash/get/diva2:442078/FULLTEXT01.pdf, Section 2.4.2, p.18;\n # - \"Risk Management and Financial Institutions\" by John C. Hull\n\n skew_component = skew / 6 * (ndp**2 - 1) ** 2 - skew**2 / 36 * ndp * (\n 2 * ndp**2 - 5\n )\n kurtosis_component = (kurtosis - 3) / 24 * ndp * (ndp**2 - 3)\n quantile = ndp + skew_component + kurtosis_component\n log_return = mean + quantile * std\n real_return = 2.7182818**log_return - 1\n return real_return\n\n\ndef get_var(\n data: pd.DataFrame,\n use_mean: bool,\n adjusted_var: bool,\n student_t: bool,\n percentile: Union[int, float],\n portfolio: bool,\n):\n \"\"\"Gets value at risk for specified stock dataframe\n\n Parameters\n ----------\n data: pd.DataFrame\n Dataframe of a stock/portfolio\n use_mean: bool\n If one should use the stocks mean for calculation\n adjusted_var: bool\n If one should return VaR adjusted for skew and kurtosis\n student_t: bool\n If one should use the student-t distribution\n percentile: Union[int,float]\n VaR percentile\n portfolio: bool\n If the data is a portfolio\n\n Returns\n -------\n list\n list of VaR\n list\n list of historical VaR\n \"\"\"\n if not portfolio:\n data = data[[\"adjclose\"]].copy()\n data.loc[:, \"return\"] = data.adjclose.pct_change()\n data_return = data[\"return\"]\n else:\n data = data[1:].copy()\n data_return = data\n\n # Distribution percentages\n percentile_90 = -1.282\n percentile_95 = -1.645\n percentile_99 = -2.326\n percentile_custom = stats.norm.ppf(1 - percentile)\n\n # Mean\n if use_mean:\n mean = data_return.mean()\n else:\n mean = 0\n\n # Standard Deviation\n std = data_return.std(axis=0)\n\n if adjusted_var:\n\n # Kurtosis\n # Measures height and sharpness of the central peak relative to that of a standard bell curve\n k = data_return.kurtosis(axis=0)\n\n # Skewness\n # Measure of the asymmetry of the probability distribution of a random variable about its mean\n s = data_return.skew(axis=0)\n\n # Adjusted VaR\n var_90 = calculate_adjusted_var(k, s, percentile_90, std, mean)\n var_95 = calculate_adjusted_var(k, s, percentile_95, std, mean)\n var_99 = calculate_adjusted_var(k, s, percentile_99, std, mean)\n var_custom = calculate_adjusted_var(k, s, percentile_custom, std, mean)\n\n elif student_t:\n # Calculating VaR based on the Student-t distribution\n\n # Fitting student-t parameters to the data\n v, _, _ = stats.t.fit(data_return.fillna(0))\n if not 
use_mean:\n mean = 0\n var_90 = np.sqrt((v - 2) / v) * stats.t.ppf(0.1, v) * std + mean\n var_95 = np.sqrt((v - 2) / v) * stats.t.ppf(0.05, v) * std + mean\n var_99 = np.sqrt((v - 2) / v) * stats.t.ppf(0.01, v) * std + mean\n var_custom = np.sqrt((v - 2) / v) * stats.t.ppf(1 - percentile, v) * std + mean\n\n else:\n # Regular Var\n var_90 = np.exp(mean + percentile_90 * std) - 1\n var_95 = np.exp(mean + percentile_95 * std) - 1\n var_99 = np.exp(mean + percentile_99 * std) - 1\n var_custom = np.exp(mean + percentile_custom * std) - 1\n\n if not portfolio:\n data.sort_values(\"return\", inplace=True, ascending=True)\n data_return = data[\"return\"]\n else:\n data.sort_values(inplace=True, ascending=True)\n data_return = data\n\n # Historical VaR\n hist_var_90 = data_return.quantile(0.1)\n hist_var_95 = data_return.quantile(0.05)\n hist_var_99 = data_return.quantile(0.01)\n hist_var_custom = data_return.quantile(1 - percentile)\n\n var_list = [var_90, var_95, var_99, var_custom]\n hist_var_list = [hist_var_90, hist_var_95, hist_var_99, hist_var_custom]\n return var_list, hist_var_list\n\n\ndef get_es(\n data: pd.DataFrame,\n use_mean: bool,\n distribution: str,\n percentile: Union[float, int],\n portfolio: bool,\n) -> Tuple[List[float], List[float]]:\n \"\"\"Gets Expected Shortfall for specified stock dataframe\n\n Parameters\n ----------\n data: pd.DataFrame\n Dataframe of a stock\n use_mean: bool\n If one should use the stocks mean for calculation\n distribution: str\n Type of distribution, options: laplace, student_t, normal\n percentile: Union[float,int]\n VaR percentile\n portfolio: bool\n If the data is a portfolio\n\n Returns\n -------\n list\n list of ES\n list\n list of historical ES\n \"\"\"\n if not portfolio:\n data = data[[\"adjclose\"]].copy()\n data.loc[:, \"return\"] = data.adjclose.pct_change()\n data_return = data[\"return\"]\n else:\n data = data[1:].copy()\n data_return = data\n\n # Distribution percentages\n percentile_90 = -1.282\n percentile_95 = -1.645\n percentile_99 = -2.326\n percentile_custom = stats.norm.ppf(1 - percentile)\n\n # Mean\n if use_mean:\n mean = data_return.mean()\n else:\n mean = 0\n\n # Standard Deviation\n std = data_return.std(axis=0)\n\n if distribution == \"laplace\":\n # Calculating ES based on Laplace distribution\n # For formula see: https://en.wikipedia.org/wiki/Expected_shortfall#Laplace_distribution\n\n # Fitting b (scale parameter) to the variance of the data\n # Since variance of the Laplace dist.: var = 2*b**2\n # Thus:\n b = np.sqrt(std**2 / 2)\n\n # Calculation\n es_90 = -b * (1 - np.log(2 * 0.1)) + mean\n es_95 = -b * (1 - np.log(2 * 0.05)) + mean\n es_99 = -b * (1 - np.log(2 * 0.01)) + mean\n\n if (1 - percentile) < 0.5:\n es_custom = -b * (1 - np.log(2 * (1 - percentile))) + mean\n else:\n es_custom = 0\n\n elif distribution == \"student_t\":\n # Calculating ES based on the Student-t distribution\n\n # Fitting student-t parameters to the data\n v, _, scale = stats.t.fit(data_return.fillna(0))\n if not use_mean:\n mean = 0\n\n # Student T Distribution percentages\n percentile_90 = stats.t.ppf(0.1, v)\n percentile_95 = stats.t.ppf(0.05, v)\n percentile_99 = stats.t.ppf(0.01, v)\n percentile_custom = stats.t.ppf(1 - percentile, v)\n\n # Calculation\n es_90 = (\n -scale\n * (v + percentile_90**2)\n / (v - 1)\n * stats.t.pdf(percentile_90, v)\n / 0.1\n + mean\n )\n es_95 = (\n -scale\n * (v + percentile_95**2)\n / (v - 1)\n * stats.t.pdf(percentile_95, v)\n / 0.05\n + mean\n )\n es_99 = (\n -scale\n * (v + percentile_99**2)\n / (v - 
1)\n * stats.t.pdf(percentile_99, v)\n / 0.01\n + mean\n )\n es_custom = (\n -scale\n * (v + percentile_custom**2)\n / (v - 1)\n * stats.t.pdf(percentile_custom, v)\n / (1 - percentile)\n + mean\n )\n\n elif distribution == \"logistic\":\n # Logistic distribution\n # For formula see: https://en.wikipedia.org/wiki/Expected_shortfall#Logistic_distribution\n\n # Fitting s (scale parameter) to the variance of the data\n # Since variance of the Logistic dist.: var = s**2*pi**2/3\n # Thus:\n s = np.sqrt(3 * std**2 / np.pi**2)\n\n # Calculation\n a = 1 - percentile\n es_90 = -s * np.log((0.9 ** (1 - 1 / 0.1)) / 0.1) + mean\n es_95 = -s * np.log((0.95 ** (1 - 1 / 0.05)) / 0.05) + mean\n es_99 = -s * np.log((0.99 ** (1 - 1 / 0.01)) / 0.01) + mean\n es_custom = -s * np.log((percentile ** (1 - 1 / a)) / a) + mean\n\n else:\n # Regular Expected Shortfall\n es_90 = std * -stats.norm.pdf(percentile_90) / 0.1 + mean\n es_95 = std * -stats.norm.pdf(percentile_95) / 0.05 + mean\n es_99 = std * -stats.norm.pdf(percentile_99) / 0.01 + mean\n es_custom = std * -stats.norm.pdf(percentile_custom) / (1 - percentile) + mean\n\n # Historical Expected Shortfall\n _, hist_var_list = get_var(data, use_mean, False, False, percentile, portfolio)\n hist_es_90 = data_return[data_return <= hist_var_list[0]].mean()\n hist_es_95 = data_return[data_return <= hist_var_list[1]].mean()\n hist_es_99 = data_return[data_return <= hist_var_list[2]].mean()\n hist_es_custom = data_return[data_return <= hist_var_list[3]].mean()\n\n es_list = [es_90, es_95, es_99, es_custom]\n hist_es_list = [hist_es_90, hist_es_95, hist_es_99, hist_es_custom]\n return es_list, hist_es_list\n" ]
[ [ "pandas.set_option", "pandas.DataFrame", "numpy.unique" ], [ "pandas.DataFrame.from_dict" ], [ "pandas.DataFrame" ], [ "scipy.stats.kstest", "scipy.stats.norm.ppf", "numpy.log", "numpy.sqrt", "scipy.stats.norm.pdf", "scipy.stats.skewtest", "pandas.DataFrame", "scipy.stats.t.ppf", "scipy.stats.shapiro", "scipy.stats.jarque_bera", "scipy.stats.kurtosistest", "numpy.exp", "scipy.stats.t.pdf" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] } ]
drkostas/COSC525-Project2
[ "a33c786621e6047b0a586c7c3a3b5b85cb51fd6d", "a33c786621e6047b0a586c7c3a3b5b85cb51fd6d" ]
[ "main.py", "main_project1.py" ]
[ "import traceback\nimport argparse\nimport numpy as np\nfrom src import NeuralNetwork, generateExample, getTensorExample\nfrom typing import *\n\n\ndef get_args() -> argparse.Namespace:\n \"\"\"Set-up the argument parser\n\n Returns:\n argparse.Namespace:\n \"\"\"\n parser = argparse.ArgumentParser(\n description='Project 2 for the Deep Learning class (COSC 525). '\n 'Involves the development of a Convolutional Neural Network.',\n add_help=False)\n # Required Args\n required_args = parser.add_argument_group('Required Arguments')\n required_args.add_argument('-d', '--dataset', required=True,\n help=\"The datasets to train the network on. \"\n \"Options: [example1, example2, example3]\")\n # Optional args\n optional_args = parser.add_argument_group('Optional Arguments')\n optional_args.add_argument(\"-h\", \"--help\", action=\"help\", help=\"Show this help message and exit\")\n\n return parser.parse_args()\n\n\ndef main():\n \"\"\"This is the main function of main.py\n\n Example:\n python main.py --dataset example1\n \"\"\"\n\n # Initializing\n args = get_args()\n # Load the configurations\n dataset_type = args.dataset\n if dataset_type in ('example1', 'example2', 'example3'):\n example_num = int(dataset_type[-1])\n inputs, targets, layers = generateExample(example_num)\n getTensorExample(example_num)\n else:\n raise ValueError('Invalid dataset type')\n\n # ------- Start of Code ------- #\n # # Initialize the network # #\n netWork = NeuralNetwork(input_size=inputs.shape, loss_function=\"square_error\",\n learning_rate=100, input_channels=1)\n # Add layers\n for layer in layers:\n if layer['type'] == 'Conv':\n weights = []\n for k_ind in range(layer['num_kernels']):\n kernels = [k_w.flatten() for k_w in layer['weights'][k_ind]]\n kernel_weights = np.concatenate((*kernels,\n layer['biases'][k_ind]))\n weights.append(kernel_weights)\n weights = np.array(weights)\n netWork.addConvLayer(num_kernels=layer['num_kernels'],\n kernel_size=layer['kernel_size'],\n activation=layer['activation'],\n weights=weights)\n elif layer['type'] == 'Flat':\n netWork.addFlattenLayer()\n elif layer['type'] == 'MaxPool':\n netWork.addMaxPoolLayer(kernel_size=layer['kernel_size'])\n elif layer['type'] == 'Dense':\n weights = np.array([np.concatenate((layer['weights'].flatten(), layer['bias']))])\n netWork.addFCLayer(num_neurons=targets.shape[0],\n activation=layer['activation'],\n weights=weights)\n else:\n raise ValueError(f'Invalid layer type: {layer[\"type\"]}')\n\n # # Train the network # #\n # First Feed forward\n outputs = netWork.calculate(inputs=inputs)\n print(\"----------- Custom Model -----------\")\n print(f\"model output before:\\n{outputs}\")\n\n # Calculate Loss derivative\n loss_der = netWork.loss_derivative(outputs, targets)\n loss = netWork.calculate_loss(np.array([inputs]), targets)\n netWork.train(np.array([inputs]), targets) # Train the network\n\n outputs = netWork.calculate(inputs=inputs)\n print(f\"model output after: \\n{outputs}\")\n\n if example_num == 1:\n print('1st convolutional layer, kernel weights:')\n print(netWork.layers[0].kernels[0][0][0].weights[:-1].reshape((3, 3)))\n print('1st convolutional layer, kernel bias:')\n print(np.array([netWork.layers[0].kernels[0][0][0].weights[-1]]))\n print('fully connected layer weights:')\n\n print(netWork.layers[2].neurons[0].weights[:-1])\n print('fully connected layer bias:')\n print(np.array([netWork.layers[2].neurons[0].weights[-1]]))\n elif example_num == 2:\n print('1st convolutional layer, 1st kernel weights:')\n 
print(netWork.layers[0].kernels[0][0][0].weights[:-1].reshape((3, 3)))\n        print('1st convolutional layer, 1st kernel bias:')\n        print(np.array([netWork.layers[0].kernels[0][0][0].weights[-1]]))\n\n        print('1st convolutional layer, 2nd kernel weights:')\n        print(netWork.layers[0].kernels[1][0][0].weights[:-1].reshape((3, 3)))\n        print('1st convolutional layer, 2nd kernel bias:')\n        print(np.array([netWork.layers[0].kernels[1][0][0].weights[-1]]))\n\n        print('2nd convolutional layer, 1st kernel weights:')\n        print(netWork.layers[1].kernels[0][0][0].weights[:-1].reshape((2, 3, 3)))\n        print('2nd convolutional layer, 1st kernel bias:')\n        print(np.array([netWork.layers[1].kernels[0][0][0].weights[-1]]))\n\n        print('fully connected layer weights:')\n\n        print(netWork.layers[3].neurons[0].weights[:-1])\n        print('fully connected layer bias:')\n        print(np.array([netWork.layers[3].neurons[0].weights[-1]]))\n    elif example_num == 3:\n        print('1st convolutional layer, 1st kernel weights:')\n        print(netWork.layers[0].kernels[0][0][0].weights[:-1].reshape((3, 3)))\n        print('1st convolutional layer, 1st kernel bias:')\n        print(np.array([netWork.layers[0].kernels[0][0][0].weights[-1]]))\n\n        print('1st convolutional layer, 2nd kernel weights:')\n        print(netWork.layers[0].kernels[1][0][0].weights[:-1].reshape((3, 3)))\n        print('1st convolutional layer, 2nd kernel bias:')\n        print(np.array([netWork.layers[0].kernels[1][0][0].weights[-1]]))\n\n        print('fully connected layer weights:')\n\n        print(netWork.layers[3].neurons[0].weights[:-1])\n        print('fully connected layer bias:')\n        print(np.array([netWork.layers[3].neurons[0].weights[-1]]))\n    else:\n        raise ValueError(f'Invalid example number: {example_num}')\n\n\nif __name__ == '__main__':\n    try:\n        main()\n    except Exception as e:\n        print(str(e) + '\\n' + str(traceback.format_exc()))\n        raise e\n\n# # First Layer (Convolutional)\n# weights_L1 = np.array(\n#     [np.concatenate((l1k1.flatten(), l1b1)), np.concatenate((l1k2.flatten(), l1b2))])\n# netWork.addConvLayer(num_kernels=2, kernel_size=3, activation=\"logistic\", weights=weights_L1)\n# # Second Layer (Convolutional)\n# weights_L2 = np.array([np.concatenate((l2c1.flatten(), l2c2.flatten(), l2b))])\n# netWork.addConvLayer(num_kernels=1, kernel_size=3, activation=\"logistic\", weights=weights_L2)\n# # Third Layer (Fully Connected)\n# netWork.addFlattenLayer()\n# weights_L3 = np.array([np.concatenate((l3.flatten(), l3b))])\n# netWork.addFCLayer(num_neurons=1, activation=\"logistic\", weights=weights_L3)\n", "import traceback\nimport argparse\nimport numpy as np\nfrom src import NeuralNetwork\nfrom typing import *\n\n\ndef get_args() -> argparse.Namespace:\n    \"\"\"Set-up the argument parser\n\n    Returns:\n        argparse.Namespace:\n    \"\"\"\n    parser = argparse.ArgumentParser(\n        description='Project 1 for the Deep Learning class (COSC 525). '\n                    'Involves the development of a Feed-Forward Neural Network.',\n        add_help=False)\n    # Required Args\n    required_args = parser.add_argument_group('Required Arguments')\n    required_args.add_argument('-d', '--dataset', required=True,\n                               help=\"The datasets to train the network on. \"\n                                    \"Options: [and, xor, class_example]\")\n    required_args.add_argument('-n', '--network', required=True,\n                               help=\"The network configuration to use. 
\"\n \"Options: [1x1_net, 2x1_net, 2x2_net]\")\n # Optional args\n optional_args = parser.add_argument_group('Optional Arguments')\n optional_args.add_argument(\"-h\", \"--help\", action=\"help\", help=\"Show this help message and exit\")\n\n return parser.parse_args()\n\n\ndef get_network_config(network_name: str) -> Dict[str, Any]:\n \"\"\"Get the network configuration\n\n Args:\n network_name (str): The name of the network to get the configuration for\n\n Returns:\n Dict[str, Any]: The network configuration\n \"\"\"\n nn_conf = {}\n if network_name == '1x1_net':\n nn_conf['neurons_per_layer'] = [1]\n nn_conf['activations'] = ['logistic']\n nn_conf['loss_function'] = 'square_error'\n nn_conf['learning_rate'] = 5\n nn_conf['epochs'] = 5000\n nn_conf['print_every'] = 500\n elif network_name == '2x1_net':\n nn_conf['neurons_per_layer'] = [2, 1]\n nn_conf['activations'] = ['logistic', 'logistic']\n nn_conf['loss_function'] = 'square_error'\n nn_conf['learning_rate'] = 5\n nn_conf['epochs'] = 5000\n nn_conf['print_every'] = 500\n elif network_name == '2x2_net':\n nn_conf['neurons_per_layer'] = [2, 2]\n nn_conf['activations'] = ['logistic', 'logistic']\n nn_conf['loss_function'] = 'cross_entropy'\n nn_conf['learning_rate'] = 0.5\n nn_conf['epochs'] = 100\n nn_conf['print_every'] = 100\n else:\n raise ValueError(f\"Network name {network_name} not recognized.\")\n\n return nn_conf\n\n\ndef get_dataset_config(dataset_name: str) -> Dict[str, Any]:\n \"\"\"Get the dataset configuration\n\n Args:\n dataset_name (str): The name of the dataset to get the configuration for\n\n Returns:\n Dict[str, Any]: The dataset configuration\n \"\"\"\n dataset_conf = {}\n if dataset_name == 'and':\n dataset_conf['inputs'] = [[0, 0], [0, 1], [1, 0], [1, 1]]\n dataset_conf['outputs'] = [[0], [0], [0], [1]]\n elif dataset_name == 'xor':\n dataset_conf['inputs'] = [[0, 0], [0, 1], [1, 0], [1, 1]]\n dataset_conf['outputs'] = [[0], [1], [1], [0]]\n elif dataset_name == 'class_example':\n dataset_conf['inputs'] = [0.05, 0.1]\n dataset_conf['desired_outputs'] = [0.01, 0.99]\n dataset_conf['weights'] = [[[0.15, 0.20, 0.35], [0.25, 0.30, 0.35]],\n [[0.40, 0.45, 0.60], [0.50, 0.55, 0.60]]]\n else:\n raise ValueError(f\"Dataset name {dataset_name} not recognized.\")\n\n return dataset_conf\n\n\ndef main():\n \"\"\"This is the main function of main.py\n\n Example:\n python main.py --dataset xor --network 2x1_net\n \"\"\"\n\n # Initializing\n args = get_args()\n # Load the configurations\n nn_type = args.network\n nn_conf = get_network_config(nn_type)\n dataset_type = args.dataset\n dataset_conf = get_dataset_config(dataset_type)\n\n # ------- Start of Code ------- #\n print()\n print(f'Training the `{nn_type}` network on the `{dataset_type}` dataset.')\n if args.dataset != 'class_example': # XOR and AND cases\n # Train the network\n inputs = np.array(dataset_conf['inputs'])\n outputs = np.array(dataset_conf['outputs'])\n # Initialize the network\n netWork = NeuralNetwork(input_size=inputs.shape[1],\n loss_function=nn_conf['loss_function'],\n learning_rate=nn_conf['learning_rate'])\n # Add the layers\n for num_neurons, activation in zip(nn_conf['neurons_per_layer'], nn_conf['activations']):\n netWork.addFCLayer(num_neurons=num_neurons, activation=activation)\n # Train the network for the given number of epochs\n for epoch in range(nn_conf['epochs']):\n netWork.train(inputs, outputs) # Train the network\n loss = netWork.calculate_loss(inputs, outputs) # Calculate the loss\n if epoch % nn_conf['print_every'] == 0:\n 
print(f\"Epoch: {epoch} Loss: {loss}\")\n print(f\"Epoch: {nn_conf['epochs']} Loss: {loss}\")\n # Test on the predictions\n print(f'Predictions on the {dataset_type} dataset')\n for inp, outp in zip(inputs, outputs):\n print(f\"True Output: {outp} Prediction: {netWork.calculate(inp)[0]}\")\n else: # Class Example\n # Set up the weights and biases based on the class example\n inputs = [np.array(dataset_conf['inputs'])]\n desired_outputs = np.array(dataset_conf['desired_outputs'])\n weights = [np.array(weight) for weight in dataset_conf['weights']]\n # Initialize the network using the predefined weights and biases\n netWork = NeuralNetwork(input_size=2,\n loss_function=nn_conf['loss_function'],\n learning_rate=nn_conf['learning_rate'])\n # Add the layers\n for num_neurons, activation, weights_ in \\\n zip(nn_conf['neurons_per_layer'], nn_conf['activations'], weights):\n netWork.addFCLayer(num_neurons=num_neurons, activation=activation,\n weights=weights_)\n # Print the network inputs and weights before training\n print(\"Pre-training Inputs:\")\n print(f\"{inputs[0]}\")\n print(\"Pre-training Weights:\")\n print(f\"{netWork.layers[0].neurons[0].weights} (h1) x \"\n \"{netWork.layers[1].neurons[0].weights} (O1)\")\n print(f\"{netWork.layers[0].neurons[1].weights} (h1) x \"\n \"{netWork.layers[1].neurons[1].weights} (O1)\")\n # Activate the network\n outputs = netWork.calculate(inputs[0]) # Feed-forward the network\n print(f\"Outputs after calling `activate()`:\")\n print(f\"{outputs}\")\n # Calculate the wdeltas - single step of backpropagation\n wdeltas = [netWork.loss_derivative(np.array(outputs), desired_outputs)]\n for j in range(len(netWork.layers) - 1, -1, -1):\n wdeltas = netWork.layers[j].calculate_wdeltas(wdeltas)\n # Print the wdeltas, the weights, and the outputs after backpropagation\n print(\"Wdeltas after calling `calculate_wdeltas()`:\")\n print(f\"{wdeltas}\")\n print(\"Weights after a single step of back-propagation:\")\n print(f\"{netWork.layers[0].neurons[0].weights} (h1) x \"\n \"{netWork.layers[1].neurons[0].weights} (O1)\")\n print(f\"{netWork.layers[0].neurons[1].weights} (h1) x \"\n \"{netWork.layers[1].neurons[1].weights} (O1)\")\n outputs = netWork.calculate(inputs[0])\n print(\"Post-training Outputs:\")\n print(f\"{outputs}\")\n\n\nif __name__ == '__main__':\n try:\n main()\n except Exception as e:\n print(str(e) + '\\n' + str(traceback.format_exc()))\n raise e\n" ]
[ [ "numpy.concatenate", "numpy.array" ], [ "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
suo/pytext
[ "400c80b4c040de12028970a85ce0af864931e0f4" ]
[ "pytext/trainers/trainer.py" ]
[ "#!/usr/bin/env python3\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport itertools\nimport time\nfrom contextlib import ExitStack as contextlib_ExitStack\nfrom typing import Any, Iterable, List, Optional, Tuple\n\nimport torch\nfrom pytext.common.constants import BatchContext, Stage\nfrom pytext.config import PyTextConfig\nfrom pytext.config.component import (\n Component,\n ComponentType,\n create_optimizer,\n create_scheduler,\n create_sparsifier,\n)\nfrom pytext.config.pytext_config import ConfigBase\nfrom pytext.data.data_handler import BatchIterator\nfrom pytext.metric_reporters import MetricReporter\nfrom pytext.models.distributed_model import DistributedModel\nfrom pytext.models.model import Model\nfrom pytext.optimizer import Adam, Optimizer, learning_rates\nfrom pytext.optimizer.scheduler import Scheduler\nfrom pytext.optimizer.sparsifier import Sparsifier\nfrom pytext.task.serialize import save\nfrom pytext.trainers.training_state import TrainingState\nfrom pytext.utils import cuda, precision, timing\n\n\nclass TrainerBase(Component):\n __COMPONENT_TYPE__ = ComponentType.TRAINER\n\n\ndef cycle(iterator: Iterable[Any]) -> Iterable[Any]:\n \"\"\"Like itertools.cycle, but will call iter on the original iterable instead.\n This limits it to not be able to run on say raw generators, but also doesn't\n store a copy of the iterable in memory for repetition.\"\"\"\n while True:\n yield from iterator\n\n\ndef maybe_accumulate_gradients(exit_stack, model, index, sample_size):\n # index == sample_size - 1 represents the last backward pass\n if (\n cuda.DISTRIBUTED_WORLD_SIZE > 1\n and hasattr(model, \"no_sync\")\n and index < sample_size - 1\n ):\n \"\"\"\n Whenever *samples* contains more than one mini-batch (e.g sample_size > 1),\n we want to accumulate gradients locally and only call all-reduce in the\n last backwards pass.\n \"\"\"\n exit_stack.enter_context(model.no_sync())\n\n if precision._FP16_ENABLED and index < sample_size - 1:\n \"\"\"\n Whenever *samples* contains more than one mini-batch (e.g sample_size > 1),\n we want to accumulate gradients in FP16 parameters (e.g delay unscale)\n and only unscale to FP32 parameters after the last backward pass.\n \"\"\"\n exit_stack.enter_context(precision.delay_unscale())\n\n\nclass Trainer(TrainerBase):\n \"\"\"\n Base Trainer class that provide ways to\n 1 Train model, compute metrics against eval set and use the metrics for\n model selection.\n 2 Test trained model, compute and publish metrics against a blind test set.\n\n Attributes:\n epochs (int): Training epochs\n early_stop_after (int): Stop after how many epochs when the eval metric\n is not improving\n max_clip_norm (Optional[float]): Clip gradient norm if set\n report_train_metrics (bool): Whether metrics on training data should be\n computed and reported.\n target_time_limit_seconds (float): Target time limit for training in seconds. 
If\n the expected time to train another epoch exceeds this limit, stop training.\n \"\"\"\n\n class Config(ConfigBase):\n #: Training epochs\n epochs: int = 10\n #: Stop after how many epochs when the eval metric is not improving\n early_stop_after: int = 0\n #: Clip gradient norm if set\n max_clip_norm: Optional[float] = None\n #: Whether metrics on training data should be computed and reported.\n report_train_metrics: bool = True\n #: Target time limit for training, default (None) to no time limit.\n target_time_limit_seconds: Optional[int] = None\n #: Whether to do evaluation and model selection based on it.\n do_eval: bool = True\n #: Number of samples for logging training progress.\n num_samples_to_log_progress: int = 1000\n #: Number of forward & backward per batch before update gradients, the\n #: actual_batch_size = batch_size x num_accumulated_batches\n num_accumulated_batches: int = 1\n #: Define epoch as a fixed number of batches. Subsequent epochs will continue\n #: to iterate through the data, cycling through it when they reach the end.\n #: If not set, use exactly one pass through the dataset as one epoch.\n #: This configuration only affects the train epochs, test and eval\n #: will always test their entire datasets.\n num_batches_per_epoch: Optional[int] = None\n #: config for optimizer, used in parameter update\n optimizer: Optimizer.Config = Adam.Config()\n scheduler: Optional[Scheduler.Config] = None\n sparsifier: Optional[Sparsifier.Config] = None\n\n def __init__(self, config: Config, model: torch.nn.Module):\n if config.early_stop_after > 0:\n assert config.do_eval, \"can't do early stopping when not running evalution\"\n optimizer: torch.optim.Optimizer = create_optimizer(config.optimizer, model)\n self.scheduler: torch.optim.lr_scheduler = (\n create_scheduler(config.scheduler, optimizer)\n if config.scheduler\n else Scheduler()\n )\n self.sparsifier: Sparsifier = (\n create_sparsifier(config.sparsifier) if config.sparsifier else Sparsifier()\n )\n model, self.optimizer = precision.initialize(model, optimizer)\n self.config = config\n\n @classmethod\n def from_config(cls, config: Config, model: torch.nn.Module, *args, **kwargs):\n return cls(config, model)\n\n @timing.time(\"Trainer.test\")\n def test(self, test_iter, model, metric_reporter: MetricReporter):\n state = TrainingState(stage=Stage.TEST, model=model, epoch=1)\n if cuda.CUDA_ENABLED:\n state.model.cuda()\n state.model.eval()\n with torch.no_grad():\n return self.run_epoch(state, test_iter, metric_reporter)\n\n @timing.time(\"pre-training\")\n def set_up_training(self, state: TrainingState, training_data: BatchIterator):\n if cuda.CUDA_ENABLED:\n state.model.cuda()\n state.scheduler.prepare(training_data, self.config.epochs)\n\n if cuda.DISTRIBUTED_WORLD_SIZE > 1:\n device_id = torch.cuda.current_device()\n state.model = DistributedModel(\n module=state.model,\n device_ids=[device_id],\n output_device=device_id,\n broadcast_buffers=False,\n find_unused_parameters=state.model.find_unused_parameters,\n )\n state.start_time = time.time()\n\n if self.config.num_batches_per_epoch:\n # Set the training_data iterator to cycle, so it will never run out,\n # but rather after reaching the end will loop back to the beginning.\n training_data = cycle(training_data)\n return training_data\n\n @timing.time(\"zero gradients\")\n def zero_grads(self, state):\n if state.stage != Stage.TRAIN:\n return\n state.optimizer.zero_grad()\n\n @timing.time(\"backprop\")\n def backprop(self, state, loss):\n if state.stage != 
Stage.TRAIN:\n return\n\n with timing.time(\"loss.backward\"):\n precision.backward(state.optimizer, loss)\n\n @timing.time(\"optimizer\")\n def optimizer_step(self, state):\n if state.stage != Stage.TRAIN:\n return\n\n state.scheduler.step_batch()\n\n if self.config.max_clip_norm is not None:\n grad_norm = precision.clip_grad_norm(\n state.model, state.optimizer, self.config.max_clip_norm\n )\n else:\n grad_norm = None\n\n with timing.time(\"optimizer.step\"):\n state.optimizer.step()\n\n state.step_counter += 1\n # grad_norm could be used to check grads sync in distributed training\n return grad_norm\n\n @timing.time(\"sparsifier\")\n def sparsification_step(self, state):\n # sparsification only if sparifier is used\n if not self.config.sparsifier:\n return\n\n if state.stage != Stage.TRAIN:\n return\n\n if state.sparsifier.sparsification_condition(state):\n state.sparsifier.sparsify(state)\n\n if state.rank == 0:\n current_sparsity = state.sparsifier.get_current_sparsity(state.model)\n print(f\"sparsity in the model: {current_sparsity}\")\n\n def continue_training(self, state: TrainingState) -> bool:\n # Are we done?\n if state.epoch >= self.config.epochs:\n return False\n\n # Check whether the model has improved recently enough\n # Only do this if we're bothering to evaluate the model\n if self.config.do_eval and state.epochs_since_last_improvement >= (\n self.config.early_stop_after or float(\"inf\")\n ):\n print(\n f\"Worker {state.rank}: Eval metric hasn't changed for \"\n + f\"{state.epochs_since_last_improvement} epochs. Stopping now.\"\n )\n return False\n\n # Check whether we think the next epoch will put us over the configured\n # time limit.\n epochs_run = state.epoch + 1\n time_elapsed = time.time() - state.start_time\n mean_epoch_time = time_elapsed / epochs_run\n expected_next_epoch_time = time_elapsed + mean_epoch_time\n target_time_limit = (\n float(\"inf\")\n if self.config.target_time_limit_seconds is None\n else self.config.target_time_limit_seconds\n )\n if expected_next_epoch_time > target_time_limit:\n print(\n f\"Worker {state.rank}: Stopping training after {epochs_run} epochs \"\n f\"and {int(time_elapsed)} seconds, due to the target max training \"\n f\"time of {self.config.target_time_limit_seconds} seconds.\"\n )\n return False\n\n return True\n\n def update_best_model(\n self, state: TrainingState, train_config: PyTextConfig, eval_metric\n ):\n # This should be updated by all workers so they agree on when to stop training\n # when `early_stop_after` is specified.\n state.epochs_since_last_improvement = 0\n state.best_model_metric = eval_metric\n print(f\"Found a better model!\")\n\n # Only one worker should save checkpoints\n if state.rank != 0:\n return\n\n model_state = state.model.state_dict()\n # save to cpu to avoid multiple model copies in gpu memory\n if cuda.CUDA_ENABLED:\n for key, parameter in model_state.items():\n model_state[key] = parameter.cpu()\n state.best_model_state = model_state\n\n @timing.time(\"save checkpoint\")\n def save_checkpoint(self, state: TrainingState, train_config: PyTextConfig) -> str:\n # Only one worker should save checkpoints\n if state.rank != 0:\n return\n\n if train_config.save_module_checkpoints or train_config.save_all_checkpoints:\n # saves per-epoch sub-modules when save_all_checkpoints or\n # save_module_checkpoints is enabled\n state.model.save_modules(\n base_path=train_config.modules_save_dir, suffix=f\"-ep{state.epoch}\"\n )\n if state.epochs_since_last_improvement == 0:\n # state.epochs_since_last_improvement 
== 0 means found a better\n # model in current epoch, thus update best model's sub-modules\n state.model.save_modules(base_path=train_config.modules_save_dir)\n\n # next to add new config and implementation of frequency on checkpointing\n if train_config.save_all_checkpoints:\n return save(\n config=train_config,\n model=state.model,\n meta=None,\n tensorizers=None,\n training_state=state,\n identifier=str(state.epoch),\n )\n\n def load_best_model(self, state: TrainingState):\n if cuda.CUDA_ENABLED:\n # Move current model to CPU to avoid multiple models in GPU memory\n state.model.cpu()\n state.model.load_state_dict(\n {k: v.cuda() for k, v in state.best_model_state.items()}\n )\n # Move model back to GPU\n state.model.cuda()\n else:\n state.model.load_state_dict(state.best_model_state)\n\n def train(\n self,\n training_data: BatchIterator,\n eval_data: BatchIterator,\n model: Model,\n metric_reporter: MetricReporter,\n train_config: PyTextConfig,\n rank: int = 0,\n ) -> Tuple[torch.nn.Module, Any]:\n \"\"\"\n Train and eval a model, the model states will be modified.\n Args:\n train_iter (BatchIterator): batch iterator of training data\n eval_iter (BatchIterator): batch iterator of evaluation data\n model (Model): model to be trained\n metric_reporter (MetricReporter): compute metric based on training\n output and report results to console, file.. etc\n train_config (PyTextConfig): training config\n training_result (Optional): only meaningful for Hogwild training. default\n is None\n rank (int): only used in distributed training, the rank of the current\n training thread, evaluation will only be done in rank 0\n\n Returns:\n model, best_metric: the trained model together with the best metric\n \"\"\"\n state = TrainingState(\n model=model,\n optimizer=self.optimizer,\n scheduler=self.scheduler,\n sparsifier=self.sparsifier,\n rank=rank,\n )\n return self.train_from_state(\n state, training_data, eval_data, metric_reporter, train_config\n )\n\n @timing.time(\"Trainer.train_from_state\")\n def train_from_state(\n self,\n state: TrainingState,\n training_data: BatchIterator,\n eval_data: BatchIterator,\n metric_reporter: MetricReporter,\n train_config: PyTextConfig,\n ) -> Tuple[torch.nn.Module, Any]:\n \"\"\"\n Train and eval a model from a given training state will be modified.\n This function iterates epochs specified in config, and for each epoch do:\n\n 1. Train model using training data, aggregate and report training results\n 2. Adjust learning rate if scheduler is specified\n 3. Evaluate model using evaluation data\n 4. Calculate metrics based on evaluation results and select best model\n\n Args:\n training_state (TrainingState): contrains stateful information to be\n able to restore a training job\n train_iter (BatchIterator): batch iterator of training data\n eval_iter (BatchIterator): batch iterator of evaluation data\n model (Model): model to be trained\n metric_reporter (MetricReporter): compute metric based on training\n output and report results to console, file.. 
etc\n train_config (PyTextConfig): training config\n\n Returns:\n model, best_metric: the trained model together with the best metric\n \"\"\"\n training_data = self.set_up_training(state, training_data)\n model = state.model\n rank = state.rank\n trainable_params = sum(\n p.numel() for p in state.model.parameters() if p.requires_grad\n )\n print(f\"Num trainable parameters: {trainable_params}\")\n\n while self.continue_training(state):\n state.epoch += 1\n state.epochs_since_last_improvement += 1\n lrs = learning_rates(state.optimizer)\n print(f\"\\nWorker {state.rank} starting epoch {state.epoch}\")\n print(f\"Learning rate(s): {', '.join(map(str, lrs))}\")\n\n with timing.time(\"train epoch\"):\n state.stage = Stage.TRAIN\n state.model.train()\n print(f\"start training epoch {state.epoch}\")\n epoch_data = training_data\n if self.config.num_batches_per_epoch:\n # We want to limit the number of batches in the epoch;\n # equivalent to epoch_data[:num_batches_per_epoch] for iterators.\n # In this case we set the training data iterator to cycle earlier\n # in the training process, so when it reaches the end it will\n # loop back to the beginning.\n epoch_data = itertools.islice(\n epoch_data, self.config.num_batches_per_epoch\n )\n self.run_epoch(state, epoch_data, metric_reporter)\n\n if not self.config.do_eval:\n continue\n\n with timing.time(\"eval epoch\"):\n state.stage = Stage.EVAL\n model.eval(Stage.EVAL)\n print(f\"start evaluating epoch {state.epoch}\")\n with torch.no_grad():\n eval_metric = self.run_epoch(state, eval_data, metric_reporter)\n\n # Step the learning rate scheduler(s)\n assert eval_metric is not None\n state.scheduler.step_epoch(\n metrics=metric_reporter.get_model_select_metric(eval_metric),\n epoch=state.epoch,\n )\n\n # Did we train a better model?\n better_model = metric_reporter.compare_metric(\n eval_metric, state.best_model_metric\n )\n if better_model:\n self.update_best_model(state, train_config, eval_metric)\n if better_model or train_config.save_all_checkpoints:\n self.save_checkpoint(state, train_config)\n\n if self.optimizer.finalize():\n state.stage = Stage.EVAL\n model.eval(Stage.EVAL)\n print(f\"start evaluating finalized state\")\n with torch.no_grad():\n eval_metric = self.run_epoch(state, eval_data, metric_reporter)\n better_model = metric_reporter.compare_metric(\n eval_metric, state.best_model_metric\n )\n if better_model:\n self.update_best_model(state, train_config, eval_metric)\n if better_model or train_config.save_all_checkpoints:\n self.save_checkpoint(state, train_config)\n # Only bother loading the best model for master worker\n if rank == 0 and state.best_model_state is not None:\n self.load_best_model(state)\n\n return state.model, state.best_model_metric\n\n @timing.report_snapshot\n def run_epoch(\n self, state: TrainingState, data: BatchIterator, metric_reporter: MetricReporter\n ):\n # This method is due for some refactoring, pushing it off because it interacts\n # with the metric reporter too much. Much of the logic here either changes in\n # the NewTaskTrainer or should change with a better metric reporter design.\n report_metric = state.stage != Stage.TRAIN or self.config.report_train_metrics\n model = state.model\n samples = []\n\n \"\"\"\n Sometimes, a batch of inputs is too large to fit into GPU, which has to\n be split into several micro-batches. 
However, to improve efficiency,\n it would be helpful to only apply params/gradients sync at original batch\n boundaries instead of micro-batch boundaries.\n num_accumulated_batches specified the number of accumulating gradients\n locally before sync gradients, total training_batch_size =\n train_batch_size x num_accumulated_batches and it will improve the system\n performance by reduce the total network transfer bytes.\n \"\"\"\n for sample in enumerate(data):\n samples.append(sample)\n if (\n state.stage != Stage.TRAIN\n or len(samples) == self.config.num_accumulated_batches\n ):\n self.run_step(samples, state, metric_reporter, report_metric)\n samples = []\n if samples:\n self.run_step(samples, state, metric_reporter, report_metric)\n samples = []\n\n metrics = None\n if report_metric:\n with timing.time(\"report metrics\"):\n metrics = metric_reporter.report_metric(\n model, state.stage, state.epoch, print_to_channels=(state.rank == 0)\n )\n else:\n metric_reporter._reset()\n\n return metrics\n\n @timing.time(\"run_step\")\n def run_step(\n self,\n samples: List[Any],\n state: TrainingState,\n metric_reporter: MetricReporter,\n report_metric: bool,\n ):\n sample_size = len(samples)\n assert sample_size <= self.config.num_accumulated_batches\n\n model = state.model\n self.zero_grads(state)\n for idx, (batch_id, (inputs, targets, context)) in enumerate(samples):\n with contextlib_ExitStack() as exit_stack:\n maybe_accumulate_gradients(exit_stack, model, idx, sample_size)\n # pass context to model to use in forward call if needed\n model.contextualize(context)\n with timing.time(\"model.forward\"):\n logits = model(*inputs)\n\n with timing.time(\"compute loss\"):\n loss = precision.maybe_float(\n model.get_loss(logits, targets, context)\n )\n if BatchContext.IGNORE_LOSS in context:\n loss *= 0\n elif sample_size > 1:\n # gradients averaged per batch and accumulated across samples.\n # divide sample_size to let gradients averaged per example\n loss = loss / sample_size\n\n self.backprop(state, loss)\n\n if report_metric:\n with timing.time(\"get pred\"):\n preds, scores = model.get_pred(\n logits, targets, context, state.stage, *inputs\n )\n\n with timing.time(\"add metrics\"):\n metric_reporter.add_batch_stats(\n batch_id, preds, targets, scores, loss.item(), inputs, **context\n )\n\n if batch_id % self.config.num_samples_to_log_progress == 0:\n print(\n f\"Running batch {batch_id} for epoch {state.epoch} in {state.stage} stage\",\n flush=True,\n )\n # update gradients after len(samples) forward & backward\n self.optimizer_step(state)\n self.sparsification_step(state)\n\n\nclass TaskTrainer(Trainer):\n __EXPANSIBLE__ = True\n\n class Config(Trainer.Config):\n \"\"\"Make mypy happy\"\"\"\n\n @timing.time(\"run_step\")\n def run_step(\n self,\n samples: List[Any],\n state: TrainingState,\n metric_reporter: MetricReporter,\n report_metric: bool,\n ):\n \"\"\"Our run_step is a bit different, because we're wrapping the model forward\n call with model.train_batch, which arranges tensors and gets loss, etc.\n\n Whenever \"samples\" contains more than one mini-batch (sample_size > 1),\n we want to accumulate gradients locally and only call all-reduce in the\n last backwards pass.\n \"\"\"\n sample_size = len(samples)\n assert sample_size <= self.config.num_accumulated_batches\n\n model = state.model\n self.zero_grads(state)\n for idx, (batch_id, (raw_batch, batch)) in enumerate(samples):\n with contextlib_ExitStack() as exit_stack:\n # enter ddp no_sync context and fp16 delay_scale context if 
needed\n maybe_accumulate_gradients(exit_stack, model, idx, sample_size)\n with timing.time(\"model.train_batch\"):\n loss, metric_data = model.train_batch(model, batch, state)\n if sample_size > 1:\n # gradients averaged per batch and accumulated across samples.\n # divide sample_size to let gradients averaged per example\n loss = loss / sample_size\n self.backprop(state, loss)\n\n if report_metric:\n with timing.time(\"add metrics\"):\n metric_reporter.add_batch_stats(\n batch_id,\n *metric_data,\n # TODO merge this step into add_batch_stats once all data\n # migration is done\n **metric_reporter.batch_context(raw_batch, batch),\n )\n if batch_id % self.config.num_samples_to_log_progress == 0:\n metric_reporter.report_realtime_metric(state.stage)\n # update gradients after #len(samples) forward & backward\n self.optimizer_step(state)\n self.sparsification_step(state)\n\n def _prepare_scheduler(self, training_batches, scheduler=None):\n \"\"\"Batch based schedulers require knowing the number of batches in\n the data. We're not supporting that yet with the Data api, need to figure out\n how to expose this info or restructure batch-based schedulers to not need it.\"\"\"\n if scheduler.batch_based_schedulers:\n raise Exception(\"New tasks don't yet support batch-based scheduling\")\n return scheduler\n" ]
[ [ "torch.no_grad", "torch.cuda.current_device" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
a1600012888/fairseq
[ "dbd2cd08fc396f919d2e737513095fcb966896c0" ]
[ "fairseq/criterions/masked_adlm.py" ]
[ "# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nimport math\n\nimport torch\nimport torch.nn.functional as F\n\nfrom fairseq import metrics, utils\nfrom fairseq.criterions import FairseqCriterion, register_criterion\n\n\n@register_criterion('masked_adlm')\nclass MaskedAdLmLoss(FairseqCriterion):\n \"\"\"\n Implementation for the loss used in masked language model (MLM) training.\n \"\"\"\n\n\n def __init__(self, args, task):\n super(MaskedAdLmLoss, self).__init__(args, task)\n\n self.vocab = self.task.source_dictionary\n print(len(self.vocab.count))\n self.register_buffer('margins', torch.zeros((len(self.vocab.count), 1)))\n self.margins.requires_grad = False\n\n self.margin_lambda = args.margin_lambda\n self.margin_lr = args.margin_lr\n self.margin_norm = args.margin_norm\n\n @staticmethod\n def add_args(parser):\n \"\"\"Add criterion-specific arguments to the parser.\"\"\"\n super(MaskedAdLmLoss,\n MaskedAdLmLoss).add_args(parser)\n parser.add_argument('--margin_lambda', default=0.5, type=float, metavar='D',\n help='weight for the adaptive margin loss')\n parser.add_argument('--margin_lr', default=0.0001, type=float, metavar='D',\n help='weight for the adaptive margin loss')\n parser.add_argument('--margin-norm', default='l1', type=str,\n help='Type of margin norm in the loss')\n\n def forward(self, model, sample, reduce=True):\n \"\"\"Compute the loss for the given sample.\n\n Returns a tuple with three elements:\n 1) the loss\n 2) the sample size, which is used as the denominator for the gradient\n 3) logging outputs to display while training\n \"\"\"\n # compute MLM loss\n #self.margins.requires_grad = model.training\n\n masked_tokens = sample['target'].ne(self.padding_idx)\n sample_size = masked_tokens.int().sum().item()\n\n # (Rare case) When all tokens are masked, the model results in empty\n # tensor and gives CUDA error.\n if sample_size == 0:\n masked_tokens = None\n\n logits = model(**sample['net_input'], masked_tokens=masked_tokens)[0]\n targets = model.get_targets(sample, [logits])\n\n #import IPython\n #IPython.embed()\n if sample_size != 0:\n targets = targets[masked_tokens]\n\n\n # targets shape: [x]\n # logits.shape: [x, 32769]\n one_hot = F.one_hot(targets, len(self.vocab.count)) # [x, 32769]\n\n #import IPython\n #IPython.embed()\n\n m = F.embedding(targets, self.margins) # [x, 1]\n #m = self.margins(targets).squeeze(dim=-1)\n margin = m * one_hot # [x, 32769]\n\n #import IPython\n #IPython.embed()\n\n logits_minus_margin = logits - margin\n log_softmax = F.log_softmax(\n logits_minus_margin.view(-1, logits.size(-1)),\n dim=-1,\n dtype=torch.float32,\n ) # [x, 32769]\n\n\n adm_loss = F.nll_loss(\n log_softmax, \n targets.view(-1),\n reduction='sum',\n ignore_index=self.padding_idx,\n )\n\n # cal margin grad\n with torch.no_grad():\n margin_log_grad = torch.gather(log_softmax.detach(), dim=-1,\n index=targets.unsqueeze(-1)) # [x, 1]\n margin_grad_cross = torch.exp(margin_log_grad) - \\\n torch.ones_like(margin_log_grad)\n\n if self.margin_norm == 'l1':\n margin_grad = margin_grad_cross - torch.ones_like(m) * self.margin_lambda\n else:\n # l2 norm\n margin_grad = margin_grad_cross - m * self.margin_lambda * 2.0\n margin_update = -1.0 * margin_grad * self.margin_lr\n\n self.margins.scatter_add_(0, targets.unsqueeze(-1), margin_update.half())\n\n # for logging below! 
margin_norm; normal loss\n margin_norm = torch.mean(self.margins) * sample['nsentences'] # used for log!\n\n normal_loss = F.nll_loss(\n F.log_softmax(\n logits.view(-1, logits.size(-1)),\n dim=-1,\n dtype=torch.float32,\n ),\n targets.view(-1),\n reduction='sum',\n ignore_index=self.padding_idx,\n )\n\n logging_output = {\n 'loss': utils.item(normal_loss.data) if reduce else normal_loss.data,\n 'margin_n': utils.item(margin_norm.data) if reduce else margin_norm.data,\n 'ntokens': sample['ntokens'],\n 'nsentences': sample['nsentences'],\n 'sample_size': sample_size,\n 'admloss': utils.item(adm_loss.data) if reduce else adm_loss.data,\n }\n return adm_loss, sample_size, logging_output\n\n @staticmethod\n def reduce_metrics(logging_outputs) -> None:\n \"\"\"Aggregate logging outputs from data parallel training.\"\"\"\n loss_sum = sum(log.get('loss', 0) for log in logging_outputs)\n admloss_sum = sum(log.get('admloss', 0) for log in logging_outputs)\n margin_n = sum(log.get('margin_n', 0) for log in logging_outputs)\n sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)\n nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)\n\n metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), sample_size, round=3)\n metrics.log_scalar('admloss', admloss_sum / sample_size / math.log(2), sample_size, round=3)\n metrics.log_scalar('margin_norm', margin_n / nsentences, 32, round=3)\n metrics.log_derived('ppl', lambda meters: round(2**meters['loss'].avg, 3))\n\n @staticmethod\n def logging_outputs_can_be_summed() -> bool:\n \"\"\"\n Whether the logging outputs returned by `forward` can be summed\n across workers prior to calling `reduce_metrics`. Setting this\n to True will improve distributed training speed.\n \"\"\"\n return True\n" ]
[ [ "torch.nn.functional.embedding", "torch.mean", "torch.exp", "torch.no_grad", "torch.ones_like" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
terra-submersa/opensfm-camera-coverage
[ "a9ad2bff799a5d0d07d7900fc7d1bf10bc489632" ]
[ "src/odm_report_shot_coverage/models/reconstruction.py" ]
[ "import json\nimport logging\n\nimport geojson\nimport numpy as np\nfrom tqdm import tqdm\nfrom scipy import stats\n\nfrom odm_report_shot_coverage.models.camera import Camera, json_parse_camera\nfrom odm_report_shot_coverage.models.shot import Shot, shot_boundaries_from_points, Boundaries\nfrom odm_report_shot_coverage.models.wavefront_25d import Wavefront25D, parse_wavefront_25d_obj\n\n\nclass Reconstruction:\n cameras: 'dict[str, Camera]' = {}\n _shots: 'list[Shot]' = []\n mesh = Wavefront25D\n orthophoto_boundaries: Boundaries\n\n @property\n def shots(self) -> 'list[Shot]':\n self._shots.sort(key=lambda s: s.image_name)\n return self._shots\n\n def add_camera(self, name: str, camera: Camera):\n self.cameras[name] = camera\n\n def add_shot(self, shot: Shot):\n self._shots.append(shot)\n\n def to_json(self) -> dict:\n return {\n 'cameras': {n: c.to_json() for n, c in self.cameras.items()},\n 'shots': [s.to_json() for s in self.shots],\n # 'mesh': self.mesh.to_json(),\n 'boundaries': self.mesh.boundaries.to_json(),\n 'orthophotoBoundaries': self.orthophoto_boundaries.to_json(),\n }\n\n def compute_shot_boundaries(self):\n \"\"\"\n From shots and points, fill the shot_boundaries\n :rtype: None\n \"\"\"\n\n for shot in tqdm(self.shots, desc='Computing shot boundaries'):\n points = []\n for i, point in enumerate(self.mesh.points):\n pixel = shot.camera_pixel(point)\n if shot.camera.in_frame(pixel):\n points.append(point)\n shot.boundaries = shot_boundaries_from_points(points)\n\n def find_camera_by_width_height(self, width: int, height: int) -> Camera:\n cs = [c for c in self.cameras.values() if c.width == width and c.height == height]\n if len(cs) != 1:\n raise Exception('Not exactly one camera found with size %s x %s' % (width, height))\n return cs[0]\n\n\nclass ReconstructionCollection:\n reconstructions: 'list[Reconstruction]' = []\n\n def append(self, reconstruction: Reconstruction):\n self.reconstructions.append(reconstruction)\n\n def __getitem__(self, i: int):\n return self.reconstructions[i]\n\n def __len__(self):\n return len(self.reconstructions)\n\n\ndef lin_reg(pairs: 'list[(float, float)]') -> (float, float, float, float):\n x = [p[0] for p in pairs]\n y = [p[1] for p in pairs]\n return stats.linregress(x, y)\n\n\ndef _parse_point_cloud_boundaries(path: str) -> Boundaries:\n with open('%s/odm_report/stats.json' % path, 'r') as fd:\n stats_json = json.load(fd)\n bbox = stats_json['point_cloud_statistics']['stats']['bbox']['native']['bbox']\n return Boundaries(\n x_min=bbox['minx'],\n x_max=bbox['maxx'],\n y_min=bbox['miny'],\n y_max=bbox['maxy'],\n z_min=bbox['minz'],\n z_max=bbox['maxz'],\n )\n\n\ndef _parse_camera_shotgeojson(path: str, reconstruction: Reconstruction, native_to_25d_coordinates):\n with open('%s/cameras.json' % path, 'r') as fd:\n cameras_json = json.load(fd)\n for n, j in cameras_json.items():\n camera = json_parse_camera(n, j)\n reconstruction.add_camera(n, camera)\n\n (tr_x, tr_y, tr_z) = native_to_25d_coordinates\n with open('%s/odm_report/shots.geojson' % path, 'r') as fd:\n shots_geojson = geojson.load(fd)\n for feat in shots_geojson['features']:\n shot = Shot()\n props = feat['properties']\n shot.camera = reconstruction.find_camera_by_width_height(props['width'], props['height'])\n shot.image_name = props['filename']\n translation = props['translation']\n shot.translation = (tr_x(translation[0]), tr_y(translation[1]), tr_z(translation[2]))\n shot.rotation = props['rotation']\n reconstruction.add_shot(shot)\n\n\ndef 
_native_to_model_25d_coordinates(native_boundaries: Boundaries, model_25d_boundaries: Boundaries):\n width_25d = model_25d_boundaries.x_max - model_25d_boundaries.x_min\n height_25d = model_25d_boundaries.y_max - model_25d_boundaries.y_min\n elevation_25d = model_25d_boundaries.z_max - model_25d_boundaries.z_min\n width_native = native_boundaries.x_max - native_boundaries.x_min\n height_native = native_boundaries.y_max - native_boundaries.y_min\n elevation_native = native_boundaries.z_max - native_boundaries.z_min\n width_ratio = np.abs(1 - width_native / width_25d)\n height_ratio = np.abs(1 - height_native / height_25d)\n elevation_ratio = np.abs(1 - elevation_native / elevation_25d)\n logging.info(\n 'native/25d model boundaries discrepancies width=%.2f%% height=%.2f%% elevation=%.2f%%' % (\n width_ratio * 100, height_ratio * 100, elevation_ratio * 100))\n\n return (\n lambda x: (x - (native_boundaries.x_max + native_boundaries.x_min) / 2) + (\n model_25d_boundaries.x_max + model_25d_boundaries.x_min) / 2,\n lambda y: (y - (native_boundaries.y_max + native_boundaries.y_min) / 2) + (\n model_25d_boundaries.y_max + model_25d_boundaries.y_min) / 2,\n lambda z: (z - (native_boundaries.z_max + native_boundaries.z_min) / 2) + (\n model_25d_boundaries.z_max + model_25d_boundaries.z_min) / 2\n )\n\n\ndef parse_reconstruction(path: str) -> Reconstruction:\n reconstruction = Reconstruction()\n\n wf = parse_wavefront_25d_obj('%s/odm_texturing_25d/odm_textured_model_geo.obj' % path)\n reconstruction.mesh = wf\n reconstruction.orthophoto_boundaries = wf.boundaries\n\n native_boundaries = _parse_point_cloud_boundaries(path)\n _parse_camera_shotgeojson(path, reconstruction,\n _native_to_model_25d_coordinates(native_boundaries, wf.boundaries))\n\n return reconstruction\n" ]
[ [ "scipy.stats.linregress", "numpy.abs" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] } ]
SeongSuKim95/ReID-Baseline-swin
[ "f30db86eb2690c20c4fbb0189eb52b57358705df" ]
[ "demo.py" ]
[ "import argparse\nimport scipy.io\nimport torch\nimport numpy as np\nimport os\nfrom torchvision import datasets\nimport matplotlib\nmatplotlib.use('agg')\nimport matplotlib.pyplot as plt\n#######################################################################\n# Evaluate\nparser = argparse.ArgumentParser(description='Demo')\nparser.add_argument('--query_index', default=777, type=int, help='test_image_index')\nparser.add_argument('--test_dir',default='/mnt/hdd_data/Dataset/market1501_ss/pytorch',type=str, help='./test_data')\nopts = parser.parse_args()\n\ndata_dir = opts.test_dir\nimage_datasets = {x: datasets.ImageFolder( os.path.join(data_dir,x) ) for x in ['gallery','query']}\n\n#####################################################################\n#Show result\ndef imshow(path, title=None):\n \"\"\"Imshow for Tensor.\"\"\"\n im = plt.imread(path)\n plt.imshow(im)\n if title is not None:\n plt.title(title)\n plt.pause(0.001) # pause a bit so that plots are updated\n\n######################################################################\nresult = scipy.io.loadmat('pytorch_result.mat')\nquery_feature = torch.FloatTensor(result['query_f'])\nquery_cam = result['query_cam'][0]\nquery_label = result['query_label'][0]\ngallery_feature = torch.FloatTensor(result['gallery_f'])\ngallery_cam = result['gallery_cam'][0]\ngallery_label = result['gallery_label'][0]\n\nmulti = os.path.isfile('multi_query.mat')\n\nif multi:\n m_result = scipy.io.loadmat('multi_query.mat')\n mquery_feature = torch.FloatTensor(m_result['mquery_f'])\n mquery_cam = m_result['mquery_cam'][0]\n mquery_label = m_result['mquery_label'][0]\n mquery_feature = mquery_feature.cuda()\n\nquery_feature = query_feature.cuda()\ngallery_feature = gallery_feature.cuda()\n\n#######################################################################\n# sort the images\ndef sort_img(qf, ql, qc, gf, gl, gc):\n query = qf.view(-1,1)\n # print(query.shape)\n score = torch.mm(gf,query)\n score = score.squeeze(1).cpu()\n score = score.numpy()\n # predict index\n index = np.argsort(score) #from small to large\n index = index[::-1]\n # index = index[0:2000]\n # good index\n query_index = np.argwhere(gl==ql)\n #same camera\n camera_index = np.argwhere(gc==qc)\n\n #good_index = np.setdiff1d(query_index, camera_index, assume_unique=True)\n junk_index1 = np.argwhere(gl==-1)\n junk_index2 = np.intersect1d(query_index, camera_index)\n junk_index = np.append(junk_index2, junk_index1) \n\n mask = np.in1d(index, junk_index, invert=True)\n index = index[mask]\n return index\n\ni = opts.query_index\nindex = sort_img(query_feature[i],query_label[i],query_cam[i],gallery_feature,gallery_label,gallery_cam)\n\n########################################################################\n# Visualize the rank result\n\nquery_path, _ = image_datasets['query'].imgs[i]\nquery_label = query_label[i]\nprint(query_path)\nprint('Top 10 images are as follow:')\ntry: # Visualize Ranking Result \n # Graphical User Interface is needed\n fig = plt.figure(figsize=(16,4))\n ax = plt.subplot(1,11,1)\n ax.axis('off')\n imshow(query_path,'query')\n for i in range(10):\n ax = plt.subplot(1,11,i+2)\n ax.axis('off')\n img_path, _ = image_datasets['gallery'].imgs[index[i]]\n label = gallery_label[index[i]]\n imshow(img_path)\n if label == query_label:\n ax.set_title('%d'%(i+1), color='green')\n else:\n ax.set_title('%d'%(i+1), color='red')\n print(img_path)\nexcept RuntimeError:\n for i in range(10):\n img_path = image_datasets.imgs[index[i]]\n print(img_path[0])\n print('If you want to see 
the visualization of the ranking result, graphical user interface is needed.')\n\nfig.savefig(\"show.png\")\n" ]
[ [ "matplotlib.pyplot.imshow", "torch.mm", "matplotlib.pyplot.title", "matplotlib.use", "matplotlib.pyplot.imread", "numpy.in1d", "numpy.argwhere", "numpy.intersect1d", "numpy.append", "matplotlib.pyplot.subplot", "torch.FloatTensor", "numpy.argsort", "matplotlib.pyplot.pause", "matplotlib.pyplot.figure" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
erinaldi/MetaRL
[ "6dfb8d2e63a1802ca7ef9c28f6ab1a758d07f871" ]
[ "rlkit/envs/point_robot.py" ]
[ "import numpy as np\nfrom gym import spaces\nfrom gym import Env\n\nfrom . import register_env\n\n\n@register_env('point-robot')\nclass PointEnv(Env):\n \"\"\"\n point robot on a 2-D plane with position control\n tasks (aka goals) are positions on the plane\n\n - tasks sampled from unit square\n - reward is L2 distance\n \"\"\"\n\n def __init__(self, randomize_tasks=False, n_tasks=2):\n\n if randomize_tasks:\n np.random.seed(1337)\n goals = [[np.random.uniform(-1., 1.), np.random.uniform(-1., 1.)] for _ in range(n_tasks)]\n else:\n # some hand-coded goals for debugging\n goals = [np.array([10, -10]),\n np.array([10, 10]),\n np.array([-10, 10]),\n np.array([-10, -10]),\n np.array([0, 0]),\n\n np.array([7, 2]),\n np.array([0, 4]),\n np.array([-6, 9])\n ]\n goals = [g / 10. for g in goals]\n self.goals = goals\n\n self.reset_task(0)\n self.observation_space = spaces.Box(low=-np.inf, high=np.inf, shape=(2,))\n self.action_space = spaces.Box(low=-0.1, high=0.1, shape=(2,))\n\n def reset_task(self, idx):\n ''' reset goal AND reset the agent '''\n self._goal = self.goals[idx]\n self.reset()\n\n def get_all_task_idx(self):\n return range(len(self.goals))\n\n def reset_model(self):\n # reset to a random location on the unit square\n self._state = np.random.uniform(-1., 1., size=(2,))\n return self._get_obs()\n\n def reset(self):\n return self.reset_model()\n\n def _get_obs(self):\n return np.copy(self._state)\n\n def step(self, action):\n self._state = self._state + action\n x, y = self._state\n x -= self._goal[0]\n y -= self._goal[1]\n reward = - (x ** 2 + y ** 2) ** 0.5\n done = False\n ob = self._get_obs()\n return ob, reward, done, dict()\n\n def viewer_setup(self):\n print('no viewer')\n pass\n\n def render(self):\n print('current state:', self._state)\n\n\n@register_env('sparse-point-robot')\nclass SparsePointEnv(PointEnv):\n '''\n - tasks sampled from unit half-circle\n - reward is L2 distance given only within goal radius\n\n NOTE that `step()` returns the dense reward because this is used during meta-training\n the algorithm should call `sparsify_rewards()` to get the sparse rewards\n '''\n def __init__(self, randomize_tasks=False, n_tasks=2, goal_radius=0.2):\n super().__init__(randomize_tasks, n_tasks)\n self.goal_radius = goal_radius\n\n if randomize_tasks:\n np.random.seed(1337)\n radius = 1.0\n angles = np.linspace(0, np.pi, num=n_tasks)\n xs = radius * np.cos(angles)\n ys = radius * np.sin(angles)\n goals = np.stack([xs, ys], axis=1)\n np.random.shuffle(goals)\n goals = goals.tolist()\n\n self.goals = goals\n self.reset_task(0)\n\n def sparsify_rewards(self, r):\n ''' zero out rewards when outside the goal radius '''\n mask = (r >= -self.goal_radius).astype(np.float32)\n r = r * mask\n return r\n\n def reset_model(self):\n self._state = np.array([0, 0])\n return self._get_obs()\n\n def step(self, action):\n ob, reward, done, d = super().step(action)\n sparse_reward = self.sparsify_rewards(reward)\n # make sparse rewards positive\n if reward >= -self.goal_radius:\n sparse_reward += 1\n d.update({'sparse_reward': sparse_reward})\n return ob, reward, done, d\n" ]
[ [ "numpy.random.seed", "numpy.linspace", "numpy.cos", "numpy.stack", "numpy.random.shuffle", "numpy.sin", "numpy.copy", "numpy.random.uniform", "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
sejongjoa/openpilot_083
[ "301500dff6bd53e64257898cac939b24f56befac" ]
[ "selfdrive/locationd/locationd.py" ]
[ "#!/usr/bin/env python3\nimport json\nimport numpy as np\nimport sympy as sp\nimport cereal.messaging as messaging\nfrom cereal import log\nfrom common.params import Params\nimport common.transformations.coordinates as coord\nfrom common.transformations.orientation import ecef_euler_from_ned, \\\n euler_from_quat, \\\n ned_euler_from_ecef, \\\n quat_from_euler, euler_from_rot, \\\n rot_from_quat, rot_from_euler\nfrom rednose.helpers import KalmanError\nfrom selfdrive.locationd.models.live_kf import LiveKalman, States, ObservationKind\nfrom selfdrive.locationd.models.constants import GENERATED_DIR\nfrom selfdrive.swaglog import cloudlog\n\n#from datetime import datetime\n#from laika.gps_time import GPSTime\n\nfrom sympy.utilities.lambdify import lambdify\nfrom rednose.helpers.sympy_helpers import euler_rotate\n\nSensorSource = log.SensorEventData.SensorSource\n\n\nVISION_DECIMATION = 2\nSENSOR_DECIMATION = 10\nPOSENET_STD_HIST = 40\n\n\ndef to_float(arr):\n return [float(arr[0]), float(arr[1]), float(arr[2])]\n\n\ndef get_H():\n # this returns a function to eval the jacobian\n # of the observation function of the local vel\n roll = sp.Symbol('roll')\n pitch = sp.Symbol('pitch')\n yaw = sp.Symbol('yaw')\n vx = sp.Symbol('vx')\n vy = sp.Symbol('vy')\n vz = sp.Symbol('vz')\n\n h = euler_rotate(roll, pitch, yaw).T*(sp.Matrix([vx, vy, vz]))\n H = h.jacobian(sp.Matrix([roll, pitch, yaw, vx, vy, vz]))\n H_f = lambdify([roll, pitch, yaw, vx, vy, vz], H)\n return H_f\n\n\nclass Localizer():\n def __init__(self, disabled_logs=None, dog=None):\n if disabled_logs is None:\n disabled_logs = []\n\n self.kf = LiveKalman(GENERATED_DIR)\n self.reset_kalman()\n self.max_age = .1 # seconds\n self.disabled_logs = disabled_logs\n self.calib = np.zeros(3)\n self.device_from_calib = np.eye(3)\n self.calib_from_device = np.eye(3)\n self.calibrated = False\n self.H = get_H()\n\n self.posenet_invalid_count = 0\n self.posenet_speed = 0\n self.car_speed = 0\n self.posenet_stds = 10*np.ones((POSENET_STD_HIST))\n\n self.converter = coord.LocalCoord.from_ecef(self.kf.x[States.ECEF_POS])\n\n self.unix_timestamp_millis = 0\n self.last_gps_fix = 0\n self.device_fell = False\n\n @staticmethod\n def msg_from_state(converter, calib_from_device, H, predicted_state, predicted_cov, calibrated):\n predicted_std = np.sqrt(np.diagonal(predicted_cov))\n\n fix_ecef = predicted_state[States.ECEF_POS]\n fix_ecef_std = predicted_std[States.ECEF_POS_ERR]\n vel_ecef = predicted_state[States.ECEF_VELOCITY]\n vel_ecef_std = predicted_std[States.ECEF_VELOCITY_ERR]\n fix_pos_geo = coord.ecef2geodetic(fix_ecef)\n #fix_pos_geo_std = np.abs(coord.ecef2geodetic(fix_ecef + fix_ecef_std) - fix_pos_geo)\n orientation_ecef = euler_from_quat(predicted_state[States.ECEF_ORIENTATION])\n orientation_ecef_std = predicted_std[States.ECEF_ORIENTATION_ERR]\n device_from_ecef = rot_from_quat(predicted_state[States.ECEF_ORIENTATION]).T\n calibrated_orientation_ecef = euler_from_rot(calib_from_device.dot(device_from_ecef))\n\n acc_calib = calib_from_device.dot(predicted_state[States.ACCELERATION])\n acc_calib_std = np.sqrt(np.diagonal(calib_from_device.dot(\n predicted_cov[States.ACCELERATION_ERR, States.ACCELERATION_ERR]).dot(\n calib_from_device.T)))\n ang_vel_calib = calib_from_device.dot(predicted_state[States.ANGULAR_VELOCITY])\n ang_vel_calib_std = np.sqrt(np.diagonal(calib_from_device.dot(\n predicted_cov[States.ANGULAR_VELOCITY_ERR, States.ANGULAR_VELOCITY_ERR]).dot(\n calib_from_device.T)))\n\n vel_device = device_from_ecef.dot(vel_ecef)\n 
device_from_ecef_eul = euler_from_quat(predicted_state[States.ECEF_ORIENTATION]).T\n idxs = list(range(States.ECEF_ORIENTATION_ERR.start, States.ECEF_ORIENTATION_ERR.stop)) + \\\n list(range(States.ECEF_VELOCITY_ERR.start, States.ECEF_VELOCITY_ERR.stop))\n condensed_cov = predicted_cov[idxs][:, idxs]\n HH = H(*list(np.concatenate([device_from_ecef_eul, vel_ecef])))\n vel_device_cov = HH.dot(condensed_cov).dot(HH.T)\n vel_device_std = np.sqrt(np.diagonal(vel_device_cov))\n\n vel_calib = calib_from_device.dot(vel_device)\n vel_calib_std = np.sqrt(np.diagonal(calib_from_device.dot(\n vel_device_cov).dot(calib_from_device.T)))\n\n orientation_ned = ned_euler_from_ecef(fix_ecef, orientation_ecef)\n #orientation_ned_std = ned_euler_from_ecef(fix_ecef, orientation_ecef + orientation_ecef_std) - orientation_ned\n ned_vel = converter.ecef2ned(fix_ecef + vel_ecef) - converter.ecef2ned(fix_ecef)\n #ned_vel_std = self.converter.ecef2ned(fix_ecef + vel_ecef + vel_ecef_std) - self.converter.ecef2ned(fix_ecef + vel_ecef)\n\n fix = messaging.log.LiveLocationKalman.new_message()\n\n # write measurements to msg\n measurements = [\n # measurement field, value, std, valid\n (fix.positionGeodetic, fix_pos_geo, np.nan*np.zeros(3), True),\n (fix.positionECEF, fix_ecef, fix_ecef_std, True),\n (fix.velocityECEF, vel_ecef, vel_ecef_std, True),\n (fix.velocityNED, ned_vel, np.nan*np.zeros(3), True),\n (fix.velocityDevice, vel_device, vel_device_std, True),\n (fix.accelerationDevice, predicted_state[States.ACCELERATION], predicted_std[States.ACCELERATION_ERR], True),\n (fix.orientationECEF, orientation_ecef, orientation_ecef_std, True),\n (fix.calibratedOrientationECEF, calibrated_orientation_ecef, np.nan*np.zeros(3), calibrated),\n (fix.orientationNED, orientation_ned, np.nan*np.zeros(3), True),\n (fix.angularVelocityDevice, predicted_state[States.ANGULAR_VELOCITY], predicted_std[States.ANGULAR_VELOCITY_ERR], True),\n (fix.velocityCalibrated, vel_calib, vel_calib_std, calibrated),\n (fix.angularVelocityCalibrated, ang_vel_calib, ang_vel_calib_std, calibrated),\n (fix.accelerationCalibrated, acc_calib, acc_calib_std, calibrated),\n ]\n\n for field, value, std, valid in measurements:\n # TODO: can we write the lists faster?\n field.value = to_float(value)\n field.std = to_float(std)\n field.valid = valid\n\n return fix\n\n def liveLocationMsg(self):\n fix = self.msg_from_state(self.converter, self.calib_from_device, self.H, self.kf.x, self.kf.P, self.calibrated)\n # experimentally found these values, no false positives in 20k minutes of driving\n old_mean, new_mean = np.mean(self.posenet_stds[:POSENET_STD_HIST//2]), np.mean(self.posenet_stds[POSENET_STD_HIST//2:])\n std_spike = new_mean/old_mean > 4 and new_mean > 7\n\n fix.posenetOK = not (std_spike and self.car_speed > 5)\n fix.deviceStable = not self.device_fell\n self.device_fell = False\n\n #fix.gpsWeek = self.time.week\n #fix.gpsTimeOfWeek = self.time.tow\n fix.unixTimestampMillis = self.unix_timestamp_millis\n\n if np.linalg.norm(fix.positionECEF.std) < 50 and self.calibrated:\n fix.status = 'valid'\n elif np.linalg.norm(fix.positionECEF.std) < 50:\n fix.status = 'uncalibrated'\n else:\n fix.status = 'uninitialized'\n return fix\n\n def update_kalman(self, time, kind, meas, R=None):\n try:\n self.kf.predict_and_observe(time, kind, meas, R)\n except KalmanError:\n cloudlog.error(\"Error in predict and observe, kalman reset\")\n self.reset_kalman()\n\n def handle_gps(self, current_time, log):\n # ignore the message if the fix is invalid\n if log.flags % 2 == 0:\n 
return\n\n self.last_gps_fix = current_time\n\n self.converter = coord.LocalCoord.from_geodetic([log.latitude, log.longitude, log.altitude])\n ecef_pos = self.converter.ned2ecef([0, 0, 0])\n ecef_vel = self.converter.ned2ecef(np.array(log.vNED)) - ecef_pos\n ecef_pos_R = np.diag([(3*log.verticalAccuracy)**2]*3)\n ecef_vel_R = np.diag([(log.speedAccuracy)**2]*3)\n\n #self.time = GPSTime.from_datetime(datetime.utcfromtimestamp(log.timestamp*1e-3))\n self.unix_timestamp_millis = log.timestamp\n gps_est_error = np.sqrt((self.kf.x[0] - ecef_pos[0])**2 +\n (self.kf.x[1] - ecef_pos[1])**2 +\n (self.kf.x[2] - ecef_pos[2])**2)\n\n orientation_ecef = euler_from_quat(self.kf.x[States.ECEF_ORIENTATION])\n orientation_ned = ned_euler_from_ecef(ecef_pos, orientation_ecef)\n orientation_ned_gps = np.array([0, 0, np.radians(log.bearingDeg)])\n orientation_error = np.mod(orientation_ned - orientation_ned_gps - np.pi, 2*np.pi) - np.pi\n initial_pose_ecef_quat = quat_from_euler(ecef_euler_from_ned(ecef_pos, orientation_ned_gps))\n if np.linalg.norm(ecef_vel) > 5 and np.linalg.norm(orientation_error) > 1:\n cloudlog.error(\"Locationd vs ubloxLocation orientation difference too large, kalman reset\")\n self.reset_kalman(init_pos=ecef_pos, init_orient=initial_pose_ecef_quat)\n self.update_kalman(current_time, ObservationKind.ECEF_ORIENTATION_FROM_GPS, initial_pose_ecef_quat)\n elif gps_est_error > 50:\n cloudlog.error(\"Locationd vs ubloxLocation position difference too large, kalman reset\")\n self.reset_kalman(init_pos=ecef_pos, init_orient=initial_pose_ecef_quat)\n\n self.update_kalman(current_time, ObservationKind.ECEF_POS, ecef_pos, R=ecef_pos_R)\n self.update_kalman(current_time, ObservationKind.ECEF_VEL, ecef_vel, R=ecef_vel_R)\n\n def handle_car_state(self, current_time, log):\n self.speed_counter += 1\n\n if self.speed_counter % SENSOR_DECIMATION == 0:\n self.update_kalman(current_time, ObservationKind.ODOMETRIC_SPEED, [log.vEgo])\n self.car_speed = abs(log.vEgo)\n if log.vEgo == 0:\n self.update_kalman(current_time, ObservationKind.NO_ROT, [0, 0, 0])\n\n def handle_cam_odo(self, current_time, log):\n self.cam_counter += 1\n\n if self.cam_counter % VISION_DECIMATION == 0:\n rot_device = self.device_from_calib.dot(log.rot)\n rot_device_std = self.device_from_calib.dot(log.rotStd)\n self.update_kalman(current_time,\n ObservationKind.CAMERA_ODO_ROTATION,\n np.concatenate([rot_device, 10*rot_device_std]))\n trans_device = self.device_from_calib.dot(log.trans)\n trans_device_std = self.device_from_calib.dot(log.transStd)\n self.posenet_speed = np.linalg.norm(trans_device)\n self.posenet_stds[:-1] = self.posenet_stds[1:]\n self.posenet_stds[-1] = trans_device_std[0]\n self.update_kalman(current_time,\n ObservationKind.CAMERA_ODO_TRANSLATION,\n np.concatenate([trans_device, 10*trans_device_std]))\n\n def handle_sensors(self, current_time, log):\n # TODO does not yet account for double sensor readings in the log\n for sensor_reading in log:\n sensor_time = 1e-9 * sensor_reading.timestamp\n # TODO: handle messages from two IMUs at the same time\n if sensor_reading.source == SensorSource.lsm6ds3:\n continue\n\n # Gyro Uncalibrated\n if sensor_reading.sensor == 5 and sensor_reading.type == 16:\n self.gyro_counter += 1\n if self.gyro_counter % SENSOR_DECIMATION == 0:\n v = sensor_reading.gyroUncalibrated.v\n self.update_kalman(sensor_time, ObservationKind.PHONE_GYRO, [-v[2], -v[1], -v[0]])\n\n # Accelerometer\n if sensor_reading.sensor == 1 and sensor_reading.type == 1:\n # check if device fell, estimate 10 for 
g\n # 40m/s**2 is a good filter for falling detection, no false positives in 20k minutes of driving\n self.device_fell = self.device_fell or (np.linalg.norm(np.array(sensor_reading.acceleration.v) - np.array([10, 0, 0])) > 40)\n\n self.acc_counter += 1\n if self.acc_counter % SENSOR_DECIMATION == 0:\n v = sensor_reading.acceleration.v\n self.update_kalman(sensor_time, ObservationKind.PHONE_ACCEL, [-v[2], -v[1], -v[0]])\n\n def handle_live_calib(self, current_time, log):\n if len(log.rpyCalib):\n self.calib = log.rpyCalib\n self.device_from_calib = rot_from_euler(self.calib)\n self.calib_from_device = self.device_from_calib.T\n self.calibrated = log.calStatus == 1\n\n def reset_kalman(self, current_time=None, init_orient=None, init_pos=None):\n self.filter_time = current_time\n init_x = LiveKalman.initial_x.copy()\n # too nonlinear to init on completely wrong\n if init_orient is not None:\n init_x[3:7] = init_orient\n if init_pos is not None:\n init_x[:3] = init_pos\n self.kf.init_state(init_x, covs=np.diag(LiveKalman.initial_P_diag), filter_time=current_time)\n\n self.observation_buffer = []\n\n self.gyro_counter = 0\n self.acc_counter = 0\n self.speed_counter = 0\n self.cam_counter = 0\n\n\ndef locationd_thread(sm, pm, disabled_logs=None):\n if disabled_logs is None:\n disabled_logs = []\n\n if sm is None:\n socks = ['gpsLocationExternal', 'sensorEvents', 'cameraOdometry', 'liveCalibration', 'carState']\n sm = messaging.SubMaster(socks, ignore_alive=['gpsLocationExternal'])\n if pm is None:\n pm = messaging.PubMaster(['liveLocationKalman'])\n\n params = Params()\n localizer = Localizer(disabled_logs=disabled_logs)\n\n while True:\n sm.update()\n\n for sock, updated in sm.updated.items():\n if updated and sm.valid[sock]:\n t = sm.logMonoTime[sock] * 1e-9\n if sock == \"sensorEvents\":\n localizer.handle_sensors(t, sm[sock])\n elif sock == \"gpsLocationExternal\":\n localizer.handle_gps(t, sm[sock])\n elif sock == \"carState\":\n localizer.handle_car_state(t, sm[sock])\n elif sock == \"cameraOdometry\":\n localizer.handle_cam_odo(t, sm[sock])\n elif sock == \"liveCalibration\":\n localizer.handle_live_calib(t, sm[sock])\n\n if sm.updated['cameraOdometry']:\n t = sm.logMonoTime['cameraOdometry']\n msg = messaging.new_message('liveLocationKalman')\n msg.logMonoTime = t\n\n msg.liveLocationKalman = localizer.liveLocationMsg()\n msg.liveLocationKalman.inputsOK = sm.all_alive_and_valid()\n msg.liveLocationKalman.sensorsOK = sm.alive['sensorEvents'] and sm.valid['sensorEvents']\n\n gps_age = (t / 1e9) - localizer.last_gps_fix\n msg.liveLocationKalman.gpsOK = gps_age < 1.0\n pm.send('liveLocationKalman', msg)\n\n if sm.frame % 1200 == 0 and msg.liveLocationKalman.gpsOK: # once a minute\n location = {\n 'latitude': msg.liveLocationKalman.positionGeodetic.value[0],\n 'longitude': msg.liveLocationKalman.positionGeodetic.value[1],\n 'altitude': msg.liveLocationKalman.positionGeodetic.value[2],\n }\n params.put(\"LastGPSPosition\", json.dumps(location))\n\n\ndef main(sm=None, pm=None):\n locationd_thread(sm, pm)\n\n\nif __name__ == \"__main__\":\n import os\n os.environ[\"OMP_NUM_THREADS\"] = \"1\"\n main()\n" ]
[ [ "numpy.diag", "numpy.radians", "numpy.sqrt", "numpy.eye", "numpy.linalg.norm", "numpy.ones", "numpy.concatenate", "numpy.mean", "numpy.mod", "numpy.array", "numpy.zeros", "numpy.diagonal" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
redhat6/cornac
[ "856cf0f546a0dc6b46f407128d89ef2534994c60", "856cf0f546a0dc6b46f407128d89ef2534994c60" ]
[ "cornac/models/hft/recom_hft.py", "cornac/models/ncf/recom_gmf.py" ]
[ "# Copyright 2018 The Cornac Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ============================================================================\n\nimport numpy as np\n\nfrom ..recommender import Recommender\nfrom ...exception import ScoreException\n\n\nclass HFT(Recommender):\n \"\"\"Hidden Factors and Hidden Topics\n\n Parameters\n ----------\n name: string, default: 'HFT'\n The name of the recommender model.\n\n k: int, optional, default: 10\n The dimension of the latent factors.\n\n max_iter: int, optional, default: 50\n Maximum number of iterations for EM.\n\n grad_iter: int, optional, default: 50\n Maximum number of iterations for L-BFGS.\n\n lambda_text: float, default: 0.1\n Weight of corpus likelihood in objective function.\n\n l2_reg: float, default: 0.001\n Regularization for user item latent factors.\n\n vocab_size: int, optional, default: 8000\n Size of vocabulary for review text.\n\n init_params: dictionary, optional, default: None\n List of initial parameters, e.g., init_params = {'alpha': alpha, 'beta_u': beta_u,\n 'beta_i': beta_i, 'gamma_u': gamma_u, 'gamma_v': gamma_v}\n\n alpha: float\n Model offset, optional initialization via init_params.\n\n beta_u: ndarray. shape (n_user, 1)\n User biases, optional initialization via init_params.\n\n beta_u: ndarray. shape (n_item, 1)\n Item biases, optional initialization via init_params.\n\n gamma_u: ndarray, shape (n_users,k)\n The user latent factors, optional initialization via init_params.\n\n gamma_v: ndarray, shape (n_items,k)\n The item latent factors, optional initialization via init_params.\n\n trainable: boolean, optional, default: True\n When False, the model will not be re-trained, and input of pre-trained parameters are required.\n\n verbose: boolean, optional, default: True\n When True, some running logs are displayed.\n \n seed: int, optional, default: None\n Random seed for weight initialization.\n\n References\n ----------\n Julian McAuley, Jure Leskovec. 
\"Hidden Factors and Hidden Topics: Understanding Rating Dimensions with Review Text\"\n RecSys '13 Proceedings of the 7th ACM conference on Recommender systems Pages 165-172\n \"\"\"\n\n def __init__(self, name='HFT', k=10, max_iter=50, grad_iter=50, \n lambda_text=0.1, l2_reg=0.001, vocab_size=8000,\n init_params=None, trainable=True, verbose=True, seed=None):\n super().__init__(name=name, trainable=trainable, verbose=verbose)\n \n self.k = k\n self.lambda_text = lambda_text\n self.l2_reg = l2_reg\n self.grad_iter = grad_iter\n self.name = name\n self.max_iter = max_iter\n self.verbose = verbose\n self.init_params = {} if not init_params else init_params\n self.seed = seed\n self.vocab_size = vocab_size\n\n def fit(self, train_set, val_set=None):\n \"\"\"Fit the model to observations.\n\n Parameters\n ----------\n train_set: :obj:`cornac.data.Dataset`, required\n User-Item preference data as well as additional modalities.\n\n val_set: :obj:`cornac.data.Dataset`, optional, default: None\n User-Item preference data for model selection purposes (e.g., early stopping).\n\n Returns\n -------\n self : object\n \"\"\"\n Recommender.fit(self, train_set, val_set)\n from ...utils.init_utils import normal\n\n self.n_item = self.train_set.num_items\n self.n_user = self.train_set.num_users\n\n self.alpha = self.init_params.get('alpha', train_set.global_mean)\n self.beta_u = self.init_params.get('beta_u', normal(self.n_user, std=0.01, random_state=self.seed))\n self.beta_i = self.init_params.get('beta_i', normal(self.n_item, std=0.01, random_state=self.seed))\n self.gamma_u = self.init_params.get('gamma_u', normal((self.n_user, self.k), std=0.01, random_state=self.seed))\n self.gamma_i = self.init_params.get('gamma_i', normal((self.n_item, self.k), std=0.01, random_state=self.seed))\n\n if self.trainable:\n self._fit_hft()\n\n return self\n\n @staticmethod\n def _build_data(csr_mat):\n index_list = []\n rating_list = []\n for i in range(csr_mat.shape[0]):\n j, k = csr_mat.indptr[i], csr_mat.indptr[i + 1]\n index_list.append(csr_mat.indices[j:k])\n rating_list.append(csr_mat.data[j:k])\n return index_list, rating_list\n\n def _fit_hft(self):\n from .hft import Model\n from tqdm import trange\n\n # document data\n bow_mat = self.train_set.item_text.batch_bow(np.arange(self.n_item), keep_sparse=True)\n documents, _ = self._build_data(bow_mat) # bag of word feature\n # Rating data\n user_data = self._build_data(self.train_set.matrix)\n item_data = self._build_data(self.train_set.matrix.T.tocsr())\n\n model = Model(n_user=self.n_user, n_item=self.n_item, alpha=self.alpha, beta_u=self.beta_u, beta_i=self.beta_i,\n gamma_u=self.gamma_u, gamma_i=self.gamma_i, n_vocab=self.vocab_size, k=self.k,\n lambda_text=self.lambda_text, l2_reg=self.l2_reg, grad_iter=self.grad_iter)\n\n model.init_count(docs=documents)\n\n # training\n loop = trange(self.max_iter, disable=not self.verbose)\n for _ in loop:\n model.assign_word_topics(docs=documents)\n loss = model.update_params(rating_data=(user_data, item_data))\n loop.set_postfix(loss=loss)\n\n self.alpha, self.beta_u, self.beta_i, self.gamma_u, self.gamma_i = model.get_parameter()\n\n if self.verbose:\n print('Learning completed!')\n\n def score(self, user_idx, item_idx=None):\n \"\"\"Predict the scores/ratings of a user for an item.\n\n Parameters\n ----------\n user_idx: int, required\n The index of the user for whom to perform score prediction.\n\n item_idx: int, optional, default: None\n The index of the item for that to perform score prediction.\n If None, scores 
for all known items will be returned.\n\n Returns\n -------\n res : A scalar or a Numpy array\n Relative scores that the user gives to the item or to all known items\n \"\"\"\n if item_idx is None:\n if self.train_set.is_unk_user(user_idx):\n raise ScoreException(\"Can't make score prediction for (user_id=%d)\" % user_idx)\n\n known_item_scores = self.alpha + self.beta_u[user_idx] + self.beta_i + self.gamma_i.dot(\n self.gamma_u[user_idx, :])\n return known_item_scores\n else:\n if self.train_set.is_unk_user(user_idx) or self.train_set.is_unk_item(item_idx):\n raise ScoreException(\"Can't make score prediction for (user_id=%d, item_id=%d)\" % (user_idx, item_idx))\n\n user_pred = self.alpha + self.beta_u[user_idx] + self.beta_i[item_idx] + self.gamma_i[item_idx, :].dot(\n self.gamma_u[user_idx, :])\n\n return user_pred\n", "# Copyright 2018 The Cornac Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ============================================================================\n\nimport numpy as np\n\nfrom ..recommender import Recommender\nfrom ...exception import ScoreException\n\n\nclass GMF(Recommender):\n \"\"\"Generalized Matrix Factorization.\n\n Parameters\n ----------\n num_factors: int, optional, default: 8\n Embedding size of MF model.\n\n regs: float, optional, default: 0.\n Regularization for user and item embeddings.\n\n num_epochs: int, optional, default: 20\n Number of epochs.\n\n batch_size: int, optional, default: 256\n Batch size.\n\n num_neg: int, optional, default: 4\n Number of negative instances to pair with a positive instance.\n\n lr: float, optional, default: 0.001\n Learning rate.\n\n learner: str, optional, default: 'adam'\n Specify an optimizer: adagrad, adam, rmsprop, sgd\n\n early_stopping: {min_delta: float, patience: int}, optional, default: None\n If `None`, no early stopping. Meaning of the arguments: \n \n - `min_delta`: the minimum increase in monitored value on validation set to be considered as improvement, \\\n i.e. an increment of less than min_delta will count as no improvement.\n - `patience`: number of epochs with no improvement after which training should be stopped.\n\n name: string, optional, default: 'GMF'\n Name of the recommender model.\n\n trainable: boolean, optional, default: True\n When False, the model is not trained and Cornac assumes that the model is already \\\n pre-trained.\n\n verbose: boolean, optional, default: False\n When True, some running logs are displayed.\n\n seed: int, optional, default: None\n Random seed for parameters initialization.\n\n References\n ----------\n * He, X., Liao, L., Zhang, H., Nie, L., Hu, X., & Chua, T. S. (2017, April). Neural collaborative filtering. \\\n In Proceedings of the 26th international conference on world wide web (pp. 
173-182).\n \"\"\"\n\n def __init__(self, name='GMF',\n num_factors=8, regs=(0., 0.), num_epochs=20, batch_size=256, num_neg=4,\n lr=0.001, learner='adam', early_stopping=None, trainable=True, verbose=True, seed=None):\n super().__init__(name=name, trainable=trainable, verbose=verbose)\n\n self.num_factors = num_factors\n self.regs = regs\n self.num_epochs = num_epochs\n self.batch_size = batch_size\n self.num_neg = num_neg\n self.learning_rate = lr\n self.learner = learner\n self.early_stopping = early_stopping\n self.seed = seed\n\n def fit(self, train_set, val_set=None):\n \"\"\"Fit the model to observations.\n\n Parameters\n ----------\n train_set: :obj:`cornac.data.Dataset`, required\n User-Item preference data as well as additional modalities.\n\n val_set: :obj:`cornac.data.Dataset`, optional, default: None\n User-Item preference data for model selection purposes (e.g., early stopping).\n\n Returns\n -------\n self : object\n \"\"\"\n Recommender.fit(self, train_set, val_set)\n\n if self.trainable:\n self._fit_gmf()\n\n return self\n\n def _fit_gmf(self):\n import os\n import tensorflow as tf\n from tqdm import trange\n from .ops import gmf, loss_fn, train_fn\n\n np.random.seed(self.seed)\n os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n\n graph = tf.Graph()\n with graph.as_default():\n tf.set_random_seed(self.seed)\n\n self.user_id = tf.placeholder(shape=[None, ], dtype=tf.int32, name='user_id')\n self.item_id = tf.placeholder(shape=[None, ], dtype=tf.int32, name='item_id')\n self.labels = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='labels')\n\n self.interaction = gmf(uid=self.user_id, iid=self.item_id, num_users=self.train_set.num_users,\n num_items=self.train_set.num_items, emb_size=self.num_factors,\n reg_user=self.regs[0], reg_item=self.regs[1], seed=self.seed)\n\n logits = tf.layers.dense(self.interaction, units=1, name='logits',\n kernel_initializer=tf.initializers.lecun_uniform(self.seed))\n self.prediction = tf.nn.sigmoid(logits)\n\n self.loss = loss_fn(labels=self.labels, logits=logits)\n train_op = train_fn(self.loss, learning_rate=self.learning_rate, learner=self.learner)\n\n initializer = tf.global_variables_initializer()\n\n config = tf.ConfigProto()\n config.gpu_options.allow_growth = True\n self.sess = tf.Session(graph=graph, config=config)\n self.sess.run(initializer)\n\n loop = trange(self.num_epochs, disable=not self.verbose)\n for _ in loop:\n count = 0\n sum_loss = 0\n for i, (batch_users, batch_items, batch_ratings) in enumerate(\n self.train_set.uir_iter(self.batch_size, shuffle=True, binary=True, num_zeros=self.num_neg)):\n _, _loss = self.sess.run([train_op, self.loss],\n feed_dict={\n self.user_id: batch_users,\n self.item_id: batch_items,\n self.labels: batch_ratings.reshape(-1, 1)\n })\n\n count += len(batch_ratings)\n sum_loss += _loss * len(batch_ratings)\n if i % 10 == 0:\n loop.set_postfix(loss=(sum_loss / count))\n\n if self.early_stopping is not None and self.early_stop(**self.early_stopping):\n break\n loop.close()\n\n def monitor_value(self):\n \"\"\"Calculating monitored value used for early stopping on validation set (`val_set`).\n This function will be called by `early_stop()` function.\n\n Returns\n -------\n res : float\n Monitored value on validation set.\n Return `None` if `val_set` is `None`.\n \"\"\"\n if self.val_set is None:\n return None\n\n from .ops import ndcg\n\n return ndcg(self, self.train_set, self.val_set)\n\n def score(self, user_idx, item_idx=None):\n 
\"\"\"Predict the scores/ratings of a user for an item.\n\n Parameters\n ----------\n user_idx: int, required\n The index of the user for whom to perform score prediction.\n\n item_idx: int, optional, default: None\n The index of the item for that to perform score prediction.\n If None, scores for all known items will be returned.\n\n Returns\n -------\n res : A scalar or a Numpy array\n Relative scores that the user gives to the item or to all known items\n \"\"\"\n if item_idx is None:\n if self.train_set.is_unk_user(user_idx):\n raise ScoreException(\"Can't make score prediction for (user_id=%d)\" % user_idx)\n\n known_item_scores = self.sess.run(self.prediction, feed_dict={\n self.user_id: [user_idx], self.item_id: np.arange(self.train_set.num_items)\n })\n return known_item_scores.ravel()\n else:\n if self.train_set.is_unk_user(user_idx) or self.train_set.is_unk_item(item_idx):\n raise ScoreException(\"Can't make score prediction for (user_id=%d, item_id=%d)\" % (user_idx, item_idx))\n\n user_pred = self.sess.run(self.prediction, feed_dict={\n self.user_id: [user_idx], self.item_id: [item_idx]\n })\n return user_pred.ravel()\n" ]
[ [ "numpy.arange" ], [ "tensorflow.Graph", "tensorflow.nn.sigmoid", "numpy.random.seed", "numpy.arange", "tensorflow.placeholder", "tensorflow.compat.v1.logging.set_verbosity", "tensorflow.ConfigProto", "tensorflow.global_variables_initializer", "tensorflow.initializers.lecun_uniform", "tensorflow.Session", "tensorflow.set_random_seed" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "1.12", "1.4", "1.13", "1.5", "1.7", "0.12", "1.0", "1.2" ] } ]
ryanloney/openvino-1
[ "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7", "4e0a740eb3ee31062ba0df88fcf438564f67edb7" ]
[ "tools/mo/unit_tests/mo/load/loader_test.py", "tools/mo/unit_tests/mo/ops/dft_signal_size_canonicalization_test.py", "tools/mo/unit_tests/mo/middle/InterpolateSequenceToInterpolate_test.py", "tools/mo/openvino/tools/mo/ops/constant_fill.py", "tools/mo/openvino/tools/mo/ops/prelu.py", "tools/mo/openvino/tools/mo/front/kaldi/extractors/tdnncomponent_ext.py", "tools/mo/unit_tests/mo/ops/priorbox_test.py", "tools/mo/openvino/tools/mo/ops/tile.py", "tools/mo/openvino/tools/mo/front/tf/common.py", "src/core/tests/frontend/paddle/test_models/gen_scripts/generate_exp.py", "tools/mo/unit_tests/mo/front/tf/pad_tf_to_pad_test.py", "tools/mo/openvino/tools/mo/middle/quantize_dequantize_linear_resolver.py", "src/bindings/python/tests_compatibility/test_inference_engine/test_InferRequest.py", "tools/mo/openvino/tools/mo/front/image_scaler.py", "tools/mo/unit_tests/mo/front/common/partial_infer/roipooling_test.py", "tools/mo/unit_tests/mo/front/onnx/activation_ext_test.py", "tools/mo/unit_tests/mo/front/mxnet/gluoncv_ssd_anchors_test.py", "tools/mo/openvino/tools/mo/middle/GatherNdNormalizer.py", "tools/mo/unit_tests/mo/front/onnx/priorbox_clustered_ext_test.py", "tools/mo/unit_tests/mo/back/add_outputs_recursive_test.py", "src/core/tests/frontend/paddle/test_models/gen_scripts/generate_unsupported_relu.py" ]
[ "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\n\nfrom openvino.tools.mo.load.tf.loader import graph_or_sub_graph_has_nhwc_ops\nfrom unit_tests.utils.graph import build_graph, result, regular_op, const, connect_front\n\n\nclass TFLoaderTest(unittest.TestCase):\n @staticmethod\n def build_conv_graph():\n nodes = {\n **const('weights', np.random.randn(1, 1, 1, 1)),\n **regular_op('input', {'op': 'Parameter'}),\n **regular_op('conv', {'op': 'Conv2D', 'layout': 'NHWC'}),\n **result('result'),\n }\n edges = [*connect_front('input', '0:conv'),\n *connect_front('weights', '1:conv'),\n *connect_front('conv:0', 'result'),\n ]\n graph = build_graph(nodes, edges)\n\n graph.stage = 'front'\n return graph\n\n @staticmethod\n def build_parameter_result_graph():\n nodes = {\n **regular_op('input', {'op': 'Parameter'}),\n **result('result'),\n }\n edges = [*connect_front('input', '0:result'),\n ]\n graph = build_graph(nodes, edges)\n graph.stage = 'front'\n return graph\n\n @staticmethod\n def build_loop_graph(body_graph):\n # create fake Loop operation\n nodes = {\n **regular_op('input', {'op': 'Parameter'}),\n **regular_op('loop', {'op': 'Loop', 'body': body_graph, 'sub_graphs': ['body']}),\n **result('result'),\n }\n edges = [*connect_front('input', '0:loop'),\n *connect_front('loop:0', 'result'),\n ]\n graph = build_graph(nodes, edges)\n graph.stage = 'front'\n return graph\n\n def test_convolution_main_graph(self):\n self.assertTrue(graph_or_sub_graph_has_nhwc_ops(self.build_conv_graph()))\n\n def test_convolution_loop_body_graph(self):\n self.assertTrue(graph_or_sub_graph_has_nhwc_ops(self.build_loop_graph(self.build_conv_graph())))\n\n def test_no_convolution_main_graph(self):\n self.assertFalse(graph_or_sub_graph_has_nhwc_ops(self.build_parameter_result_graph()))\n\n def test_no_convolution_main_and_sub_graph(self):\n self.assertFalse(graph_or_sub_graph_has_nhwc_ops(self.build_loop_graph(self.build_parameter_result_graph())))\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\nfrom generator import generator, generate\n\nfrom openvino.tools.mo.ops.dft import FFTBase\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array\n\n\n@generator\nclass DFTSignalSizeCanonicalizationTest(unittest.TestCase):\n @generate(*[\n (int64_array([-1, 77]), int64_array([1, 2]), int64_array([2, 180, 180, 2]), int64_array([180, 77])),\n (int64_array([390, 87]), int64_array([2, 0]), int64_array([2, 180, 180, 2]), int64_array([390, 87])),\n (int64_array([600, -1, 40]),\n int64_array([3, 0, 1]),\n int64_array([7, 50, 130, 400, 2]),\n int64_array([600, 7, 40])),\n (int64_array([-1, 16, -1]),\n int64_array([3, 0, 2]),\n int64_array([7, 50, 130, 400, 2]),\n int64_array([400, 16, 130])),\n (int64_array([16, -1, -1]),\n int64_array([3, 0, 2]),\n int64_array([7, 50, 130, 400, 2]),\n int64_array([16, 7, 130])),\n (int64_array([-1, -1, 16]),\n int64_array([3, 0, 2]),\n int64_array([7, 50, 130, 400, 2]),\n int64_array([400, 7, 16])),\n (int64_array([-1, -1, -1]),\n int64_array([3, 0, 2]),\n int64_array([7, 50, 130, 400, 2]),\n int64_array([400, 7, 130])),\n ])\n def test_canonicalization(self, signal_size, axes, input_shape, expected_result):\n canonicalized_signal_size = FFTBase.canonicalize_signal_size(signal_size, axes, input_shape)\n self.assertTrue(np.array_equal(canonicalized_signal_size, expected_result))\n", "# Copyright (C) 2018-2022 Intel 
Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\nimport unittest\n\nfrom openvino.tools.mo.middle.InterpolateSequenceToInterpolate import InterpolateSequenceToInterpolate\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array\nfrom openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs\nfrom unit_tests.utils.graph import build_graph\n\ngraph_node_attrs_for_2d_case_1_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([660])\n },\n 'size_1_data': {'value': int64_array([660]), 'shape': [1], 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([3.0])\n },\n 'scale_1_data': {'value': np.array([3.0]), 'shape': [1], 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2])\n },\n 'axes_1_data': {'value': int64_array([2]), 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 660, 350]), 'kind': 'data'},\n 'size_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([700])\n },\n 'size_2_data': {'value': int64_array([700]), 'shape': [1], 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([2.0])\n },\n 'scale_2_data': {'value': np.array([2.0]), 'shape': [1], 'kind': 'data'},\n 'axes_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([3])\n },\n 'axes_2_data': {'value': int64_array([3]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 660, 700]), 'kind': 'data'},\n 'size_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1320])\n },\n 'size_3_data': {'value': int64_array([1320]), 'shape': [1], 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([2.0])\n },\n 'scale_3_data': {'value': np.array([2.0]), 'shape': [1], 'kind': 'data'},\n 'axes_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2])\n },\n 'axes_3_data': {'value': int64_array([2]), 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_1_opset4_case = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('size_1', 'size_1_data'),\n ('scale_1', 'scale_1_data'),\n ('axes_1', 'axes_1_data'),\n ('size_1_data', 'interpolate_1', {'in': 1}),\n ('scale_1_data', 'interpolate_1', {'in': 2}),\n ('axes_1_data', 'interpolate_1', {'in': 3}),\n 
('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('size_2', 'size_2_data'),\n ('scale_2', 'scale_2_data'),\n ('axes_2', 'axes_2_data'),\n ('size_2_data', 'interpolate_2', {'in': 1}),\n ('scale_2_data', 'interpolate_2', {'in': 2}),\n ('axes_2_data', 'interpolate_2', {'in': 3}),\n ('interpolate_2', 'interpolate_2_data'),\n\n ('interpolate_2_data', 'interpolate_3', {'in': 0}),\n ('size_3', 'size_3_data'),\n ('scale_3', 'scale_3_data'),\n ('axes_3', 'axes_3_data'),\n ('size_3_data', 'interpolate_3', {'in': 1}),\n ('scale_3_data', 'interpolate_3', {'in': 2}),\n ('axes_3_data', 'interpolate_3', {'in': 3}),\n ('interpolate_3', 'interpolate_3_data'),\n\n ('interpolate_3_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\nref_graph_node_attrs_for_2d_case_1_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([660, 700])\n },\n 'size_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([3.0, 2.0])\n },\n 'scale_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2, 3])\n },\n 'axes_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'scales',\n 'antialias': 0,\n 'pads_begin': int64_array([0]),\n 'pads_end': int64_array([0]),\n 'coordinate_transformation_mode': 'half_pixel',\n 'nearest_mode': 'round_prefer_floor',\n 'cube_coeff': -0.75,\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 660, 700]), 'kind': 'data'},\n 'size_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1320])\n },\n 'size_3_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([2.0])\n },\n 'scale_3_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'axes_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2])\n },\n 'axes_3_data': {'value': int64_array([2]), 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nref_edges_for_2d_case_1_opset4_case = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('size_1', 'size_1_data'),\n ('scale_1', 'scale_1_data'),\n ('axes_1', 'axes_1_data'),\n ('size_1_data', 'interpolate_1', {'in': 1}),\n ('scale_1_data', 'interpolate_1', {'in': 2}),\n ('axes_1_data', 'interpolate_1', {'in': 3}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'interpolate_3', {'in': 0}),\n ('size_3', 'size_3_data'),\n ('scale_3', 'scale_3_data'),\n ('axes_3', 'axes_3_data'),\n ('size_3_data', 'interpolate_3', {'in': 1}),\n 
('scale_3_data', 'interpolate_3', {'in': 2}),\n ('axes_3_data', 'interpolate_3', {'in': 3}),\n ('interpolate_3', 'interpolate_3_data'),\n\n ('interpolate_3_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\ngraph_node_attrs_for_2d_case_1 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([660])\n },\n 'scale_1_data': {'value': int64_array([660]), 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 660, 350]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([700])\n },\n 'scale_2_data': {'value': int64_array([700]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([3]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 660, 700]), 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1320])\n },\n 'scale_3_data': {'value': int64_array([1320]), 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_1 = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 1}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('scale_2', 'scale_2_data'),\n ('scale_2_data', 'interpolate_2', {'in': 1}),\n ('interpolate_2', 'interpolate_2_data'),\n\n ('interpolate_2_data', 'interpolate_3', {'in': 0}),\n ('scale_3', 'scale_3_data'),\n ('scale_3_data', 'interpolate_3', {'in': 1}),\n ('interpolate_3', 'interpolate_3_data'),\n\n ('interpolate_3_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\ngraph_node_attrs_for_2d_case_2 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([660])\n },\n 'scale_1_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 660, 350]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 660, 350]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_2 = [\n ('placeholder', 'placeholder_data'),\n\n 
('placeholder_data', 'interpolate_1', {'in': 0}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 1}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\ngraph_node_attrs_for_2d_case_3 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([660])\n },\n 'scale_1_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 660, 350]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([700])\n },\n 'scale_2_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([3]),\n 'mode': 'linear',\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 660, 700]), 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1320])\n },\n 'scale_3_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'cubic',\n 'version': 'opset1'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_3 = edges_for_2d_case_1\n\n\nnew_graph_node_attrs_for_2d_case_4_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2200])\n },\n 'size_1_data': {'value': int64_array([2200]), 'shape': [1], 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([10.0])\n },\n 'scale_1_data': {'value': np.array([10.0]), 'shape': [1], 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2])\n },\n 'axes_1_data': {'value': int64_array([2]), 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'coordinate_transformation_mode': 'asymmetric',\n 'nearest_mode': 'simple',\n 'cube_coeff': -0.4,\n 'antialias': 1,\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 2200, 350]), 'kind': 'data'},\n 'size_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([700])\n },\n 'size_2_data': {'value': int64_array([700]), 'shape': [1], 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([2.0])\n },\n 'scale_2_data': {'value': np.array([2.0]), 'shape': [1], 'kind': 'data'},\n 'axes_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 
'value': int64_array([3])\n },\n 'axes_2_data': {'value': int64_array([3]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'coordinate_transformation_mode': 'asymmetric',\n 'nearest_mode': 'simple',\n 'cube_coeff': -0.4,\n 'antialias': 1,\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nnew_edges_for_2d_case_4_opset4_case = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('size_1', 'size_1_data'),\n ('size_1_data', 'interpolate_1', {'in': 1}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 2}),\n ('axes_1', 'axes_1_data'),\n ('axes_1_data', 'interpolate_1', {'in': 3}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('size_2', 'size_2_data'),\n ('size_2_data', 'interpolate_2', {'in': 1}),\n ('scale_2', 'scale_2_data'),\n ('scale_2_data', 'interpolate_2', {'in': 2}),\n ('axes_2', 'axes_2_data'),\n ('axes_2_data', 'interpolate_2', {'in': 3}),\n ('interpolate_2', 'interpolate_2_data'),\n\n ('interpolate_2_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\nnew_ref_graph_node_attrs_for_2d_case_4_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2200, 700])\n },\n 'size_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([10.0, 2.0])\n },\n 'scale_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2, 3])\n },\n 'axes_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'coordinate_transformation_mode': 'asymmetric',\n 'nearest_mode': 'simple',\n 'cube_coeff': -0.4,\n 'antialias': 1,\n 'shape_calculation_mode': 'scales',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nnew_ref_edges_for_2d_case_4_opset4_case = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('size_1', 'size_1_data'),\n ('size_1_data', 'interpolate_1', {'in': 1}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 2}),\n ('axes_1', 'axes_1_data'),\n ('axes_1_data', 'interpolate_1', {'in': 3}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\ngraph_node_attrs_for_2d_case_4_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 
'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2200])\n },\n 'scale_1_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2])\n },\n 'axes_1_data': {'value': int64_array([2]), 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'coordinate_transformation_mode': 'asymmetric',\n 'nearest_mode': 'simple',\n 'cube_coeff': -0.4,\n 'antialias': 1,\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 2200, 350]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([700])\n },\n 'scale_2_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'axes_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([3])\n },\n 'axes_2_data': {'value': int64_array([3]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'coordinate_transformation_mode': 'asymmetric',\n 'nearest_mode': 'simple',\n 'cube_coeff': -0.4,\n 'antialias': 1,\n 'version': 'opset4'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_4_opset4_case = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 1}),\n ('axes_1', 'axes_1_data'),\n ('axes_1_data', 'interpolate_1', {'in': 2}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('scale_2', 'scale_2_data'),\n ('scale_2_data', 'interpolate_2', {'in': 1}),\n ('axes_2', 'axes_2_data'),\n ('axes_2_data', 'interpolate_2', {'in': 2}),\n ('interpolate_2', 'interpolate_2_data'),\n\n ('interpolate_2_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\ngraph_node_attrs_for_2d_case_4 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2200])\n },\n 'scale_1_data': {'value': int64_array([2200]), 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 2200, 350]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([700])\n },\n 'scale_2_data': {'value': int64_array([700]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([3]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 
'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_4 = [\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 1}),\n ('interpolate_1', 'interpolate_1_data'),\n\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('scale_2', 'scale_2_data'),\n ('scale_2_data', 'interpolate_2', {'in': 1}),\n ('interpolate_2', 'interpolate_2_data'),\n\n ('interpolate_2_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n]\n\n\ngraph_node_attrs_for_2d_case_6 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([220, 350])\n },\n 'scale_1_data': {'value': None, 'shape': [2], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 3]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 220, 350]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([220])\n },\n 'scale_2_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 220, 350]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 220, 350]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_2d_case_6 = edges_for_2d_case_4\n\n\nnew_ref_graph_node_attrs_for_3d_case_1_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 5, 1024, 256, 800]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4096, 1280, 2400])\n },\n 'size_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([4.0, 5.0, 3.0])\n },\n 'scale_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2, 3, 4])\n },\n 'axes_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 2400]), 'kind': 'data'},\n 'size_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([512])\n },\n 'size_3_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([512.0 / 2400.0])\n },\n 'scale_3_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'axes_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 
'value': int64_array([4])\n },\n 'axes_3_data': {'value': int64_array([4]), 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\n\nnew_ref_edges_for_3d_case_1_opset4_case = ref_edges_for_2d_case_1_opset4_case\n\n\nnew_graph_node_attrs_for_3d_case_1_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 5, 1024, 256, 800]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4096, 2400])\n },\n 'size_1_data': {'value': int64_array([4096, 2400]), 'shape': [2], 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([4.0, 3.0])\n },\n 'scale_1_data': {'value': np.array([4.0, 3.0]), 'shape': [2], 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2, 4])\n },\n 'axes_1_data': {'value': int64_array([2, 4]), 'shape': [2], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 5, 4096, 256, 2400]), 'kind': 'data'},\n 'size_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1280])\n },\n 'size_2_data': {'value': int64_array([1280]), 'shape': [1], 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([5.0])\n },\n 'scale_2_data': {'value': np.array([5.0]), 'shape': [1], 'kind': 'data'},\n 'axes_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([3])\n },\n 'axes_2_data': {'value': int64_array([3]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 2400]), 'kind': 'data'},\n 'size_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([512])\n },\n 'size_3_data': {'value': int64_array([512]), 'shape': [1], 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([512.0 / 2400.0])\n },\n 'scale_3_data': {'value': np.array([512.0 / 2400.0]), 'shape': [1], 'kind': 'data'},\n 'axes_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4])\n },\n 'axes_3_data': {'value': int64_array([4]), 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'nearest',\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 
'Result'},\n}\n\nnew_edges_for_3d_case_1_opset4_case = edges_for_2d_case_1_opset4_case\n\n\ngraph_node_attrs_for_3d_case_1 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 5, 1024, 256, 800]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4096, 2400])\n },\n 'scale_1_data': {'value': int64_array([4096, 2400]), 'shape': [2], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 4]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 5, 4096, 256, 2400]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1280])\n },\n 'scale_2_data': {'value': int64_array([1280]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([3]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 2400]), 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([512])\n },\n 'scale_3_data': {'value': int64_array([512]), 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([4]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_3d_case_1 = edges_for_2d_case_1\n\n\ngraph_node_attrs_for_3d_case_2 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 5, 1024, 256, 800]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4096, 1280])\n },\n 'scale_1_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 3]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 800]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 800]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_3d_case_2 = edges_for_2d_case_2\n\n\ngraph_node_attrs_for_3d_case_3 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([16, 44, 512, 87, 790]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([256])\n },\n 'scale_1_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([16, 44, 256, 87, 790]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 
'Const', 'value': int64_array([2370])\n },\n 'scale_2_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([4]),\n 'mode': 'linear',\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([16, 44, 256, 87, 2370]), 'kind': 'data'},\n 'scale_3': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([435])\n },\n 'scale_3_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_3': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([3]),\n 'mode': 'cubic',\n 'version': 'opset1'\n },\n 'interpolate_3_data': {'value': None, 'shape': int64_array([16, 44, 256, 435, 2370]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([16, 44, 256, 435, 2370]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_3d_case_3 = edges_for_2d_case_3\n\n\nnew_ref_graph_node_attrs_for_3d_case_4_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([10, 64, 511, 416, 10240]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4599, 912, 133120])\n },\n 'size_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const',\n 'value': np.array([4599.0 / 511.0, 912.0 / 416.0, 133120.0 / 10240.0])\n },\n 'scale_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2, 3, 4])\n },\n 'axes_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'antialias': 1,\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nnew_ref_edges_for_3d_case_4_opset4_case = new_ref_edges_for_2d_case_4_opset4_case\n\n\nnew_graph_node_attrs_for_3d_case_4_opset4_case = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([10, 64, 511, 416, 10240]),\n 'kind': 'data',\n 'data_type': None\n },\n 'size_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4599, 133120])\n },\n 'size_1_data': {'value': int64_array([4599, 133120]), 'shape': [2], 'kind': 'data'},\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([4599.0 / 511.0, 133120.0 / 10240.0])\n },\n 'scale_1_data': {'value': np.array([4599.0 / 511.0, 133120.0 / 10240.0]), 'shape': [2], 'kind': 'data'},\n 'axes_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2, 4])\n },\n 'axes_1_data': {'value': int64_array([2, 4]), 'shape': [2], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'antialias': 1,\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([10, 64, 4599, 
416, 133120]), 'kind': 'data'},\n 'size_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([912])\n },\n 'size_2_data': {'value': int64_array([912]), 'shape': [1], 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': np.array([912.0 / 416.0])\n },\n 'scale_2_data': {'value': np.array([912.0 / 416.0]), 'shape': [1], 'kind': 'data'},\n 'axes_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([3])\n },\n 'axes_2_data': {'value': int64_array([3]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'mode': 'linear',\n 'antialias': 1,\n 'shape_calculation_mode': 'sizes',\n 'version': 'opset4'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nnew_edges_for_3d_case_4_opset4_case = new_edges_for_2d_case_4_opset4_case\n\n\ngraph_node_attrs_for_3d_case_4 = {\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([10, 64, 511, 416, 10240]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4599, 133120])\n },\n 'scale_1_data': {'value': int64_array([4599, 133120]), 'shape': [2], 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 4]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([10, 64, 4599, 416, 133120]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([912])\n },\n 'scale_2_data': {'value': int64_array([912]), 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([3]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n}\n\nedges_for_3d_case_4 = edges_for_2d_case_4\n\n\nclass InterpolateSequenceToInterpolateTest(unittest.TestCase):\n def test_2d_interpolate_sequence_1(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_1,\n edges=edges_for_2d_case_1\n )\n\n ref_graph = build_graph(\n nodes_attrs={\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([660, 700])\n },\n 'scale_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 3]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_1_data': {'value': None, 'shape': int64_array([1, 4, 660, 700]), 
'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([1320])\n },\n 'scale_2_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 4, 1320, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n },\n edges=[\n ('placeholder', 'placeholder_data'),\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 1}),\n ('interpolate_1', 'interpolate_1_data'),\n ('scale_2', 'scale_2_data'),\n ('interpolate_2', 'interpolate_2_data'),\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('scale_2_data', 'interpolate_2', {'in': 1}),\n ('interpolate_2_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n ]\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_1_opset4_case(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_1_opset4_case,\n edges=edges_for_2d_case_1_opset4_case\n )\n\n ref_graph = build_graph(\n nodes_attrs=ref_graph_node_attrs_for_2d_case_1_opset4_case,\n edges=ref_edges_for_2d_case_1_opset4_case\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_2(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_2,\n edges=edges_for_2d_case_2\n )\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_2,\n edges=edges_for_2d_case_2\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_3(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_3,\n edges=edges_for_2d_case_3\n )\n\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_3,\n edges=edges_for_2d_case_3\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_4(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_4,\n edges=edges_for_2d_case_4\n )\n\n ref_graph = build_graph(\n nodes_attrs={\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 4, 220, 350]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([2200, 700])\n },\n 'scale_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 3]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_data': {'value': None, 'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 
'shape': int64_array([1, 4, 2200, 700]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n },\n edges=[\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate', {'in': 0}),\n ('scale', 'scale_data'),\n ('scale_data', 'interpolate', {'in': 1}),\n ('interpolate', 'interpolate_data'),\n\n ('interpolate_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n ]\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_4_opset4_case(self):\n graph = build_graph(\n nodes_attrs=new_graph_node_attrs_for_2d_case_4_opset4_case,\n edges=new_edges_for_2d_case_4_opset4_case\n )\n\n ref_graph = build_graph(\n nodes_attrs=new_ref_graph_node_attrs_for_2d_case_4_opset4_case,\n edges=new_ref_edges_for_2d_case_4_opset4_case\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_5(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_4,\n edges=edges_for_2d_case_4,\n update_attributes={\n 'interpolate_1': {\n 'align_corners': 1, 'antialias': 1, 'pads_begin': 3, 'pads_end': 0\n }\n }\n )\n\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_4,\n edges=edges_for_2d_case_4,\n update_attributes={\n 'interpolate_1': {\n 'align_corners': 1, 'antialias': 1, 'pads_begin': 3, 'pads_end': 0\n }\n }\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_5_opset4_case(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_4_opset4_case,\n edges=edges_for_2d_case_4_opset4_case,\n update_attributes={\n 'interpolate_1': {\n 'antialias': 0, 'cube_coeff': -0.1\n }\n }\n )\n\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_4_opset4_case,\n edges=edges_for_2d_case_4_opset4_case,\n update_attributes={\n 'interpolate_1': {\n 'antialias': 0, 'cube_coeff': -0.1\n }\n }\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_2d_interpolate_sequence_6(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_6,\n edges=edges_for_2d_case_6,\n )\n\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_2d_case_6,\n edges=edges_for_2d_case_6\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_1(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_1,\n edges=edges_for_3d_case_1\n )\n\n ref_graph = build_graph(\n nodes_attrs={\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([1, 5, 1024, 256, 800]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale_1': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4096, 1280, 2400])\n },\n 'scale_1_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate_1': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 3, 4]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 
'interpolate_1_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 2400]), 'kind': 'data'},\n 'scale_2': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([512])\n },\n 'scale_2_data': {'value': None, 'shape': [1], 'kind': 'data'},\n 'interpolate_2': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([4]),\n 'mode': 'nearest',\n 'version': 'opset1'\n },\n 'interpolate_2_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([1, 5, 4096, 1280, 512]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n },\n edges=[\n ('placeholder', 'placeholder_data'),\n ('placeholder_data', 'interpolate_1', {'in': 0}),\n ('scale_1', 'scale_1_data'),\n ('scale_1_data', 'interpolate_1', {'in': 1}),\n ('interpolate_1', 'interpolate_1_data'),\n ('scale_2', 'scale_2_data'),\n ('interpolate_2', 'interpolate_2_data'),\n ('interpolate_1_data', 'interpolate_2', {'in': 0}),\n ('scale_2_data', 'interpolate_2', {'in': 1}),\n ('interpolate_2_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n ]\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_1_opset4_case(self):\n graph = build_graph(\n nodes_attrs=new_graph_node_attrs_for_3d_case_1_opset4_case,\n edges=new_edges_for_3d_case_1_opset4_case\n )\n\n ref_graph = build_graph(\n nodes_attrs=new_ref_graph_node_attrs_for_3d_case_1_opset4_case,\n edges=new_ref_edges_for_3d_case_1_opset4_case\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_2(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_2,\n edges=edges_for_3d_case_2\n )\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_2,\n edges=edges_for_3d_case_2\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_3(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_3,\n edges=edges_for_3d_case_3\n )\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_3,\n edges=edges_for_3d_case_3\n )\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_4(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_4,\n edges=edges_for_3d_case_4\n )\n\n ref_graph = build_graph(\n nodes_attrs={\n 'placeholder': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'placeholder_data': {\n 'value': None,\n 'shape': int64_array([10, 64, 511, 416, 10240]),\n 'kind': 'data',\n 'data_type': None\n },\n 'scale': {\n 'kind': 'op', 'op': 'Const', 'type': 'Const', 'value': int64_array([4599, 912, 133120])\n },\n 'scale_data': {'value': None, 'shape': None, 'kind': 'data'},\n 'interpolate': {\n 'type': 'Interpolate',\n 'kind': 'op',\n 'op': 'Interpolate',\n 'axes': int64_array([2, 3, 4]),\n 'mode': 'linear',\n 'align_corners': 0,\n 'antialias': 1,\n 'pads_begin': 5,\n 'pads_end': 3,\n 'version': 'opset1'\n },\n 'interpolate_data': {'value': None, 'shape': 
int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},\n 'abs_data': {'value': None, 'shape': int64_array([10, 64, 4599, 912, 133120]), 'kind': 'data'},\n 'output': {'kind': 'op', 'op': 'Result'},\n },\n edges=[\n ('placeholder', 'placeholder_data'),\n\n ('placeholder_data', 'interpolate', {'in': 0}),\n ('scale', 'scale_data'),\n ('scale_data', 'interpolate', {'in': 1}),\n ('interpolate', 'interpolate_data'),\n\n ('interpolate_data', 'abs'),\n ('abs', 'abs_data'),\n ('abs_data', 'output'),\n ]\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_4_opset4_case(self):\n graph = build_graph(\n nodes_attrs=new_graph_node_attrs_for_3d_case_4_opset4_case,\n edges=new_edges_for_3d_case_4_opset4_case\n )\n\n ref_graph = build_graph(\n nodes_attrs=new_ref_graph_node_attrs_for_3d_case_4_opset4_case,\n edges=new_ref_edges_for_3d_case_4_opset4_case\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n\n def test_3d_interpolate_sequence_5(self):\n graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_4,\n edges=edges_for_3d_case_4,\n update_attributes={\n 'interpolate_1': {\n 'align_corners': 1, 'antialias': 1, 'pads_begin': 3, 'pads_end': 7\n }\n }\n )\n\n ref_graph = build_graph(\n nodes_attrs=graph_node_attrs_for_3d_case_4,\n edges=edges_for_3d_case_4,\n update_attributes={\n 'interpolate_1': {\n 'align_corners': 1, 'antialias': 1, 'pads_begin': 3, 'pads_end': 7\n }\n }\n )\n\n InterpolateSequenceToInterpolate().find_and_replace_pattern(graph)\n (flag, resp) = compare_graphs(graph, ref_graph, 'output')\n self.assertTrue(flag, resp)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\n\nfrom openvino.tools.mo.front.common.partial_infer.utils import is_fully_defined\nfrom openvino.tools.mo.graph.graph import Node, Graph\nfrom openvino.tools.mo.ops.op import Op\n\n\nclass ConstantFill(Op):\n \"\"\" Constant blob generation by broadcasting specified value to a given shape.\n\n It is assumed that there is no equivalent of this op in IE,\n so it is usually relevant to constant folding.\n \"\"\"\n op = 'ConstantFill'\n enabled = False\n\n def __init__(self, graph: Graph, attrs: dict):\n mandatory_props = {\n 'type': None,\n 'op': self.op,\n 'input_as_shape': 1,\n 'in_ports_count': 1,\n 'out_ports_count': 1,\n 'infer': self.infer\n }\n super().__init__(graph, mandatory_props, attrs)\n\n def supported_attrs(self):\n return [\n 'input_as_shape',\n 'fill_value'\n ]\n\n @staticmethod\n def infer(node: Node):\n assert len(node.in_nodes()) == 1\n assert node.fill_value is not None\n assert node.input_as_shape\n\n shape = node.in_port(0).data.get_value()\n assert shape is not None\n\n if is_fully_defined(shape):\n node.out_port(0).data.set_value(np.full(shape, node.fill_value, np.float32))\n else:\n node.out_port(0).data.set_shape(shape)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\n\nfrom openvino.tools.mo.front.common.partial_infer.elemental import copy_shape_infer\nfrom openvino.tools.mo.graph.graph import Graph\nfrom openvino.tools.mo.ops.op import Op\n\n\nclass PReLU(Op):\n op = 'PReLU'\n enabled = True\n\n def __init__(self, graph: Graph, attrs: dict):\n 
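# Editor's note (hedged, not in the original source): port 1 carries the\n # learnable slope tensor; the 'force_precision_in_ports' entry below keeps\n # that input floating-point, and infer() marks the op as channel-shared\n # when the slope has shape [1].\n 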
super().__init__(graph, {\n 'op': self.op,\n 'type': self.op,\n 'version': 'opset1',\n\n 'infer': self.infer,\n\n 'force_precision_in_ports': {1: 'float'},\n\n 'in_ports_count': 2,\n 'out_ports_count': 1,\n }, attrs)\n\n @staticmethod\n def infer(node):\n if len(node.in_nodes()) == 2:\n gamma_vector = node.in_node(1)\n if np.all(gamma_vector.shape == [1]):\n node['channel_shared'] = 1\n else:\n node['channel_shared'] = 0\n node.in_node(1)['correct_data_type'] = True\n\n copy_shape_infer(node)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\n\nfrom openvino.tools.mo.front.common.partial_infer.utils import mo_array\nfrom openvino.tools.mo.front.extractor import FrontExtractorOp\nfrom openvino.tools.mo.front.kaldi.loader.utils import read_binary_bool_token, read_binary_integer32_token, collect_until_token, \\\n read_binary_float_token\nfrom openvino.tools.mo.front.kaldi.utils import read_binary_vector, read_binary_matrix\nfrom openvino.tools.mo.ops.tdnncomponent import TdnnComponent\n\n\nclass TdnnComponentFrontExtractor(FrontExtractorOp):\n op = 'tdnncomponent'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n pb = node.parameters\n\n collect_until_token(pb, b'<MaxChange>')\n max_change = read_binary_float_token(pb)\n\n collect_until_token(pb, b'<L2Regularize>')\n collect_until_token(pb, b'<LearningRate>')\n\n collect_until_token(pb, b'<TimeOffsets>')\n time_offsets = read_binary_vector(pb, False, np.int32)\n\n collect_until_token(pb, b'<LinearParams>')\n weights, weights_shape = read_binary_matrix(pb)\n collect_until_token(pb, b'<BiasParams>')\n bias_params = read_binary_vector(pb)\n\n collect_until_token(pb, b'<OrthonormalConstraint>')\n orthonormal_constraint = read_binary_float_token(pb) # used only on training\n\n collect_until_token(pb, b'<UseNaturalGradient>')\n use_natural_grad = read_binary_bool_token(pb) # used only on training\n collect_until_token(pb, b'<NumSamplesHistory>')\n num_samples_hist = read_binary_float_token(pb)\n\n collect_until_token(pb, b'<AlphaInOut>')\n alpha_in_out = read_binary_float_token(pb), read_binary_float_token(pb) # for training, usually (4, 4)\n\n # according to Kaldi documentation http://kaldi-asr.org/doc/classkaldi_1_1nnet3_1_1TdnnComponent.html#details\n # it looks like it's used only during training (but not 100% sure)\n collect_until_token(pb, b'<RankInOut>')\n rank_in_out = read_binary_integer32_token(pb), read_binary_integer32_token(pb)\n\n biases = mo_array(bias_params) if len(bias_params) != 0 else None\n attrs = {\n 'weights': np.reshape(weights, weights_shape),\n 'biases': biases,\n 'time_offsets': time_offsets,\n }\n TdnnComponent.update_node_stat(node, attrs)\n return cls.enabled\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\n\nfrom openvino.tools.mo.ops.priorbox import PriorBoxOp\nfrom openvino.tools.mo.graph.graph import Node\nfrom unit_tests.utils.graph import build_graph\n\nnodes_attributes = {'node_1': {'type': 'Identity', 'value': None, 'kind': 'data'},\n 'pb': {'type': 'PriorBox', 'value': None, 'kind': 'op'},\n 'node_3': {'type': 'Identity', 'value': None, 'kind': 'data'},\n 'op_output': { 'kind': 'op', 'op': 'Result'}\n }\n\n\nclass TestPriorBoxPartialInfer(unittest.TestCase):\n def test_caffe_priorbox_infer(self):\n graph = build_graph(nodes_attributes,\n [\n ('node_1', 'pb'),\n ('pb', 'node_3'),\n ('node_3', 'op_output')\n ],\n {\n 'node_3': {'shape': None},\n 'node_1': 
{'shape': np.array([1, 384, 19, 19])},\n 'pb': {\n 'aspect_ratio': np.array([1]),\n 'flip': 0,\n 'min_size': np.array([1]),\n 'max_size': np.array([1])\n }\n })\n graph.graph['layout'] = 'NCHW'\n pb_node = Node(graph, 'pb')\n PriorBoxOp.priorbox_infer(pb_node)\n exp_shape = np.array([1, 2, 4 * 19 * 19 * 2])\n res_shape = graph.node['node_3']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n\n def test_caffe_priorbox_flip_infer(self):\n graph = build_graph(nodes_attributes,\n [\n ('node_1', 'pb'),\n ('pb', 'node_3'),\n ('node_3', 'op_output')\n ],\n {\n 'node_3': {'shape': None},\n 'node_1': {'shape': np.array([1, 384, 19, 19])},\n 'pb': {\n 'aspect_ratio': np.array([1, 2, 0.5]),\n 'flip': 1,\n 'min_size': np.array([1]),\n 'max_size': np.array([1])\n }\n })\n graph.graph['layout'] = 'NCHW'\n pb_node = Node(graph, 'pb')\n PriorBoxOp.priorbox_infer(pb_node)\n exp_shape = np.array([1, 2, 4 * 19 * 19 * 4])\n res_shape = graph.node['node_3']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n\n def test_tf_priorbox_infer(self):\n graph = build_graph(nodes_attributes,\n [\n ('node_1', 'pb'),\n ('pb', 'node_3'),\n ('node_3', 'op_output')\n ],\n {\n 'node_3': {'shape': None},\n 'node_1': {'shape': np.array([1, 19, 19, 384])},\n 'pb': {\n 'aspect_ratio': np.array([1]),\n 'flip': 0,\n 'min_size': np.array([1]),\n 'max_size': np.array([1])\n }\n })\n graph.graph['layout'] = 'NHWC'\n pb_node = Node(graph, 'pb')\n PriorBoxOp.priorbox_infer(pb_node)\n exp_shape = np.array([1, 2, 4 * 19 * 19 * 2])\n res_shape = graph.node['node_3']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n\n def test_tf_priorbox_flip_infer(self):\n graph = build_graph(nodes_attributes,\n [\n ('node_1', 'pb'),\n ('pb', 'node_3'),\n ('node_3', 'op_output')\n ],\n {\n 'node_3': {'shape': None},\n 'node_1': {'shape': np.array([1, 19, 19, 384])},\n 'pb': {\n 'aspect_ratio': np.array([1, 2, 0.5]),\n 'flip': 1,\n 'min_size': np.array([1]),\n 'max_size': np.array([1])\n }\n })\n graph.graph['layout'] = 'NHWC'\n pb_node = Node(graph, 'pb')\n PriorBoxOp.priorbox_infer(pb_node)\n exp_shape = np.array([1, 2, 4 * 19 * 19 * 4])\n res_shape = graph.node['node_3']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n\n def test_caffe_priorbox_density_infer(self):\n graph = build_graph(nodes_attributes,\n [\n ('node_1', 'pb'),\n ('pb', 'node_3')],\n {\n 'node_3': {'is_output': True, 'shape': None},\n 'node_1': {'shape': np.array([1, 128, 32, 32])},\n 'pb': {\n 'aspect_ratio': np.array([1]),\n 'flip': 1,\n 'min_size': np.array([]),\n 'max_size': np.array([]),\n 'fixed_size': np.array([32, 64, 128]),\n 'density': np.array([1, 2, 4]),\n }\n })\n graph.graph['layout'] = 'NCHW'\n pb_node = Node(graph, 'pb')\n PriorBoxOp.priorbox_infer(pb_node)\n exp_shape = np.array([1, 2, 4*32*32*21])\n res_shape = graph.node['node_3']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\n\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array, dynamic_dimension_value, dynamic_dimension, \\\n is_fully_defined, shape_array, shape_insert\nfrom openvino.tools.mo.graph.graph import Node, Graph\nfrom openvino.tools.mo.graph.perm_inputs import PermuteInputs\nfrom openvino.tools.mo.ops.op import Op, PermuteAttrs\n\n\nclass Tile(Op):\n 
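# Editor's note (hedged sketch of the semantics infer() implements below,\n # not in the original source): output shape = input shape * repeats after\n # both are left-padded with ones to a common rank, e.g. an input of shape\n # [2, 3] with repeats [2, 2, 2] is treated as [1, 2, 3] and yields\n # [2, 4, 6].\n 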
op = 'Tile'\n enabled = False\n\n def __init__(self, graph: Graph, attrs: dict):\n super().__init__(graph, {\n 'op': self.op,\n 'type': self.op,\n 'version': 'opset1',\n\n 'infer': self.infer,\n\n 'in_ports_count': 2,\n 'out_ports_count': 1,\n }, attrs)\n\n @staticmethod\n def infer(node: Node):\n name = node.soft_get('name', node.id)\n\n connected_in_ports = {idx: port for idx, port in node.in_ports().items() if not port.disconnected()}\n assert len(connected_in_ports) == 2 and 0 in connected_in_ports and 1 in connected_in_ports, \\\n \"Tile should have 2 connected input ports, but it doesn't for node: `{}`. Ports: {}\" \\\n \"\".format(name, connected_in_ports)\n\n shape = node.in_port(0).data.get_shape()\n assert shape is not None, \"Undefined input shape for Tile node '{}'.\".format(name)\n tile_array = node.in_port(1).data.get_value()\n assert tile_array is not None, \"Undefined `repeats` (1st port input value) of Tile node '{}'\".format(name)\n\n # align ranks of the tile_array tensor and input shape node\n if shape.size < tile_array.size:\n shape = shape_insert(shape, 0, [1] * (tile_array.size - shape.size))\n elif shape.size > tile_array.size:\n tile_array = shape_insert(tile_array, 0, [1] * (shape.size - tile_array.size))\n\n input_value = node.in_port(0).data.get_value()\n if input_value is not None and is_fully_defined(shape) and is_fully_defined(tile_array):\n node.out_port(0).data.set_value(np.tile(input_value.reshape(shape), tile_array))\n else:\n node.out_port(0).data.set_shape(shape * tile_array)\n\n PermuteInputs().set_input_permutation(node.in_node(1), node, 'input:0', 'shape')\n\n\nclass AttributedTile(Op):\n op = 'AttributedTile'\n enabled = False\n\n def __init__(self, graph: Graph, attrs: dict):\n super().__init__(graph, {\n 'op': self.op,\n 'type': 'Tile',\n 'version': 'opset1',\n\n 'infer': self.infer,\n\n 'in_ports_count': 1,\n 'out_ports_count': 1,\n }, attrs)\n\n assert 'axis' in self.attrs\n assert 'tiles' in self.attrs\n\n def supported_attrs(self):\n return ['axis', 'tiles']\n\n @staticmethod\n def infer(node):\n name = node.soft_get('name', node.id)\n\n connected_in_ports = {idx: port for idx, port in node.in_ports().items() if not port.disconnected()}\n assert len(connected_in_ports) == 1 and 0 in connected_in_ports, \\\n \"AttributedTile should have 1 connected input port, but it doesn't for node: `{}`. 
Ports: {}\" \\\n \"\".format(name, connected_in_ports)\n\n shape = node.in_port(0).data.get_shape()\n assert shape is not None, \"Undefined input shape for AttributedTile node '{}'.\".format(name)\n axis = node.soft_get('axis', None)\n assert axis is not None\n tiles = node.soft_get('tiles', None)\n assert tiles is not None, \"Undefined `tiles` attribute of Tile node '{}'\".format(name)\n\n tile_array = int64_array(np.ones(shape.size))\n tile_array[node.axis] = node.tiles\n\n node.out_port(0).data.set_shape(shape * tile_array)\n if node.in_port(0).data.get_value() is not None:\n node.out_port(0).data.set_value(np.tile(node.in_port(0).data.get_value(), tile_array))\n\n PermuteAttrs.create_permute_attrs(node, attrs=[('axis', 'input:0')])\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\nfrom tensorflow.core.framework import types_pb2 as tf_types # pylint: disable=no-name-in-module,import-error\n\n# Suppress false positive pylint warning about function with too many arguments\n# pylint: disable=E1121\n# mapping between TF data type and numpy data type and function to extract data from TF tensor\n_tf_np_mapping = [('DT_BOOL', np.bool_, lambda pb: pb.bool_val, lambda x: bool_cast(x)),\n ('DT_INT8', np.int8, lambda pb: pb.int_val, lambda x: np.int8(x)),\n ('DT_INT16', np.int16, lambda pb: pb.int_val, lambda x: np.int16(x)),\n ('DT_INT32', np.int32, lambda pb: pb.int_val, lambda x: np.int32(x)),\n ('DT_INT64', np.int64, lambda pb: pb.int64_val, lambda x: np.int64(x)),\n ('DT_UINT8', np.uint8, lambda pb: pb.uint8_val, lambda x: np.uint8(x)),\n ('DT_UINT16', np.uint16, lambda pb: pb.int_val, lambda x: np.uint16(x)),\n ('DT_UINT32', np.uint32, lambda pb: pb.uint32_val, lambda x: np.uint32(x)),\n ('DT_UINT64', np.uint64, lambda pb: pb.uint64_val, lambda x: np.uint64(x)),\n ('DT_HALF', np.float16, lambda pb: np.uint16(pb.half_val).view(np.float16), lambda x: np.float16(x)),\n ('DT_FLOAT', np.float32, lambda pb: pb.float_val, lambda x: np.float32(x)),\n ('DT_DOUBLE', np.double, lambda pb: pb.double_val, lambda x: np.double(x)),\n ('DT_STRING', np.str_, lambda pb: pb.string_val, lambda x: np.str_(x)),\n ]\n\ntf_data_type_decode = {getattr(tf_types, tf_dt): (np_type, func) for tf_dt, np_type, func, _ in _tf_np_mapping if\n hasattr(tf_types, tf_dt)}\n\ntf_data_type_cast = {np_type: cast for tf_dt, np_type, _, cast in _tf_np_mapping if hasattr(tf_types, tf_dt)}\n\n\ndef bool_cast(x):\n if isinstance(x, str):\n return False if x.lower() in ['false', '0'] else True if x.lower() in ['true', '1'] else 'unknown_boolean_cast'\n else:\n return np.bool_(x)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\n#\n# exp paddle model generator\n#\nimport numpy as np\nfrom save_model import saveModel\nimport sys\n\n\ndef exp(name: str, x):\n import paddle\n paddle.enable_static()\n\n with paddle.static.program_guard(paddle.static.Program(), paddle.static.Program()):\n node_x = paddle.static.data(name='x', shape=x.shape, dtype=x.dtype)\n out = paddle.fluid.layers.exp(x=node_x)\n cpu = paddle.static.cpu_places(1)\n exe = paddle.static.Executor(cpu[0])\n # startup program will call initializer to initialize the parameters.\n exe.run(paddle.static.default_startup_program())\n\n outs = exe.run(\n feed={'x': x},\n fetch_list=[out])\n\n saveModel(name, exe, feedkeys=['x'], fetchlist=[out], inputs=[\n x], outputs=[outs[0]], target_dir=sys.argv[1])\n\n return outs[0]\n\n\ndef main():\n input_shape = (1, 2, 3)\n input_data = 
np.random.rand(*input_shape).astype(np.float32)\n exp(\"exp_test_float32\", input_data)\n\n\nif __name__ == \"__main__\":\n main()\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\n\nfrom openvino.tools.mo.front.tf.pad_tf_to_pad import PadTFToPad\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array, float_array\nfrom openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs\nfrom unit_tests.utils.graph import build_graph, const\n\nnodes_attributes = {\n 'placeholder': {'shape': None, 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},\n 'tfpad': {'type': None, 'kind': 'op', 'op': 'TFPad', 'mode': 'constant', 'name': 'tfpad_name'},\n **const('paddings', int64_array([1, 2, 3, 4, 5, 6]).reshape([3, 2])),\n **const('fill', float_array(5.75)),\n 'result': {'type': 'Result', 'value': None, 'kind': 'op', 'op': 'Result'},\n\n # new Pad layer and sub-graph\n 'pad': {'type': 'Pad', 'kind': 'op', 'op': 'Pad', 'mode': 'constant'},\n 'transpose': {'type': 'Transpose', 'kind': 'op', 'op': 'Transpose'},\n **const('transpose_order', int64_array([1, 0])),\n 'split': {'type': 'Split', 'kind': 'op', 'op': 'Split', 'num_splits': 2},\n **const('split_axis', int64_array(0)),\n 'squeeze_1': {'type': 'Squeeze', 'kind': 'op', 'op': 'Squeeze'},\n **const('squeeze_1_axis', int64_array([0])),\n 'squeeze_2': {'type': 'Squeeze', 'kind': 'op', 'op': 'Squeeze'},\n **const('squeeze_2_axis', int64_array([0])),\n 'convert_like': {'type': 'ConvertLike', 'kind': 'op', 'op': 'ConvertLike'},\n\n **const('pad_fill', np.array(0.0)),\n}\n\ncommon_edges = [('placeholder', 'pad', {'in': 0, 'out': 0}),\n\n ('paddings', 'transpose', {'in': 0, 'out': 0}),\n ('transpose_order', 'transpose', {'in': 1, 'out': 0}),\n\n ('transpose', 'split', {'in': 0, 'out': 0}),\n ('split_axis', 'split', {'in': 1, 'out': 0}),\n\n ('split', 'squeeze_1', {'in': 0, 'out': 0}),\n ('squeeze_1_axis', 'squeeze_1', {'in': 1, 'out': 0}),\n\n ('split', 'squeeze_2', {'in': 0, 'out': 1}),\n ('squeeze_2_axis', 'squeeze_2', {'in': 1, 'out': 0}),\n\n ('squeeze_1', 'pad', {'in': 1, 'out': 0}),\n ('squeeze_2', 'pad', {'in': 2, 'out': 0}),\n\n ('pad', 'result')\n ]\n\n\nclass PadTFToPadTest(unittest.TestCase):\n def _run_test(self, graph, graph_ref):\n graph.graph['layout'] = 'NHWC'\n graph.stage = 'front'\n\n replacer = PadTFToPad()\n replacer.find_and_replace_pattern(graph)\n\n (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)\n self.assertTrue(graph.node[graph.get_nodes_with_attributes(op='Pad')[0]]['name'] == 'tfpad_name')\n self.assertTrue(flag, resp)\n\n def test_2_inputs(self):\n graph = build_graph(nodes_attributes,\n [('placeholder', 'tfpad', {'in': 0, 'out': 0}),\n ('paddings', 'tfpad', {'in': 1, 'out': 0}),\n ('tfpad', 'result', {'in': 0, 'out': 0}),\n ],\n {}, nodes_with_edges_only=True)\n graph.get_op_nodes(op='TFPad')[0].add_input_port(2)\n\n graph_ref = build_graph(nodes_attributes, common_edges,\n {}, nodes_with_edges_only=True)\n self._run_test(graph, graph_ref)\n\n def test_3_inputs(self):\n graph = build_graph(nodes_attributes,\n [('placeholder', 'tfpad', {'in': 0, 'out': 0}),\n ('paddings', 'tfpad', {'in': 1, 'out': 0}),\n ('fill', 'tfpad', {'in': 2, 'out': 0}),\n ('tfpad', 'result', {'in': 0, 'out': 0}),\n ],\n {}, nodes_with_edges_only=True)\n\n graph_ref = build_graph(nodes_attributes, common_edges + [('fill', 'pad', {'in': 3, 'out': 0})],\n {}, nodes_with_edges_only=True)\n\n self._run_test(graph, 
graph_ref)\n\n def test_3_inputs_with_non_constant_pad(self):\n updated_paddings_attrs = {'type': 'Parameter', 'op': 'Parameter', 'value': None}\n graph = build_graph(nodes_attributes,\n [('placeholder', 'tfpad', {'in': 0, 'out': 0}),\n ('paddings', 'tfpad', {'in': 1, 'out': 0}),\n ('fill', 'tfpad', {'in': 2, 'out': 0}),\n ('tfpad', 'result', {'in': 0, 'out': 0}),\n ],\n {'paddings': updated_paddings_attrs}, nodes_with_edges_only=True)\n\n graph_ref = build_graph(nodes_attributes, common_edges + [('fill', 'pad', {'in': 3, 'out': 0})],\n {'paddings': updated_paddings_attrs}, nodes_with_edges_only=True)\n\n self._run_test(graph, graph_ref)\n\n def test_2_inputs_non_constant_mode(self):\n graph = build_graph(nodes_attributes,\n [('placeholder', 'tfpad', {'in': 0, 'out': 0}),\n ('paddings', 'tfpad', {'in': 1, 'out': 0}),\n ('tfpad', 'result', {'in': 0, 'out': 0}),\n ],\n {'tfpad': {'mode': 'reflect'}}, nodes_with_edges_only=True)\n graph.get_op_nodes(op='TFPad')[0].add_input_port(2)\n\n graph_ref = build_graph(nodes_attributes, common_edges,\n {'pad': {'mode': 'reflect'}}, nodes_with_edges_only=True)\n self._run_test(graph, graph_ref)\n", "# Copyright (C) 2018-2021 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\n\nfrom openvino.tools.mo.graph.graph import Graph, rename_nodes\nfrom openvino.tools.mo.middle.quantize_linear_resolver import QuantizeLinearResolver\nfrom openvino.tools.mo.middle.replacement import MiddleReplacementPattern\n\n\nclass QuantizeDequantizeLinearResolver(MiddleReplacementPattern):\n \"\"\"\n This transformation replaces QuantizeLinear in pair QuantizeLinear/DequantizeLinear with\n constant inputs to FakeQuantize with flag stop_value_propagation=True. This transformation prepare FakeQuantize for\n ConvertQuantizeDequantize in offline transformations.\n \"\"\"\n enabled = True\n graph_condition = [lambda graph: graph.graph['layout'] == 'NCHW']\n\n def pattern(self):\n return dict(\n nodes=[('const_input', dict(kind='op', op='Const')),\n ('const_input_d', dict(kind='data')),\n ('quantize', dict(kind='op', op='QuantizeLinear')),\n ('quantize_d', dict(kind='data')),\n ('dequantize', dict(kind='op', op='DequantizeLinear')),\n ],\n edges=[('const_input', 'const_input_d'),\n ('const_input_d', 'quantize', {'in': 0}),\n ('quantize', 'quantize_d'),\n ('quantize_d', 'dequantize', {'in': 0})\n ]\n )\n\n def run_after(self):\n from openvino.tools.mo.middle.quantize_fuses import MarkNodesToFuseUpToFakeQuantize\n return [MarkNodesToFuseUpToFakeQuantize]\n\n def replace_pattern(self, graph: Graph, match: dict):\n dequantize_node = match['dequantize']\n quantize_node = match['quantize']\n\n scale_zerop_is_exist = quantize_node.is_in_port_connected(1) and quantize_node.is_in_port_connected(2) and \\\n dequantize_node.is_in_port_connected(1) and dequantize_node.is_in_port_connected(2)\n if not scale_zerop_is_exist:\n return\n q_scale = quantize_node.in_port(1).get_source().node\n q_zerop = quantize_node.in_port(2).get_source().node\n dq_scale = dequantize_node.in_port(1).get_source().node\n dq_zerop = dequantize_node.in_port(2).get_source().node\n scales_and_zerop_is_const = q_scale.soft_get('type') == 'Const' and dq_scale.soft_get('type') == 'Const' and \\\n q_zerop.soft_get('type') == 'Const' and dq_zerop.soft_get('type') == 'Const'\n scales_and_zerop_equals = np.array_equal(q_scale.value, dq_scale.value) and \\\n np.array_equal(q_zerop.value, dq_zerop.value)\n\n # only constant as for zero_point/scale supported\n # only patterns with same scale/zero_point 
values for Q and DQ are supported\n if not (scales_and_zerop_is_const and scales_and_zerop_equals):\n return\n\n QuantizeLinearResolver.quantize_to_fakequantize(graph, quantize_node, True)\n quantize_node['isolated'] = True\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\nimport os\nimport pytest\nimport threading\nfrom datetime import datetime\nimport time\n\nfrom openvino.inference_engine import ie_api as ie\nfrom tests_compatibility.conftest import model_path, image_path, create_encoder\nimport ngraph as ng\nfrom ngraph.impl import Function, Type\n\nis_myriad = os.environ.get(\"TEST_DEVICE\") == \"MYRIAD\"\ntest_net_xml, test_net_bin = model_path(is_myriad)\npath_to_img = image_path()\n\n\ndef create_function_with_memory(input_shape, data_type):\n input_data = ng.parameter(input_shape, name=\"input_data\", dtype=data_type)\n rv = ng.read_value(input_data, \"var_id_667\")\n add = ng.add(rv, input_data, name=\"MemoryAdd\")\n node = ng.assign(add, \"var_id_667\")\n res = ng.result(add, \"res\")\n func = Function(results=[res], sinks=[node], parameters=[input_data], name=\"name\")\n caps = Function.to_capsule(func)\n return caps\n\n\ndef read_image():\n import cv2\n n, c, h, w = (1, 3, 32, 32)\n image = cv2.imread(path_to_img)\n if image is None:\n raise FileNotFoundError(\"Input image not found\")\n\n image = cv2.resize(image, (h, w)) / 255\n image = image.transpose((2, 0, 1)).astype(np.float32)\n image = image.reshape((n, c, h, w))\n return image\n\n\ndef load_sample_model(device, num_requests=1):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=num_requests)\n return executable_network\n\n\ndef test_input_blobs(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=2)\n td = ie.TensorDesc(\"FP32\", (1, 3, 32, 32), \"NCHW\")\n assert executable_network.requests[0].input_blobs['data'].tensor_desc == td\n\n\ndef test_output_blobs(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=2)\n td = ie.TensorDesc(\"FP32\", (1, 10), \"NC\")\n assert executable_network.requests[0].output_blobs['fc_out'].tensor_desc == td\n\n\ndef test_inputs_list(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=2)\n\n for req in executable_network.requests:\n assert len(req._inputs_list) == 1\n assert \"data\" in req._inputs_list\n del ie_core\n\n\ndef test_outputs_list(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=2)\n\n for req in executable_network.requests:\n assert len(req._outputs_list) == 1\n assert \"fc_out\" in req._outputs_list\n del ie_core\n\n\ndef test_access_input_buffer(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=1)\n buffer = executable_network.requests[0]._get_blob_buffer(\"data\".encode()).to_numpy()\n assert buffer.shape == (1, 3, 32, 32)\n assert buffer.strides == (12288, 4096, 128, 4)\n assert buffer.dtype == np.float32\n del executable_network\n del ie_core\n del net\n\n\ndef 
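_editor_blob_roundtrip_sketch(device):\n # Editor's addition (hedged sketch, not part of the original suite): the\n # minimal fill -> infer -> read round trip that the surrounding buffer\n # tests exercise piecewise; it reuses the load_sample_model and\n # read_image helpers defined above.\n exec_net = load_sample_model(device)\n request = exec_net.requests[0]\n request.input_blobs[\"data\"].buffer[:] = read_image()\n request.infer()\n return request.output_blobs[\"fc_out\"].buffer.copy()\n\n\ndef 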
test_access_output_buffer(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=1)\n buffer = executable_network.requests[0]._get_blob_buffer(\"fc_out\".encode()).to_numpy()\n assert buffer.shape == (1, 10)\n assert buffer.strides == (40, 4)\n assert buffer.dtype == np.float32\n del executable_network\n del ie_core\n del net\n\n\ndef test_write_to_input_blobs_directly(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = executable_network.requests[0]\n input_data = request.input_blobs[\"data\"]\n input_data.buffer[:] = img\n assert np.array_equal(executable_network.requests[0].input_blobs[\"data\"].buffer, img)\n del executable_network\n del ie_core\n del net\n\n\ndef test_write_to_input_blobs_copy(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n executable_network = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = executable_network.requests[0]\n request.input_blobs[\"data\"].buffer[:] = img\n assert np.allclose(executable_network.requests[0].input_blobs[\"data\"].buffer, img)\n del executable_network\n del ie_core\n del net\n\n\ndef test_infer(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.infer({'data': img})\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n del exec_net\n del ie_core\n del net\n\n\ndef test_async_infer_default_timeout(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.async_infer({'data': img})\n request.wait()\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n del exec_net\n del ie_core\n del net\n\n\ndef test_async_infer_wait_finish(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.async_infer({'data': img})\n request.wait(ie.WaitMode.RESULT_READY)\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n del exec_net\n del ie_core\n del net\n\n\ndef test_async_infer_wait_time(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=2)\n img = read_image()\n request = exec_net.requests[0]\n request.async_infer({'data': img})\n start_time = datetime.utcnow()\n status = request.wait(ie.WaitMode.RESULT_READY)\n assert status == ie.StatusCode.OK\n time_delta = datetime.utcnow() - start_time\n latency_ms = (time_delta.microseconds / 1000) + (time_delta.seconds * 1000)\n timeout = max(100, latency_ms)\n request = exec_net.requests[1]\n request.async_infer({'data': img})\n max_repeat = 10\n status = ie.StatusCode.REQUEST_BUSY\n i = 0\n while i < max_repeat and status != ie.StatusCode.OK:\n status = request.wait(timeout)\n i += 1\n assert status == ie.StatusCode.OK\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n del exec_net\n del ie_core\n del net\n\n\ndef 
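_editor_async_poll_sketch(device):\n # Editor's addition (hedged sketch, not part of the original suite):\n # condensed form of the bounded poll-until-ready pattern used by\n # test_async_infer_wait_time above, relying on the same wait(timeout)\n # overload.\n exec_net = load_sample_model(device)\n request = exec_net.requests[0]\n request.async_infer({'data': read_image()})\n status = ie.StatusCode.REQUEST_BUSY\n attempts = 0\n while attempts < 10 and status != ie.StatusCode.OK:\n status = request.wait(100)\n attempts += 1\n assert status == ie.StatusCode.OK\n return np.argmax(request.output_blobs['fc_out'].buffer)\n\n\ndef 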
test_async_infer_wait_status(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.async_infer({'data': img})\n request.wait(ie.WaitMode.RESULT_READY)\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n status = request.wait(ie.WaitMode.STATUS_ONLY)\n assert status == ie.StatusCode.OK\n del exec_net\n del ie_core\n del net\n\n\ndef test_async_infer_fill_inputs(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.input_blobs['data'].buffer[:] = img\n request.async_infer()\n status_end = request.wait()\n assert status_end == ie.StatusCode.OK\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res[0]) == 2\n del exec_net\n del ie_core\n del net\n\n\ndef test_infer_modify_outputs(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n outputs0 = exec_net.infer({'data': img})\n status_end = request.wait()\n assert status_end == ie.StatusCode.OK\n assert np.argmax(outputs0['fc_out']) == 2\n outputs0['fc_out'][:] = np.zeros(shape=(1, 10), dtype=np.float32)\n outputs1 = request.output_blobs\n assert np.argmax(outputs1['fc_out'].buffer) == 2\n outputs1['fc_out'].buffer[:] = np.ones(shape=(1, 10), dtype=np.float32)\n outputs2 = request.output_blobs\n assert np.argmax(outputs2['fc_out'].buffer) == 2\n del exec_net\n del ie_core\n del net\n\n\ndef test_async_infer_callback(device):\n def static_vars(**kwargs):\n def decorate(func):\n for k in kwargs:\n setattr(func, k, kwargs[k])\n return func\n\n return decorate\n\n @static_vars(callback_called=0)\n def callback(self, status):\n callback.callback_called = 1\n\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.set_completion_callback(callback)\n request.async_infer({'data': img})\n status = request.wait()\n assert status == ie.StatusCode.OK\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n assert callback.callback_called == 1\n del exec_net\n del ie_core\n\n\ndef test_async_infer_callback_wait_before_start(device):\n def static_vars(**kwargs):\n def decorate(func):\n for k in kwargs:\n setattr(func, k, kwargs[k])\n return func\n return decorate\n\n @static_vars(callback_called=0)\n def callback(self, status):\n callback.callback_called = 1\n\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request = exec_net.requests[0]\n request.set_completion_callback(callback)\n status = request.wait()\n assert status == ie.StatusCode.INFER_NOT_STARTED\n request.async_infer({'data': img})\n status = request.wait()\n assert status == ie.StatusCode.OK\n res = request.output_blobs['fc_out'].buffer\n assert np.argmax(res) == 2\n assert callback.callback_called == 1\n del exec_net\n del ie_core\n\n\ndef test_async_infer_callback_wait_in_callback(device):\n class InferReqWrap:\n def __init__(self, request):\n self.request = request\n self.cv = threading.Condition()\n 
self.request.set_completion_callback(self.callback)\n self.status_code = self.request.wait(ie.WaitMode.STATUS_ONLY)\n assert self.status_code == ie.StatusCode.INFER_NOT_STARTED\n\n def callback(self, statusCode, userdata):\n self.status_code = self.request.wait(ie.WaitMode.STATUS_ONLY)\n self.cv.acquire()\n self.cv.notify()\n self.cv.release()\n\n def execute(self, input_data):\n self.request.async_infer(input_data)\n self.cv.acquire()\n self.cv.wait()\n self.cv.release()\n status = self.request.wait(ie.WaitMode.RESULT_READY)\n assert status == ie.StatusCode.OK\n assert self.status_code == ie.StatusCode.RESULT_NOT_READY\n\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n img = read_image()\n request_wrap = InferReqWrap(exec_net.requests[0])\n request_wrap.execute({'data': img})\n del exec_net\n del ie_core\n\n\ndef test_async_infer_wait_while_callback_will_not_finish(device):\n def callback(status, callback_status):\n time.sleep(0.01)\n callback_status['finished'] = True\n\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n callback_status = {}\n callback_status['finished'] = False\n request = exec_net.requests[0]\n request.set_completion_callback(callback, py_data=callback_status)\n img = read_image()\n request.async_infer({'data': img})\n request.wait()\n assert callback_status['finished'] == True\n\n\ndef test_get_perf_counts(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n ie_core.set_config({\"PERF_COUNT\": \"YES\"}, device)\n exec_net = ie_core.load_network(net, device)\n img = read_image()\n request = exec_net.requests[0]\n request.infer({'data': img})\n pc = request.get_perf_counts()\n assert pc['29'][\"status\"] == \"EXECUTED\"\n del exec_net\n del ie_core\n del net\n\n\[email protected](os.environ.get(\"TEST_DEVICE\", \"CPU\") != \"CPU\",\n reason=f\"Can't run test on device {os.environ.get('TEST_DEVICE', 'CPU')}, \"\n \"Dynamic batch fully supported only on CPU\")\ndef test_set_batch_size(device):\n ie_core = ie.IECore()\n if ie_core.get_metric(device, \"FULL_DEVICE_NAME\") == \"arm_compute::NEON\":\n pytest.skip(\"Can't run on ARM plugin due-to dynamic batch isn't supported\")\n ie_core.set_config({\"DYN_BATCH_ENABLED\": \"YES\"}, device)\n net = ie_core.read_network(test_net_xml, test_net_bin)\n net.batch_size = 10\n data = np.zeros(shape=net.input_info['data'].input_data.shape)\n exec_net = ie_core.load_network(net, device)\n data[0] = read_image()[0]\n request = exec_net.requests[0]\n request.set_batch(1)\n request.infer({'data': data})\n assert np.allclose(int(round(request.output_blobs['fc_out'].buffer[0][2])), 1), \"Incorrect data for 1st batch\"\n del exec_net\n del ie_core\n del net\n\n\ndef test_set_zero_batch_size(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n request = exec_net.requests[0]\n with pytest.raises(ValueError) as e:\n request.set_batch(0)\n assert \"Batch size should be positive integer number but 0 specified\" in str(e.value)\n del exec_net\n del ie_core\n del net\n\n\ndef test_set_negative_batch_size(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(net, device, num_requests=1)\n request = exec_net.requests[0]\n with pytest.raises(ValueError) as 
e:\n request.set_batch(-1)\n assert \"Batch size should be positive integer number but -1 specified\" in str(e.value)\n del exec_net\n del ie_core\n del net\n\n\ndef test_blob_setter(device):\n ie_core = ie.IECore()\n if device == \"CPU\":\n if ie_core.get_metric(device, \"FULL_DEVICE_NAME\") == \"arm_compute::NEON\":\n pytest.skip(\"Can't run on ARM plugin\")\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net_1 = ie_core.load_network(network=net, device_name=device, num_requests=1)\n\n net.input_info['data'].layout = \"NHWC\"\n exec_net_2 = ie_core.load_network(network=net, device_name=device, num_requests=1)\n\n img = read_image()\n res_1 = np.sort(exec_net_1.infer({\"data\": img})['fc_out'])\n\n img = np.transpose(img, axes=(0, 2, 3, 1)).astype(np.float32)\n tensor_desc = ie.TensorDesc(\"FP32\", [1, 3, 32, 32], \"NHWC\")\n img_blob = ie.Blob(tensor_desc, img)\n request = exec_net_2.requests[0]\n request.set_blob('data', img_blob)\n request.infer()\n res_2 = np.sort(request.output_blobs['fc_out'].buffer)\n assert np.allclose(res_1, res_2, atol=1e-2, rtol=1e-2)\n\n\ndef test_blob_setter_with_preprocess(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(network=net, device_name=device, num_requests=1)\n\n img = read_image()\n tensor_desc = ie.TensorDesc(\"FP32\", [1, 3, 32, 32], \"NCHW\")\n img_blob = ie.Blob(tensor_desc, img)\n preprocess_info = ie.PreProcessInfo()\n preprocess_info.mean_variant = ie.MeanVariant.MEAN_IMAGE\n\n request = exec_net.requests[0]\n request.set_blob('data', img_blob, preprocess_info)\n pp = request.preprocess_info[\"data\"]\n assert pp.mean_variant == ie.MeanVariant.MEAN_IMAGE\n\n\ndef test_getting_preprocess(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net = ie_core.load_network(network=net, device_name=device, num_requests=1)\n request = exec_net.requests[0]\n preprocess_info = request.preprocess_info[\"data\"]\n assert isinstance(preprocess_info, ie.PreProcessInfo)\n assert preprocess_info.mean_variant == ie.MeanVariant.NONE\n\n\ndef test_resize_algorithm_work(device):\n ie_core = ie.IECore()\n net = ie_core.read_network(test_net_xml, test_net_bin)\n exec_net_1 = ie_core.load_network(network=net, device_name=device, num_requests=1)\n\n img = read_image()\n res_1 = np.sort(exec_net_1.infer({\"data\": img})['fc_out'])\n\n net.input_info['data'].preprocess_info.resize_algorithm = ie.ResizeAlgorithm.RESIZE_BILINEAR\n\n exec_net_2 = ie_core.load_network(net, device)\n\n import cv2\n\n image = cv2.imread(path_to_img)\n if image is None:\n raise FileNotFoundError(\"Input image not found\")\n\n image = image / 255\n image = image.transpose((2, 0, 1)).astype(np.float32)\n image = np.expand_dims(image, 0)\n\n tensor_desc = ie.TensorDesc(\"FP32\", [1, 3, image.shape[2], image.shape[3]], \"NCHW\")\n img_blob = ie.Blob(tensor_desc, image)\n request = exec_net_2.requests[0]\n assert request.preprocess_info[\"data\"].resize_algorithm == ie.ResizeAlgorithm.RESIZE_BILINEAR\n request.set_blob('data', img_blob)\n request.infer()\n res_2 = np.sort(request.output_blobs['fc_out'].buffer)\n\n assert np.allclose(res_1, res_2, atol=1e-2, rtol=1e-2)\n\n\[email protected](\"mode\", [\"set_init_memory_state\", \"reset_memory_state\", \"normal\"])\[email protected](\"data_type\", [\"FP32\", \"FP16\", \"I32\"])\[email protected](\"input_shape\", [[10], [10, 10], [10, 10, 10], [2, 10, 10, 10]])\[email protected](os.environ.get(\"TEST_DEVICE\", 
\"CPU\") != \"CPU\",\n reason=f\"Can't run test on device {os.environ.get('TEST_DEVICE', 'CPU')}, \"\n \"Memory layers fully supported only on CPU\")\ndef test_query_state_write_buffer(device, input_shape, data_type, mode):\n ie_core = ie.IECore()\n if device == \"CPU\":\n if ie_core.get_metric(device, \"FULL_DEVICE_NAME\") == \"arm_compute::NEON\":\n pytest.skip(\"Can't run on ARM plugin\")\n\n layout = [\"C\", \"HW\", \"CHW\", \"NCHW\"]\n\n from openvino.inference_engine import TensorDesc, Blob, format_map\n\n net = ie.IENetwork(create_function_with_memory(input_shape, format_map[data_type]))\n ie_core = ie.IECore()\n exec_net = ie_core.load_network(network=net, device_name=device, num_requests=1)\n request = exec_net.requests[0]\n mem_states = request.query_state()\n mem_state = mem_states[0]\n\n assert mem_state.name == 'var_id_667'\n # todo: Uncomment after fix 45611,\n # CPU plugin returns outputs and memory state in FP32 in case of FP16 original precision\n #assert mem_state.state.tensor_desc.precision == data_type\n\n for i in range(1, 10):\n if mode == \"set_init_memory_state\":\n # create initial value\n const_init = 5\n init_array = np.full(input_shape, const_init, dtype=format_map[mem_state.state.tensor_desc.precision])\n tensor_desc = TensorDesc(mem_state.state.tensor_desc.precision, input_shape, layout[len(input_shape) - 1])\n blob = Blob(tensor_desc, init_array)\n mem_state.state = blob\n\n res = exec_net.infer({\"input_data\": np.full(input_shape, 1, dtype=format_map[data_type])})\n expected_res = np.full(input_shape, 1 + const_init, dtype=format_map[data_type])\n elif mode == \"reset_memory_state\":\n # reset initial state of ReadValue to zero\n mem_state.reset()\n res = exec_net.infer({\"input_data\": np.full(input_shape, 1, dtype=format_map[data_type])})\n\n # always ones\n expected_res = np.full(input_shape, 1, dtype=format_map[data_type])\n else:\n res = exec_net.infer({\"input_data\": np.full(input_shape, 1, dtype=format_map[data_type])})\n expected_res = np.full(input_shape, i, dtype=format_map[data_type])\n\n assert np.allclose(res['MemoryAdd'], expected_res, atol=1e-6), \\\n \"Expected values: {} \\n Actual values: {} \\n\".format(expected_res, res)\n\n\[email protected]_plugin\ndef test_set_blob_with_incorrect_name():\n function = create_encoder([4, 4, 20, 20])\n net = ng.function_to_cnn(function)\n ie_core = ie.IECore()\n ie_core.register_plugin(\"openvino_template_plugin\", \"TEMPLATE\")\n exec_net = ie_core.load_network(net, \"TEMPLATE\")\n tensor_desc = exec_net.requests[0].input_blobs[\"data\"].tensor_desc\n tensor_desc.dims = [4, 4, 20, 20]\n blob = ie.Blob(tensor_desc)\n with pytest.raises(RuntimeError) as e:\n exec_net.requests[0].set_blob(\"incorrect_name\", blob)\n assert f\"Failed to find input or output with name: 'incorrect_name'\" in str(e.value)\n\n\[email protected]_plugin\ndef test_set_blob_with_incorrect_size():\n function = create_encoder([4, 4, 20, 20])\n net = ng.function_to_cnn(function)\n ie_core = ie.IECore()\n ie_core.register_plugin(\"openvino_template_plugin\", \"TEMPLATE\")\n exec_net = ie_core.load_network(net, \"TEMPLATE\")\n tensor_desc = exec_net.requests[0].input_blobs[\"data\"].tensor_desc\n tensor_desc.dims = [tensor_desc.dims[0]*2, 4, 20, 20]\n blob = ie.Blob(tensor_desc)\n print(exec_net.requests[0].output_blobs)\n with pytest.raises(RuntimeError) as e:\n exec_net.requests[0].set_blob(\"data\", blob)\n assert f\"Input blob size is not equal network input size\" in str(e.value)\n with pytest.raises(RuntimeError) as e:\n 
exec_net.requests[0].set_blob(\"out\", blob)\n assert f\"Output blob size is not equal network output size\" in str(e.value)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\n\nfrom openvino.tools.mo.ops.elementwise import Mul, Add\nfrom openvino.tools.mo.front.common.replacement import FrontReplacementOp\nfrom openvino.tools.mo.graph.graph import Graph\nfrom openvino.tools.mo.ops.const import Const\n\n\nclass ImageScaler(FrontReplacementOp):\n op = \"ImageScaler\"\n enabled = True\n\n def replace_sub_graph(self, graph: Graph, match: dict):\n # This replacer replaces the ImageScaler operation with a Mul->Add sequence\n # It also checks that the weights and biases are actually needed\n op = match['op']\n\n # Check that weights and biases are not useless\n has_bias, has_weights = True, True\n if all([x == 1 for x in np.nditer(op.scale)]):\n has_weights = False\n if all([x == 0 for x in np.nditer(op.bias)]):\n has_bias = False\n\n assert len(op.in_ports()) == 1\n\n last_port = op.in_port(0).get_source()\n\n # Create Mul & Add nodes\n if has_weights:\n mul_weights = Const(graph, dict(value=op.scale, shape=op.scale.shape)).create_node()\n mul_op = Mul(graph, dict(name=op.id + '/mul_')).create_node()\n op.in_port(0).get_connection().set_destination(mul_op.in_port(0))\n mul_weights.out_port(0).connect(mul_op.in_port(1))\n last_port = mul_op.out_port(0)\n\n if has_bias:\n add_bias = Const(graph, dict(value=op.bias, shape=op.bias.shape)).create_node()\n add_op = Add(graph, dict(name=op.id + '/add_')).create_node()\n last_port.get_connection().set_destination(add_op.in_port(0))\n add_bias.out_port(0).connect(add_op.in_port(1))\n last_port = add_op.out_port(0)\n\n op.in_port(0).disconnect()\n op.out_port(0).get_connection().set_source(last_port)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\n\nfrom openvino.tools.mo.front.common.partial_infer.roipooling import roipooling_infer\nfrom openvino.tools.mo.graph.graph import Node\nfrom unit_tests.utils.graph import build_graph\n\nnodes_attributes = {'node_1': {'kind': 'data'},\n 'node_2': {'kind': 'data'},\n 'node_3': {'kind': 'data'},\n 'node_4': {'kind': 'data'},\n 'roipool': {'type': 'ROIPooling', 'kind': 'op', 'pooled_h': None, 'pooled_w': None},\n 'output': {'value': None, 'kind': 'data'},\n 'op_output': { 'kind': 'op', 'op': 'Result'},\n }\n\n\nclass TestRoipoolingInfer(unittest.TestCase):\n def test_roipooling_infer_ideal(self):\n graph = build_graph(nodes_attributes,\n [('node_1', 'roipool'),\n ('node_2', 'roipool'),\n ('roipool', 'output'),\n ('output', 'op_output')\n ],\n {'output': {'shape': None},\n 'node_1': {'shape': np.array([1, 256, 20, 20])},\n 'node_2': {'shape': np.array([150, 5])},\n 'roipool': {'pooled_h': 6, 'pooled_w': 6}\n })\n graph.graph['layout'] = 'NCHW'\n roipooling_node = Node(graph, 'roipool')\n\n roipooling_infer(roipooling_node)\n exp_shape = np.array([150, 256, 6, 6])\n res_shape = graph.node['output']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n\n def test_roipooling_infer_no_shape(self):\n graph = build_graph(nodes_attributes,\n [('node_1', 'roipool'),\n ('node_2', 'roipool'),\n ('roipool', 'output'),\n ('output', 'op_output')\n ],\n {'output': {'shape': None},\n 'node_1': {'shape': None},\n 'node_2': {'shape': np.array([1, 256])},\n 'roipool': {'pooled_h': 6, 'pooled_w': 6}\n })\n graph.graph['layout'] = 'NCHW'\n\n roipooling_node = Node(graph, 
'roipool')\n\n roipooling_infer(roipooling_node)\n self.assertIsNone(graph.node['output']['shape'])\n\n def test_roipooling_infer_tf(self):\n graph = build_graph(nodes_attributes,\n [('node_1', 'roipool'),\n ('node_2', 'roipool'),\n ('node_3', 'roipool'),\n ('node_4', 'roipool'),\n ('roipool', 'output'),\n ('output', 'op_output')\n ],\n {'output': {'shape': None},\n 'node_1': {'shape': np.array([1, 20, 20, 256])},\n 'node_2': {'shape': np.array([150, 5])},\n 'node_3': {'shape': np.array([150])},\n 'node_4': {'shape': np.array([2], dtype=np.int64), 'value': np.array([7, 6],\n dtype=np.int64)},\n })\n graph.graph['layout'] = 'NHWC'\n roipooling_node = Node(graph, 'roipool')\n\n roipooling_infer(roipooling_node)\n exp_shape = np.array([150, 7, 6, 256])\n res_shape = graph.node['output']['shape']\n for i in range(0, len(exp_shape)):\n self.assertEqual(exp_shape[i], res_shape[i])\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\nimport onnx\nfrom generator import generator, generate\n\nimport openvino.tools.mo.front.onnx.activation_ext as extractors\nfrom openvino.tools.mo.ops.activation_ops import Elu\nfrom openvino.tools.mo.graph.graph import Node\nfrom openvino.tools.mo.ops.op import Op\nfrom unit_tests.utils.extractors import PB\nfrom unit_tests.utils.graph import build_graph\n\n\n@generator\nclass ActivationOpsONNXExtractorTest(unittest.TestCase):\n @staticmethod\n def _create_node(op_name: str):\n pb = onnx.helper.make_node(op_name, [\"X\"], [\"Y\"])\n graph = build_graph({'node_0': {'pb': pb}}, [])\n return Node(graph, 'node_0')\n\n @staticmethod\n def _base_attrs(op_name: str):\n # reference output Node attributes\n return (\n dict(\n op=op_name,\n )\n )\n\n def _match(self, out, ref):\n for key in ref.keys():\n status = out[key] == ref[key]\n if type(status) in [list, np.ndarray]:\n status = np.all(status)\n self.assertTrue(status, 'Mismatch for field {}, observed: {}, expected: {}'.format(key, out[key], ref[key]))\n\n @staticmethod\n def _extract(op_name):\n node = __class__._create_node(op_name)\n getattr(extractors, op_name + 'Extractor').extract(node)\n return node.graph.node[node.id]\n\n @generate(*['Abs', 'Acos', 'Asin', 'Atan', 'Acosh', 'Asinh', 'Atanh', 'Cos', 'Cosh', 'Erf', 'Exp', 'Floor', 'Log', 'Not', 'Sigmoid', 'Sin',\n 'Sinh', 'Tan', 'Tanh'])\n def test_default(self, op_name):\n ref = self._base_attrs(op_name)\n if ref['op'] == 'Not':\n ref['op'] = 'LogicalNot'\n out = self._extract(op_name)\n self._match(out, ref)\n\n\n@generator\nclass TestEluONNXExt(unittest.TestCase):\n @staticmethod\n def _create_elu_node(alpha=1.0):\n pb = onnx.helper.make_node(\n 'Elu',\n inputs=['x'],\n outputs=['y'],\n alpha=alpha\n )\n node = PB({'pb': pb})\n return node\n\n @classmethod\n def setUpClass(cls):\n Op.registered_ops['Elu'] = Elu\n\n @generate(*[1.0, 2.0, 3.0])\n def test_elu_ext(self, alpha):\n node = self._create_elu_node(alpha)\n extractors.EluExtractor.extract(node)\n\n exp_res = {\n 'type': 'Elu',\n 'alpha': alpha,\n 'infer': Elu.infer\n }\n\n for key in exp_res.keys():\n self.assertEqual(node[key], exp_res[key])\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport numpy as np\nimport unittest\n\nfrom openvino.tools.mo.front.mxnet.gluoncv_ssd_anchors import SsdAnchorsReplacer\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array\nfrom openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs\nfrom unit_tests.utils.graph 
import build_graph\n\nnodes_attributes = {\n 'slice_like': {'kind': 'op', 'op': 'slice_like'},\n 'model_reshape0': {'kind': 'op', 'op': 'Reshape'},\n 'model_reshape0_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1, 4])},\n 'model_reshape1': {'kind': 'op', 'op': 'Reshape'},\n 'model_reshape1_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1, 4])},\n 'model_reshape2': {'kind': 'op', 'op': 'Reshape'},\n 'model_reshape2_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1])},\n 'reshape0': {'kind': 'op', 'op': 'Reshape'},\n 'reshape0_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, -1])},\n 'concat': {'kind': 'op', 'op': 'Concat'},\n 'reshape1': {'kind': 'op', 'op': 'Reshape'},\n 'reshape1_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, 2, -1])},\n 'split': {'kind': 'op', 'op': 'Split', 'num_splits': 2},\n 'split_const': {'kind': 'op', 'op': 'Const', 'value': int64_array(1)},\n 'reshape2': {'kind': 'op', 'op': 'Reshape'},\n 'reshape2_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([-1, 4])},\n 'value': {'kind': 'op', 'op': 'Split', 'num_splits': 4},\n 'value_const': {'kind': 'op', 'op': 'Const', 'value': int64_array(1)},\n 'div_1': {'kind': 'op', 'op': 'Div'},\n 'div_1_const': {'kind': 'op', 'op': 'Const', 'value': np.array([2], dtype=np.float32)},\n 'div_2': {'kind': 'op', 'op': 'Div'},\n 'div_2_const': {'kind': 'op', 'op': 'Const', 'value': np.array([2], dtype=np.float32)},\n 'xmin': {'kind': 'op', 'op': 'Sub'},\n 'ymin': {'kind': 'op', 'op': 'Sub'},\n 'xmax': {'kind': 'op', 'op': 'Add'},\n 'ymax': {'kind': 'op', 'op': 'Add'},\n 'concat_value': {'kind': 'op', 'op': 'Concat', 'axis': 1},\n 'reshape3': {'kind': 'op', 'op': 'Reshape'},\n 'reshape3_const': {'kind': 'op', 'op': 'Const', 'value': int64_array([1, 1, -1])},\n 'end_concat': {'kind': 'op', 'op': 'Concat'},\n 'detection_output': {'kind': 'op', 'op': 'DetectionOutput'}\n}\n\n\nclass SsdAnchorsReplacerTest(unittest.TestCase):\n\n def test_replacer(self):\n graph = build_graph(\n nodes_attrs=nodes_attributes,\n edges=[\n ('slice_like', 'model_reshape0', {'in': 0}),\n ('model_reshape0_const', 'model_reshape0', {'in': 1}),\n ('model_reshape0', 'model_reshape1', {'in': 0}),\n ('model_reshape1_const', 'model_reshape1', {'in': 1}),\n ('model_reshape1', 'model_reshape2', {'in': 0}),\n ('model_reshape2_const', 'model_reshape2', {'in': 1}),\n ('model_reshape2', 'reshape0', {'in': 0}),\n ('reshape0_const', 'reshape0', {'in': 1}),\n ('reshape0', 'concat'),\n ('concat', 'detection_output', {'in': 2})\n ],\n nodes_with_edges_only=True\n )\n\n ref_graph = build_graph(\n nodes_attrs=nodes_attributes,\n edges=[\n ('slice_like', 'model_reshape0', {'in': 0}),\n ('model_reshape0_const', 'model_reshape0', {'in': 1}),\n ('model_reshape0', 'model_reshape1', {'in': 0}),\n ('model_reshape1_const', 'model_reshape1', {'in': 1}),\n ('model_reshape1', 'model_reshape2', {'in': 0}),\n ('model_reshape2_const', 'model_reshape2', {'in': 1}),\n ('model_reshape2', 'reshape0', {'in': 0}),\n ('reshape0_const', 'reshape0', {'in': 1}),\n ('reshape0', 'concat'),\n ('concat', 'reshape1', {'in': 0}),\n ('reshape1_const', 'reshape1', {'in': 1}),\n ('reshape1', 'split', {'in': 0}),\n ('split_const', 'split', {'in': 1}),\n ('split', 'reshape2', {'out': 0, 'in': 0}),\n ('reshape2_const', 'reshape2', {'in': 1}),\n ('reshape2', 'value', {'in': 0}),\n ('value_const', 'value', {'in': 1}),\n ('value', 'xmin', {'out': 0, 'in': 0}),\n ('value', 'ymin', {'out': 1, 'in': 0}),\n ('value', 'xmax', {'out': 0, 
'in': 1}),\n                ('value', 'ymax', {'out': 1, 'in': 1}),\n                ('value', 'div_1', {'out': 2, 'in': 0}),\n                ('value', 'div_2', {'out': 3, 'in': 0}),\n                ('div_1_const', 'div_1', {'in': 1}),\n                ('div_2_const', 'div_2', {'in': 1}),\n                ('div_1', 'xmin', {'in': 1, 'out': 0}),\n                ('div_1', 'xmax', {'in': 0, 'out': 0}),\n                ('div_2', 'ymin', {'in': 1, 'out': 0}),\n                ('div_2', 'ymax', {'in': 0, 'out': 0}),\n                ('xmin', 'concat_value', {'in': 0}),\n                ('ymin', 'concat_value', {'in': 1}),\n                ('xmax', 'concat_value', {'in': 2}),\n                ('ymax', 'concat_value', {'in': 3}),\n                ('concat_value', 'reshape3', {'in': 0}),\n                ('reshape3_const', 'reshape3', {'in': 1}),\n                ('reshape3', 'end_concat', {'in': 0}),\n                ('split', 'end_concat', {'in': 1}),\n                ('end_concat', 'detection_output', {'in': 2})\n            ],\n            update_attributes={\n                'concat': {'axis': 1}\n            },\n            nodes_with_edges_only=True\n        )\n        graph.stage = 'front'\n        graph.graph['cmd_params'].data_type = 'FP32'\n        SsdAnchorsReplacer().find_and_replace_pattern(graph)\n        flag, resp = compare_graphs(graph, ref_graph, 'detection_output', check_op_attrs=True)\n        self.assertTrue(flag, resp)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport logging as log\n\nimport numpy as np\n\nfrom openvino.tools.mo.ops.gather import Gather\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array\nfrom openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input, create_op_with_const_inputs\nfrom openvino.tools.mo.graph.graph import Graph, rename_node\nfrom openvino.tools.mo.middle.replacement import MiddleReplacementPattern\nfrom openvino.tools.mo.ops.reshape import Reshape\n\n\nclass GatherNDNormalize(MiddleReplacementPattern):\n    \"\"\"\n    Hot fix to enable a new speech-to-text model while GatherND is not implemented in IE.\n    We can replace GatherND with Reshape + Gather in the case when the GatherND indices have just one\n    meaningful dimension.\n    TODO: Investigate whether we must replace GatherND with Reshape + Gather always (due to performance benefits)\n    for this particular case or only if the plugin does not support GatherND.\n    Also, the best place for the transformation is nGraph, so we need to move it there.\n    \"\"\"\n    enabled = True\n    force_clean_up = True\n\n    def run_before(self):\n        from openvino.tools.mo.middle.BlockLSTMtoLSTMSequence import BlockLSTMtoLSTMSequence\n        return [BlockLSTMtoLSTMSequence]\n\n    def run_after(self):\n        from openvino.tools.mo.middle.pass_separator import MiddleStart\n        return [MiddleStart]\n\n    def pattern(self):\n        return dict(\n            nodes=[('GatherND', dict(kind='op', op='GatherND', batch_dims=0))],\n            edges=[]\n        )\n\n    @staticmethod\n    def indices_check(indices: np.array, input_shape: tuple):\n        \"\"\"\n        Check that indices have just one meaningful dimension and all other dimensions of input have size 1.\n        \"\"\"\n        n_dims = indices.shape[-1]\n        non_zero = None\n        for i in range(n_dims):\n            if not all(np.take(indices, indices=[i], axis=-1) == 0):\n                if non_zero is None:\n                    non_zero = i\n                else:\n                    return None\n            else:\n                if input_shape[i] != 1:\n                    return None\n        return non_zero\n\n    def replace_pattern(self, graph: Graph, match: dict):\n        gather = match['GatherND']\n        gather_name = gather.soft_get('name', gather.id)\n        input_shape = gather.in_node(0).shape\n        indices = gather.in_node(1).value\n        if indices is None:\n            # We can't apply this special pass without the indices value\n            return\n\n        # 0. 
All needed checks that we can replace GatherND by Gather\n gather_idx = self.indices_check(indices, input_shape)\n if gather_idx is None:\n log.warning('Node {} with op=GatherND can\\'t be normalized to op=Gather.'.format(gather_name))\n return\n\n # 1. Add Reshape and connect\n new_shape = int64_array([-1] + list(input_shape[indices.shape[-1]:]))\n reshape = create_op_node_with_second_input(graph, Reshape, new_shape,\n {'name': gather_name + '/Reshape_for_GatherND/'})\n gather.in_port(0).get_connection().set_destination(reshape.in_port(0))\n\n # 2. Change indices from Nd to 1d:\n new_indices = np.reshape(np.take(indices, indices=[gather_idx], axis=-1), [-1])\n\n rename_node(gather, gather_name + '/to_delete')\n\n # 3. Create new Gather operation and reconnect all inputs/outputs\n new_gather = create_op_with_const_inputs(graph, Gather, {1: new_indices, 2: int64_array(0)},\n {'name': gather_name})\n rename_node(new_gather, gather_name)\n\n reshape.out_port(0).connect(new_gather.in_port(0))\n\n gather.out_port(0).get_connection().set_source(new_gather.out_port(0))\n\n # 4. Remove old Gather node\n graph.remove_node(gather.id)\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nimport unittest\n\nimport numpy as np\nimport onnx\n\nfrom openvino.tools.mo.front.onnx.priorbox_clustered_ext import PriorBoxClusteredFrontExtractor\nfrom openvino.tools.mo.ops.priorbox_clustered import PriorBoxClusteredOp\nfrom openvino.tools.mo.ops.op import Op\nfrom unit_tests.utils.extractors import PB\n\n\nclass TestPriorBoxClusteredExt(unittest.TestCase):\n @staticmethod\n def _create_priorbox_clustered_node(width=np.array([]), height=np.array([]),\n flip=False, clip=False, variance=None, img_size=0, img_h=0,\n img_w=0, step=0, step_h=0, step_w=0, offset=0):\n pb = onnx.helper.make_node(\n 'PriorBoxClustered',\n inputs=['x'],\n outputs=['y'],\n width=width,\n height=height,\n flip=flip,\n clip=clip,\n variance=variance,\n img_size=img_size,\n img_h=img_h,\n img_w=img_w,\n step=step,\n step_h=step_h,\n step_w=step_w,\n offset=offset,\n )\n\n node = PB({'pb': pb})\n return node\n\n @classmethod\n def setUpClass(cls):\n Op.registered_ops['PriorBoxClustered'] = PriorBoxClusteredOp\n\n def test_priorbox_clustered_no_pb_no_ml(self):\n self.assertRaises(AttributeError, PriorBoxClusteredFrontExtractor.extract, None)\n\n def test_priorbox_clustered_ext_ideal_numbers(self):\n node = self._create_priorbox_clustered_node(width= np.array([2, 3], dtype=np.float),\n height=np.array([4, 5], dtype=np.float),\n variance=np.array([0.2, 0.3, 0.2, 0.3]),\n img_size=300, step=5.0, offset=0.6, flip=True)\n\n PriorBoxClusteredFrontExtractor.extract(node)\n\n exp_res = {\n 'op': 'PriorBoxClustered',\n 'type': 'PriorBoxClustered',\n 'clip': 0,\n 'flip': 1,\n 'width': np.array([2, 3], dtype=np.float),\n 'height': np.array([4, 5], dtype=np.float),\n 'variance': [0.2, 0.3, 0.2, 0.3],\n 'img_size': 300,\n 'img_h': 0,\n 'img_w': 0,\n 'step': 5,\n 'step_h': 0,\n 'step_w': 0,\n 'offset': 0.6\n }\n\n for key in exp_res.keys():\n if key in ['variance', 'width', 'height', 'step_h', 'step_w', 'offset']:\n np.testing.assert_almost_equal(node[key], exp_res[key])\n else:\n self.assertEqual(node[key], exp_res[key])\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\nimport numpy as np\nimport unittest\n\nfrom openvino.tools.mo.back.add_outputs_recursive import AddOutputRecursive\nfrom openvino.tools.mo.ops.If import If\nfrom openvino.tools.mo.ops.loop import Loop\nfrom 
openvino.tools.mo.ops.tensor_iterator import TensorIterator\nfrom openvino.tools.mo.front.common.partial_infer.elemental import copy_shape_infer\nfrom openvino.tools.mo.front.common.partial_infer.utils import int64_array, dynamic_dimension_value, shape_array\nfrom openvino.tools.mo.graph.graph import Node\nfrom unit_tests.utils.graph import build_graph, regular_op_with_empty_data, result, connect, shaped_parameter, \\\n valued_const_with_data, shaped_const_with_data, regular_op_with_shaped_data\n\n# test for Loop\nmain_graph_nodes = {\n **shaped_parameter(\"IN_1\", [1, 4, 64, 54]),\n **shaped_parameter(\"IN_2\", [1, 4, 64, 54]),\n **valued_const_with_data(\"M\", int64_array([5])),\n **valued_const_with_data(\"cond\", int64_array([1])),\n **regular_op_with_empty_data(\"Loop\", {'op': \"Loop\", 'type': 'Loop', 'sub_graphs': ['body'], \"body\": None,\n 'input_port_map': [{'external_port_id': 1, 'internal_layer_id': 2,\n 'axis': None},\n {'external_port_id': 2, 'internal_layer_id': 0,\n 'axis': None},\n {'external_port_id': 3, 'internal_layer_id': 1,\n 'axis': None}],\n 'output_port_map': [{'external_port_id': 0, 'internal_layer_id': 4,\n 'axis': None},\n {'external_port_id': -1, 'internal_layer_id': 5,\n 'axis': None, 'purpose': \"execution_condition\"}],\n 'back_edges': [{'from_layer': 8, 'to_layer': 7},\n {'from_layer': 10, 'to_layer': 9}],\n 'infer': Loop.infer}),\n **result(\"OUT_1\")\n}\n\nsub_graph_1_nodes = {\n **shaped_parameter(\"IN_2\", int64_array([1, 4, 64, 54]), {'internal_layer_id': 0}),\n **valued_const_with_data(\"M_2\", int64_array([10])),\n **valued_const_with_data(\"cond_2\", int64_array([1])),\n **regular_op_with_empty_data(\"Loop_2\", {'op': \"Loop\", 'type': 'Loop', 'sub_graphs': ['body'], \"body\": None,\n 'input_port_map': [{'external_port_id': 1, 'internal_layer_id': 0,\n 'axis': None},\n {'external_port_id': 2, 'internal_layer_id': 2,\n 'axis': None}],\n 'output_port_map': [{'external_port_id': 0, 'internal_layer_id': 7,\n 'axis': None},\n {'external_port_id': -1, 'internal_layer_id': 6,\n 'axis': None,\n 'purpose': \"execution_condition\"}],\n 'back_edges': [{'from_layer': 1, 'to_layer': 0},\n {'from_layer': 8, 'to_layer': 2}],\n 'infer': Loop.infer}),\n **regular_op_with_empty_data('Loop_2_out', {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 3}),\n **shaped_parameter(\"in_1_int\", int64_array([1, 4, 64, 54]), {'internal_layer_id': 1}),\n **regular_op_with_empty_data(\"in_1_int_out\",\n {'op': 'Result', 'type': 'Result', 'infer': lambda x: None, 'internal_layer_id': 4}),\n **shaped_parameter(\"cond_1_int\", int64_array([1]), {'internal_layer_id': 2}),\n **regular_op_with_empty_data(\"cond_1_int_out\", {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 5}),\n}\n\nsub_graph_2_nodes = {\n **shaped_parameter('cond_2_int', [1, 4, 64, 54], {'internal_layer_id': 0}),\n **regular_op_with_empty_data(\"cond_2_int_out\",\n {'op': 'Result', 'type': 'Result', 'infer': lambda x: None, 'internal_layer_id': 8}),\n **shaped_parameter('in_2_int', [1, 4, 64, 54], {'internal_layer_id': 1}),\n **shaped_const_with_data('ones', int64_array([1, 4, 64, 54]), {'internal_layer_id': 9}),\n **regular_op_with_shaped_data('OUT_2', int64_array([1, 4, 64, 54]), {'op': \"Add\", 'infer': copy_shape_infer}),\n **regular_op_with_empty_data('OUT_2_out',\n {'op': 'Result', 'type': 'Result', 'infer': lambda x: None, 'internal_layer_id': 7}),\n **regular_op_with_shaped_data('in_2_int_out', int64_array([1, 4, 64, 54]),\n {'op': 'Result', 
'type': 'Result', 'infer': lambda x: None, 'internal_layer_id': 6})\n}\n\n\ndef ti_create_main_graph(body):\n main_graph = build_graph(nodes_attrs=ti_main_graph_nodes,\n edges=[*connect('M', '0:Loop'),\n *connect('cond', '1:Loop'),\n *connect('IN_2', '2:Loop'),\n *connect('IN_1', \"3:Loop\"),\n *connect('Loop:0', 'OUT_1')],\n nodes_with_edges_only=True)\n loop_node = Node(main_graph, 'Loop')\n loop_node.body = body\n loop_node.in_edge(0)['external_port_id'] = 0\n loop_node.in_edge(1)['external_port_id'] = 1\n loop_node.in_edge(2)['external_port_id'] = 2\n loop_node.in_edge(3)['external_port_id'] = 3\n loop_node.out_edge(0)['external_port_id'] = 4\n\n return main_graph\n\n\ndef if_create_main_graph():\n sub_graph_2 = build_graph(nodes_attrs=if_sub_graph_2_then_nodes,\n edges=[*connect('in_2_int', 'OUT_2'),\n *connect('ones', 'OUT_2'),\n *connect('OUT_2', 'OUT_2_out')],\n nodes_with_edges_only=True)\n\n sub_graph_2_else = build_graph(nodes_attrs=if_sub_graph_2_else_nodes,\n edges=[*connect('in_2_int_else', 'OUT_2_else'),\n *connect('ones_else', 'OUT_2_else'),\n *connect('OUT_2_else', 'OUT_2_out_else')],\n nodes_with_edges_only=True)\n\n sub_graph_1 = build_graph(nodes_attrs=if_sub_graph_1_then_nodes,\n edges=[*connect('cond_2', '0:If_2'),\n *connect('IN_2', '1:If_2'),\n *connect('If_2:0', 'If_2_out'),\n *connect('in_1_int', 'in_1_int_out')],\n nodes_with_edges_only=True)\n if_node_1 = Node(sub_graph_1, 'If_2')\n if_node_1.then_graph = sub_graph_2\n if_node_1.else_graph = sub_graph_2_else\n\n return sub_graph_1\n\n\nclass AddOutputRecursiveTest(unittest.TestCase):\n\n def test_add_output_1(self):\n sub_graph_2 = build_graph(nodes_attrs=sub_graph_2_nodes,\n edges=[*connect('cond_2_int', 'cond_2_int_out'),\n *connect('in_2_int', 'OUT_2'),\n *connect('ones', 'OUT_2'),\n *connect('OUT_2', 'OUT_2_out'),\n *connect('in_2_int', 'in_2_int_out')],\n nodes_with_edges_only=True)\n\n sub_graph_1 = build_graph(nodes_attrs=sub_graph_1_nodes,\n edges=[*connect('M_2', '0:Loop_2'),\n *connect('cond_2', '1:Loop_2'),\n *connect('IN_2', '2:Loop_2'),\n *connect('Loop_2:0', 'Loop_2_out'),\n *connect('in_1_int', 'in_1_int_out'),\n *connect('cond_1_int', 'cond_1_int_out')],\n nodes_with_edges_only=True)\n loop_node_1 = Node(sub_graph_1, 'Loop_2')\n loop_node_1.body = sub_graph_2\n\n main_graph = build_graph(nodes_attrs=main_graph_nodes,\n edges=[*connect('M', '0:Loop'),\n *connect('cond', '1:Loop'),\n *connect('IN_2', '2:Loop'),\n *connect('IN_1', \"3:Loop\"),\n *connect('Loop:0', 'OUT_1')],\n nodes_with_edges_only=True)\n loop_node = Node(main_graph, 'Loop')\n loop_node.body = sub_graph_1\n main_graph.graph['additional_outputs'] = ['Loop', 'Loop_2']\n loop_node_1['out_ports_count'] = 2\n loop_node_1.add_output_port(1)\n loop_node_1['output_port_map'].append({'external_port_id': 1, 'internal_layer_id': 8, 'axis': None})\n\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_2_out_ports_len = len(loop_node_1.out_ports())\n max_layer_id = 5\n\n results = AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.assertEqual(len(results), 2)\n loop_node = Node(main_graph, 'Loop')\n self.assertEqual(len(loop_node.output_port_map), loop_node_output_port_map_len + 2)\n self.assertEqual(len(loop_node.out_ports()), loop_node_out_ports_len + 2)\n self.assertEqual(loop_node.out_port(1).get_destination().node.op, 'Result')\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == int64_array([5, 10, 4, 64, 54])))\n last_node = 
Node(sub_graph_1, 'Loop_2')\n self.assertEqual(len(last_node.out_ports()), loop_2_out_ports_len)\n unsq_node = last_node.out_port(0).get_destinations()[1].node\n self.assertEqual(unsq_node.op, 'Unsqueeze')\n self.assertEqual(unsq_node.out_port(0).get_destination().node.op, 'Result')\n self.assertEqual(unsq_node.out_port(0).get_destination().node.internal_layer_id, max_layer_id + 3)\n self.assertTrue(np.all(unsq_node.out_port(0).data.get_shape() == int64_array([1, 10, 4, 64, 54])))\n\n\n# test for TensorIterator\nti_main_graph_nodes = {\n **shaped_parameter(\"IN_1\", [1, 4, 64, 54]),\n **shaped_parameter(\"IN_2\", [1, 4, 64, 54]),\n **valued_const_with_data(\"M\", int64_array([5])),\n **valued_const_with_data(\"cond\", int64_array([1])),\n **regular_op_with_empty_data(\"Loop\", {'op': \"TensorIterator\", 'type': 'TensorIterator',\n 'sub_graphs': ['body'], \"body\": None,\n 'input_port_map': [{'external_port_id': 1, 'internal_layer_id': 2, 'axis': None},\n {'external_port_id': 2, 'internal_layer_id': 0, 'axis': None},\n {'external_port_id': 3, 'internal_layer_id': 1, 'axis': None}],\n 'output_port_map': [{'external_port_id': 4, 'internal_layer_id': 4, 'axis': None}],\n 'back_edges': [{'from_layer': 8, 'to_layer': 7},\n {'from_layer': 10, 'to_layer': 9}],\n 'infer': TensorIterator.infer}),\n **result(\"OUT_1\")\n}\n\nti_sub_graph_1_nodes = {\n **shaped_parameter(\"IN_2\", int64_array([1, 4, 64, 54]), {'internal_layer_id': 0}),\n **valued_const_with_data(\"cond_2\", int64_array([1])),\n **regular_op_with_empty_data(\"Loop_2\", {'op': \"TensorIterator\", 'type': 'TensorIterator',\n 'sub_graphs': ['body'], \"body\": None,\n 'input_port_map': [{'external_port_id': 1, 'internal_layer_id': 0, 'axis': None},\n {'external_port_id': 0, 'internal_layer_id': 1, 'axis': 0}],\n 'output_port_map': [{'external_port_id': 2, 'internal_layer_id': 7,\n 'axis': None},\n ],\n 'back_edges': [{'from_layer': 1, 'to_layer': 0},\n {'from_layer': 8, 'to_layer': 2}],\n 'infer': TensorIterator.infer}),\n **regular_op_with_empty_data('Loop_2_out', {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 3}),\n **shaped_parameter(\"in_1_int\", int64_array([1, 4, 64, 54]), {'internal_layer_id': 1}),\n **regular_op_with_empty_data(\"in_1_int_out\", {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 4}),\n **shaped_parameter(\"cond_1_int\", int64_array([1]), {'internal_layer_id': 2}),\n **regular_op_with_empty_data(\"cond_1_int_out\", {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 5}),\n}\n\nti_sub_graph_2_nodes = {\n **shaped_parameter('cond_2_int', [1, 4, 64, 54], {'internal_layer_id': 0}),\n **result(\"cond_2_int_out\"),\n **shaped_parameter('in_2_int', [1, 4, 64, 54], {'internal_layer_id': 1}),\n **shaped_const_with_data('ones', int64_array([1, 4, 64, 54])),\n **regular_op_with_shaped_data('OUT_2', int64_array([1, 4, 64, 54]),\n {'op': \"Add\", 'infer': copy_shape_infer}),\n **regular_op_with_empty_data('OUT_2_out', {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 7}),\n **regular_op_with_empty_data('in_2_int_out', {'op': 'Result', 'type': 'Result', 'infer': lambda x: None,\n 'internal_layer_id': 6})\n}\n\n\nclass TI_AddOutputRecursiveTest(unittest.TestCase):\n @staticmethod\n def create_graph():\n sub_graph_2 = build_graph(nodes_attrs=ti_sub_graph_2_nodes,\n edges=[*connect('cond_2_int', 'cond_2_int_out'),\n *connect('in_2_int', 'OUT_2'),\n *connect('ones', 'OUT_2'),\n *connect('OUT_2', 
'OUT_2_out'),\n *connect('in_2_int', 'in_2_int_out')],\n nodes_with_edges_only=True)\n\n sub_graph_1 = build_graph(nodes_attrs=ti_sub_graph_1_nodes,\n edges=[*connect('cond_2', '1:Loop_2'),\n *connect('IN_2', '0:Loop_2'),\n *connect('Loop_2:0', 'Loop_2_out'),\n *connect('in_1_int', 'in_1_int_out'),\n *connect('cond_1_int', 'cond_1_int_out')],\n nodes_with_edges_only=True)\n loop_node_1 = Node(sub_graph_1, 'Loop_2')\n loop_node_1.body = sub_graph_2\n loop_node_1.in_edge(0)['external_port_id'] = 0\n loop_node_1.in_edge(1)['external_port_id'] = 1\n loop_node_1.out_edge(0)['external_port_id'] = 2\n\n main_graph = ti_create_main_graph(sub_graph_1)\n main_graph.graph['additional_outputs'] = ['Loop', 'Loop_2']\n\n return main_graph, sub_graph_1\n\n def check_body_last_node(self, body, node_id, loop_2_node_out_ports_len):\n last_node = Node(body, node_id)\n max_layer_id = 5\n self.assertEqual(len(last_node.out_ports()), loop_2_node_out_ports_len)\n unsq_node = last_node.out_port(0).get_destinations()[1].node\n self.assertEqual(unsq_node.op, 'Unsqueeze')\n self.assertEqual(unsq_node.out_port(0).get_destination().node.op, 'Result')\n self.assertEqual(unsq_node.out_port(0).get_destination().node.internal_layer_id, max_layer_id + 3)\n self.assertTrue(np.all(unsq_node.out_port(0).data.get_shape() == int64_array([1, 1, 4, 64, 54])))\n\n def check_loop_node(self, graph, node_id, port_map_len, out_ports_len):\n loop_node = Node(graph, node_id)\n self.assertEqual(len(loop_node.output_port_map), port_map_len + 1)\n self.assertEqual(len(loop_node.out_ports()), out_ports_len + 1)\n self.assertEqual(loop_node.out_port(1).get_destination().node.op, 'Result')\n\n def test_add_output_1(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == int64_array([1, 1, 4, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_dynamic(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['start'] = 0\n loop_node.input_port_map[2]['end'] = -1\n loop_node.input_port_map[2]['stride'] = 1\n in_1_node = Node(main_graph, 'IN_1')\n in_1_node['shape'] = shape_array([1, dynamic_dimension_value, 64, 54])\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() ==\n shape_array([dynamic_dimension_value, 1, 4, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_several_iterations(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = 
len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['start'] = 0\n loop_node.input_port_map[2]['end'] = -1\n loop_node.input_port_map[2]['stride'] = 1\n loop_node.output_port_map[0]['axis'] = 1\n loop_node.output_port_map[0]['start'] = 0\n loop_node.output_port_map[0]['end'] = 10\n loop_node.output_port_map[0]['stride'] = 2\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == shape_array([4, 1, 4, 64, 54])))\n self.assertTrue(np.all(loop_node.out_port(0).data.get_shape() == shape_array([1, 5, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_several_iterations_wo_start_end(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['stride'] = 1\n\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == shape_array([4, 1, 4, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_several_iterations_negative_end(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['start'] = 0\n loop_node.input_port_map[2]['end'] = -3\n loop_node.input_port_map[2]['stride'] = 1\n loop_node.output_port_map[0]['axis'] = 1\n loop_node.output_port_map[0]['start'] = 0\n loop_node.output_port_map[0]['end'] = -1\n loop_node.output_port_map[0]['stride'] = 2\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == shape_array([2, 1, 4, 64, 54])))\n self.assertTrue(np.all(loop_node.out_port(0).data.get_shape() == shape_array([1, 2, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_several_iterations_negative_stride(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['start'] = -1\n loop_node.input_port_map[2]['end'] = 0\n loop_node.input_port_map[2]['stride'] = -2\n loop_node.output_port_map[0]['axis'] = 1\n 
loop_node.output_port_map[0]['start'] = 0\n loop_node.output_port_map[0]['end'] = -1\n loop_node.output_port_map[0]['stride'] = 2\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == shape_array([2, 1, 4, 64, 54])))\n self.assertTrue(np.all(loop_node.out_port(0).data.get_shape() == shape_array([1, 2, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_several_iterations_negative_start_end_input(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['start'] = -1\n loop_node.input_port_map[2]['end'] = -4\n loop_node.input_port_map[2]['stride'] = -2\n loop_node.output_port_map[0]['axis'] = 1\n loop_node.output_port_map[0]['start'] = 0\n loop_node.output_port_map[0]['end'] = -1\n loop_node.output_port_map[0]['stride'] = 2\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == shape_array([2, 1, 4, 64, 54])))\n self.assertTrue(np.all(loop_node.out_port(0).data.get_shape() == shape_array([1, 2, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n def test_add_output_several_iterations_negative_start_end_output(self):\n main_graph, sub_graph_1 = self.create_graph()\n\n loop_node = Node(main_graph, 'Loop')\n loop_node_output_port_map_len = len(loop_node.output_port_map)\n loop_node_out_ports_len = len(loop_node.out_ports())\n loop_node_2 = Node(sub_graph_1, 'Loop_2')\n loop_2_node_out_ports_len = len(loop_node_2.out_ports())\n\n loop_node.input_port_map[2]['axis'] = 1\n loop_node.input_port_map[2]['start'] = -1\n loop_node.input_port_map[2]['end'] = -4\n loop_node.input_port_map[2]['stride'] = -2\n loop_node.output_port_map[0]['axis'] = 1\n loop_node.output_port_map[0]['start'] = -4\n loop_node.output_port_map[0]['end'] = -1\n loop_node.output_port_map[0]['stride'] = 1\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n\n self.check_loop_node(main_graph, 'Loop', loop_node_output_port_map_len, loop_node_out_ports_len)\n self.assertTrue(np.all(loop_node.out_port(1).data.get_shape() == shape_array([2, 1, 4, 64, 54])))\n self.assertTrue(np.all(loop_node.out_port(0).data.get_shape() == shape_array([1, 3, 64, 54])))\n self.check_body_last_node(sub_graph_1, 'Loop_2', loop_2_node_out_ports_len)\n\n\n# test for If\nif_main_graph_nodes = {\n **shaped_parameter(\"IN_1\", [1, 4, 64, 54]),\n **shaped_parameter(\"IN_2\", [1, 4, 64, 54]),\n **valued_const_with_data(\"cond\", int64_array([1])),\n **regular_op_with_empty_data(\"If\", {'op': \"If\", 'type': 'If', 'sub_graphs': ['then_graph', 'else_graph'],\n \"then_graph\": None, 'else_graph': None, 'infer': If.infer}),\n **result(\"OUT_1\")\n}\n\nif_sub_graph_1_then_nodes = {\n **shaped_parameter(\"IN_2\", int64_array([1, 4, 64, 54]), {'input_id': 2}),\n **valued_const_with_data(\"cond_2\", int64_array([1])),\n **regular_op_with_empty_data(\"If_2\", {'op': \"If\", 'type': 
'If', 'sub_graphs': ['then_graph', 'else_graph'],\n \"then_graph\": None, 'else_graph': None, 'infer': If.infer}),\n **regular_op_with_empty_data('If_2_out', {'op': 'Result', 'type': 'Result', 'infer': lambda x: None}),\n **shaped_parameter(\"in_1_int\", int64_array([1, 4, 64, 54]), {'input_id': 1}),\n **regular_op_with_empty_data(\"in_1_int_out\", {'op': 'Result', 'type': 'Result', 'output_id': 0})\n}\n\nif_sub_graph_1_else_nodes = {\n **shaped_parameter(\"in_1_int\", int64_array([1, 4, 64, 54]), {'input_id': 1}),\n **regular_op_with_empty_data(\"in_1_int_out\", {'op': 'Result', 'type': 'Result', 'output_id': 0})\n}\n\nif_sub_graph_2_then_nodes = {\n **shaped_parameter('in_2_int', [1, 4, 64, 54], {'input_id': 1}),\n **shaped_const_with_data('ones', int64_array([1, 4, 64, 54])),\n **regular_op_with_shaped_data('OUT_2', int64_array([1, 4, 64, 54]), {'op': \"Add\"}),\n **regular_op_with_empty_data('OUT_2_out', {'op': 'Result', 'type': 'Result', 'output_id': 0}),\n}\n\nif_sub_graph_2_else_nodes = {\n **shaped_parameter('in_2_int_else', [1, 4, 64, 54], {'input_id': 1}),\n **shaped_const_with_data('ones_else', int64_array([1, 4, 64, 54])),\n **regular_op_with_shaped_data('OUT_2_else', int64_array([1, 4, 64, 54]), {'op': \"Sub\"}),\n **regular_op_with_empty_data('OUT_2_out_else', {'op': 'Result', 'type': 'Result', 'output_id': 0}),\n}\n\n\nclass IF_AddOutputRecursiveTest(unittest.TestCase):\n def test_add_output_1(self):\n sub_graph_1 = if_create_main_graph()\n if_node_1 = Node(sub_graph_1, 'If_2')\n\n sub_graph_1_else = build_graph(nodes_attrs=if_sub_graph_1_else_nodes,\n edges=[*connect('in_1_int', 'in_1_int_out')],\n nodes_with_edges_only=True)\n\n main_graph = build_graph(nodes_attrs=if_main_graph_nodes,\n edges=[*connect('cond', '0:If'),\n *connect('IN_1', '1:If'),\n *connect('IN_2', \"2:If\"),\n *connect('If:0', 'OUT_1')],\n nodes_with_edges_only=True)\n if_node = Node(main_graph, 'If')\n if_node.then_graph = sub_graph_1\n if_node.else_graph = sub_graph_1_else\n if_node_out_ports_len = len(if_node.out_ports())\n if_2_node_out_ports_len = len(if_node_1.out_ports())\n\n main_graph.graph['additional_outputs'] = ['If', ['If_2', 'in_1_int']]\n\n AddOutputRecursive().find_and_replace_pattern(main_graph)\n if_node = Node(main_graph, 'If')\n self.assertEqual(len(if_node.out_ports()), if_node_out_ports_len + 1)\n self.assertEqual(if_node.out_port(1).get_destination().node.op, 'Result')\n self.assertTrue(np.all(if_node.out_port(1).data.get_shape() == int64_array([1, 4, 64, 54])))\n last_node = Node(sub_graph_1, 'If_2')\n self.assertEqual(len(last_node.out_ports()), if_2_node_out_ports_len)\n self.assertEqual(last_node.out_port(0).get_destinations()[1].node.op, 'Result')\n self.assertTrue(np.all(last_node.out_port(0).data.get_shape() == int64_array([1, 4, 64, 54])))\n\n\nclass SplitUserPathTest(unittest.TestCase):\n\n @staticmethod\n def create_graph():\n sub_graph_1 = if_create_main_graph()\n out_node = Node(sub_graph_1, 'If_2_out')\n out_node['internal_layer_id'] = 4\n\n main_graph = ti_create_main_graph(sub_graph_1)\n\n return main_graph\n\n def test_linear_graph_change(self):\n graph = self.create_graph()\n path = ['Loop', 'in_1_int']\n ref_path = []\n loop_node = Node(graph, 'Loop')\n ref_path.append({'node': loop_node, 'graph': graph})\n ref_path.append({'node': Node(loop_node.body, 'in_1_int'), 'graph': loop_node.body})\n\n tracks = AddOutputRecursive().split_path_to_simple_tracks(graph, path)\n\n self.assertTrue(np.all(tracks[0] == ref_path))\n\n def test_1_if_graph_change(self):\n graph = 
self.create_graph()\n path = ['Loop', 'If_2', ['OUT_2', 'OUT_2_else']]\n ref_path = [[]]\n loop_node = Node(graph, 'Loop')\n ref_path[0].append({'node': loop_node, 'graph': graph})\n if_node = Node(loop_node.body, 'If_2')\n ref_path[0].append({'node': if_node, 'graph': loop_node.body})\n ref_path.append([])\n ref_path[1] = ref_path[0][:]\n ref_path[0].append({'node': Node(if_node.then_graph, 'OUT_2'), 'graph': if_node.then_graph})\n ref_path[1].append({'node': Node(if_node.else_graph, 'OUT_2_else'), 'graph': if_node.else_graph})\n\n tracks = AddOutputRecursive().split_path_to_simple_tracks(graph, path)\n\n self.assertTrue(np.all(tracks[0] == ref_path[0]))\n self.assertTrue(np.all(tracks[1] == ref_path[1]))\n\n def test_1_if_graph_change_add_output(self):\n graph = self.create_graph()\n graph.graph['additional_outputs'] = ['Loop', 'If_2', ['OUT_2', 'OUT_2_else']]\n\n AddOutputRecursive().find_and_replace_pattern(graph)\n\n loop_node = Node(graph, 'Loop')\n if_node = Node(loop_node.body, 'If_2')\n left_node = Node(if_node.then_graph, 'OUT_2')\n right_node = Node(if_node.else_graph, 'OUT_2_else')\n self.assertEqual(len(left_node.out_port(0).get_destinations()), 2)\n self.assertEqual(left_node.out_port(0).get_destinations()[1].node.op, 'Result')\n\n self.assertEqual(len(right_node.out_port(0).get_destinations()), 2)\n self.assertEqual(right_node.out_port(0).get_destinations()[1].node.op, 'Result')\n\n self.assertTrue(len(if_node.out_ports()), 2)\n self.assertTrue(if_node.out_port(1).get_destination().node.op, 'Result')\n\n self.assertTrue(len(loop_node.out_ports()), 2)\n self.assertTrue(loop_node.out_port(1).get_destination().node.op, 'Result')\n", "# Copyright (C) 2018-2022 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\n#\n# relu paddle model generator\n#\nimport os.path\n\nimport sys\n\nimport os\nimport numpy as np\nimport paddle\n\n\n# print numpy array like C structure\ndef print_alike(arr):\n shape = arr.shape\n rank = len(shape)\n\n # print(\"shape: \", shape, \"rank: %d\" %(rank))\n\n # for idx, value in np.ndenumerate(arr):\n # print(idx, value)\n\n def print_array(arr, end=' '):\n shape = arr.shape\n rank = len(arr.shape)\n if rank > 1:\n line = \"{\"\n for i in range(arr.shape[0]):\n line += print_array(arr[i, :], end=\"},\\n\" if i < arr.shape[0] - 1 else \"}\")\n line += end\n return line\n else:\n line = \"{\"\n for i in range(arr.shape[0]):\n line += \"{:.2f}\".format(arr[i]) # str(arr[i])\n line += \", \" if i < shape[0] - 1 else ' '\n line += end\n # print(line)\n return line\n\n print(print_array(arr, \"}\"))\n\n\ndef saveModel(name, exe, feedkeys: list, fetchlist: list, inputs: list, outputs: list, target_dir: str):\n model_dir = os.path.join(target_dir, name)\n if not os.path.exists(model_dir):\n os.makedirs(model_dir)\n\n print(\"\\n\\n------------- %s -----------\\n\" % (name))\n for i, input in enumerate(inputs):\n print(\"INPUT %s :\" % (feedkeys[i]), input.shape, input.dtype, \"\\n\")\n print_alike(input)\n np.save(os.path.join(model_dir, \"input{}\".format(i)), input)\n np.save(os.path.join(model_dir, \"input{}.{}.{}\".format(i, feedkeys[i], input.dtype)), input)\n print(\"\\n\")\n\n for i, output in enumerate(outputs):\n print(\"OUTPUT %s :\" % (fetchlist[i]), output.shape, output.dtype, \"\\n\")\n print_alike(output)\n np.save(os.path.join(model_dir, \"output{}\".format(i)), output)\n\n # composited model + scattered model\n paddle.fluid.io.save_inference_model(model_dir, feedkeys, fetchlist, exe)\n paddle.fluid.io.save_inference_model(model_dir, 
feedkeys, fetchlist, exe, model_filename=name + \".pdmodel\",\n params_filename=name + \".pdiparams\")\n\n\ndef relu(name: str, x):\n import paddle\n paddle.enable_static()\n\n node_x = paddle.static.data(name='x', shape=x.shape, dtype='float32')\n out = paddle.nn.functional.relu(node_x)\n\n cpu = paddle.static.cpu_places(1)\n exe = paddle.static.Executor(cpu[0])\n # startup program will call initializer to initialize the parameters.\n exe.run(paddle.static.default_startup_program())\n\n outs = exe.run(\n feed={'x': x},\n fetch_list=[out])\n\n saveModel(name, exe, feedkeys=['x'], fetchlist=[out],\n inputs=[x], outputs=[outs[0]], target_dir=sys.argv[1])\n\n return outs[0]\n\n\ndef main():\n data = np.array([-2, 0, 1]).astype('float32')\n\n relu(\"relu_unsupported\", data)\n\n with open(os.path.join(sys.argv[1], \"relu_unsupported\", \"relu_unsupported.pdmodel\"), mode='rb') as file:\n modelContent = file.read()\n\n modelContent = modelContent.replace(b\"relu\", b\"rxyz\")\n\n with open(os.path.join(sys.argv[1], \"relu_unsupported\", \"relu_unsupported.pdmodel\"), mode='wb') as file:\n file.write(modelContent)\n\n\nif __name__ == \"__main__\":\n main()" ]
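A minimal sketch of the stateful semantics that test_query_state_write_buffer (first file in the code list above) relies on. This is plain Python, not the OpenVINO API; MemoryAddModel is a hypothetical stand-in for the ReadValue/MemoryAdd/Assign subgraph, assuming the variable state behaves as a persistent accumulator across inference requests.

class MemoryAddModel:
    def __init__(self):
        self.state = 0              # ReadValue starts from a zero-initialized buffer

    def infer(self, x):
        self.state += x             # MemoryAdd: out = ReadValue(state) + input
        return self.state           # Assign writes the sum back into the state

model = MemoryAddModel()
assert model.infer(1) == 1          # default mode: output equals the iteration count i
assert model.infer(1) == 2
model.state = 5                     # "set_init_memory_state": expected 1 + const_init
assert model.infer(1) == 6
model.state = 0                     # "reset_memory_state": output is always 1
assert model.infer(1) == 1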
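The ImageScaler replacer above rewrites the fused op as an elementwise multiply followed by an add. A short numpy illustration of that equivalence (shapes and values are invented for the example), including the all-zero-bias case in which the Add node is skipped:

import numpy as np

x = np.random.rand(1, 3, 4, 4).astype(np.float32)
scale = np.float32(2.0)                             # op.scale
bias = np.zeros((1, 3, 1, 1), dtype=np.float32)     # op.bias

fused = x * scale + bias                            # ImageScaler semantics
mul_then_add = np.add(np.multiply(x, scale), bias)  # the Mul->Add sequence
assert np.allclose(fused, mul_then_add)

assert all(v == 0 for v in np.nditer(bias))         # has_bias is False -> Add is dropped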
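The GatherNDNormalize pass above relies on GatherND collapsing to Reshape + Gather when its indices vary along a single dimension and every other indexed input dimension has size 1. A numpy sketch of that equivalence with invented values, mirroring the new_shape and new_indices computed in replace_pattern:

import numpy as np

data = np.arange(15).reshape(1, 5, 3)            # input_shape = (1, 5, 3)
indices = np.array([[0, 4], [0, 1], [0, 2]])     # only dimension 1 is meaningful

gather_nd = np.array([data[tuple(i)] for i in indices])  # reference GatherND result

reshaped = data.reshape(-1, 3)                   # new_shape = [-1] + input_shape[2:]
gathered = reshaped[indices[:, 1]]               # 1-D Gather along axis 0
assert np.array_equal(gather_nd, gathered)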
[ [ "numpy.random.randn" ], [ "numpy.array_equal" ], [ "numpy.array" ], [ "numpy.full" ], [ "numpy.all" ], [ "numpy.reshape" ], [ "numpy.array" ], [ "numpy.ones" ], [ "numpy.uint32", "numpy.bool", "numpy.uint8", "numpy.float16", "numpy.int32", "numpy.int8", "numpy.int16", "numpy.int64", "numpy.uint16", "numpy.uint64", "numpy.float32", "numpy.str", "numpy.double" ], [ "numpy.random.rand" ], [ "numpy.array" ], [ "numpy.array_equal" ], [ "numpy.expand_dims", "numpy.allclose", "numpy.array_equal", "numpy.sort", "numpy.ones", "numpy.full", "numpy.argmax", "numpy.transpose", "numpy.zeros" ], [ "numpy.nditer" ], [ "numpy.array" ], [ "numpy.all" ], [ "numpy.array" ], [ "numpy.take" ], [ "numpy.testing.assert_almost_equal", "numpy.array" ], [ "numpy.all" ], [ "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
hades208002/mdp-project
[ "c242a8d00412cc3772d298986977f6acc47002ee", "c242a8d00412cc3772d298986977f6acc47002ee" ]
[ "client_server_test/NEWGUI.py", "client_server_test/LocalModel.py" ]
[ "from tkinter import *\nfrom tkinter import ttk\nimport tkinter.filedialog as fd\nimport pandas as pd\nfrom LocalModelCommunication import LocalModelCommunication\nfrom APP import APP\n\nclass GUI(object):\n\tdef __init__(self):\n\t\t# overall\n\t\tself.tabControl = None\n\t\tself.tab_step1 = None\n\t\tself.tab_step2 = None\n\t\tself.tab_step3 = None\n\t\tself.tab_step4 = None\n\t\tself.dataframe = None\n\t\tself.img_wait = PhotoImage(file='test.GIF')\n\n\t\t# 1 step\n\t\tself.fname = None\n\t\tself.data = None\n\t\tself.features = None\n\t\tself.import_lable = None\n\t\tself.import_label_text = StringVar()\n\t\tself.import_label_text.set(' ')\n\n\t\t# 2 step\n\t\tself.required = ['RR', 'QTm_old', 'sbjBeatConsidered', 'numRRaveraged', 'QR', 'QTn', 'QRS', 'IPG',\n\t\t\t\t\t\t\t\t\t'PQ', 'PCpos', 'PCneg', 'patsex', 'AFclass', 'Age']\n\t\tself.required_ordered = []\n\t\ti = 0\n\t\tfor item in self.required:\n\t\t\tself.required_ordered.append(str(i) + ': ' + item)\n\t\t\ti = i + 1\n\t\tself.leftbox = StringVar()\n\t\tself.rightbox = StringVar()\n\t\tself.rrightbox = StringVar()\n\t\tself.list_left = None\n\t\tself.list_right = None\n\t\tself.list_rright = None\n\n\t\t# 3 step\n\t\tself.model_label = None\n\t\tself.model_label_text = StringVar()\n\t\tself.model_label_text.set('Waiting for model training...')\n\t\tself.img_gif = PhotoImage(file='img.GIF')\n\t\t\n\n\t\t# 4 step\n\t\tself.connect_label = None\n\t\tself.connect_label_text = StringVar()\n\t\tself.connect_label_text.set('Waiting for central server response...')\n\n\t\t# 5 step\n\n\t# help functions\n\tdef add_tab(self, tabControl, tab_name):\n\t\ttab = ttk.Frame(tabControl) # Create a tab\n\t\ttabControl.add(tab, text=tab_name)\n\t\treturn tab\n\n\t# Callback functions\n\t## step 1\n\tdef get_csv(self): # open file system\n\t\tself.fname = fd.askopenfilename(filetypes=[(\".csv file\", \".csv\")])\n\t\tself.data = pd.read_csv(self.fname, delimiter=',')\n\t\tself.features = self.data.columns\n\n\t\tself.import_label_text.set('Import data from: ' + self.fname + '\\n' + str(self.features))\n\t\tself.import_lable.pack(side=TOP)\n\tdef go_next_step2(self):\n\t\tself.tab_step2 = self.add_tab(self.tabControl, \"Step 2: Match Features\")\n\t\tself.tab_match(self.tab_step2)\n\t\tself.tabControl.select(self.tab_step2)\n\t\tself.tabControl.forget(self.tab_step1)\n\t## step 2\n\tdef move_to_right(self):\n\n\t\tself.list_right.insert(END,\n\t\t\t\t\t\t\t str(self.list_right.size()) + ': ' + self.list_left.get(self.list_left.curselection()))\n\t\tself.list_left.delete(self.list_left.curselection())\n\tdef move_to_left(self):\n\t\tcontent = self.list_right.get(self.list_right.curselection())\n\t\tcontents = content.split(': ')\n\t\tself.list_left.insert(END, contents[1])\n\t\tself.list_right.delete(self.list_right.curselection())\n\tdef add_nan(self):\n\t\tself.list_right.insert(END, str(self.list_right.size()) + ': ' + 'NAN')\n\tdef go_next_step3(self):\n\t\t# prepare dataframe for localmodel\n\t\tcolumns = []\n\t\tcontents = self.rightbox.get()\n\t\tcontents = contents.replace('(', '')\n\t\tcontents = contents.replace(')', '')\n\t\tcontents = contents.replace(\"'\", '')\n\t\titem_list = contents.split(', ')\n\t\tfor item in item_list:\n\t\t\tcontent = item.split(': ')[1]\n\t\t\tif content != 'NAN':\n\t\t\t\tcolumns.append(content)\n\n\t\tself.dataframe = self.data[columns]\n\t\tprint(self.dataframe.head(2))\n\t\tself.tab_step3 = self.add_tab(self.tabControl, \"Step 3: Train Model\")\n\t\t# render 
tab3\n\t\tself.tab_model(self.tab_step3)\n\t\tself.tabControl.select(self.tab_step3)\n\t\tself.tabControl.forget(self.tab_step2)\n\tdef go_back_step1(self):\n\t\tself.tab_step1 = self.add_tab(self.tabControl, \"Step 1: Import Data\")\n\t\t# render tab1\n\t\tself.tab_import(self.tab_step1, self.tabControl)\n\t\tself.tabControl.select(self.tab_step1)\n\t\tself.tabControl.forget(self.tab_step2)\n\t## step 3\n\tdef go_next_step4(self):\n\t\tself.tab_step4 = self.add_tab(self.tabControl, \"Step 4: Connect to Central Server\")\n\t\t# render tab4\n\t\tself.tab_connect(self.tab_step4)\n\t\tself.tabControl.select(self.tab_step4)\n\t\tself.tabControl.forget(self.tab_step3)\n\tdef go_back_step2(self):\n\t\tself.tab_step2 = self.add_tab(self.tabControl, \"Step 2: Match Features\")\n\t\t# render tab2\n\t\tself.tab_match(self.tab_step2)\n\t\tself.tabControl.select(self.tab_step2)\n\t\tself.tabControl.forget(self.tab_step3)\n\t## step 4\n\tdef go_next_step5(self):\n\t\tself.tab_step5 = self.add_tab(self.tabControl, \"Step 5: Wait for Prediction Call\")\n\t\t# render tab5\n\t\tself.tab_wait(self.tab_step5)\n\t\tself.tabControl.select(self.tab_step5)\n\t\tself.tabControl.forget(self.tab_step4)\n\tdef go_back_step3(self):\n\t\tself.tab_step3 = self.add_tab(self.tabControl, \"Step 3: Train Model\")\n\t\t# render tab3\n\t\tself.tab_model(self.tab_step3)\n\t\tself.tabControl.select(self.tab_step3)\n\t\tself.tabControl.forget(self.tab_step4)\n\t## step 5\n\n\t# frames\n\tdef tab_import(self, root, tabControl):\n\t\t\"\"\"\n\t\tLoad local data (csv file)\n\t\t\"\"\"\n\t\tself.tabControl = tabControl\n\t\tself.tab_step1 = root\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=TOP)\n\t\tButton(frame, text='Import Data', command=self.get_csv, width=16).pack(side=TOP)\n\t\tlabel_frame = ttk.LabelFrame(frame, text='Press Button to Import Data')\n\t\tlabel_frame.pack(side=TOP)\n\t\tself.import_lable = ttk.Label(label_frame, textvariable=self.import_label_text)\n\t\tself.import_lable.pack(side=TOP)\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=BOTTOM)\n\t\tButton(frame, text='Next>>', command=self.go_next_step2, width=16).pack(side=TOP)\n\n\tdef tab_match(self, root):\n\t\t\"\"\"\n\t\tFeature matching\n\t\t\"\"\"\n\t\tself.leftbox.set(sorted(self.features))\n\t\tself.rightbox.set('')\n\t\tself.rrightbox.set(self.required_ordered)\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=BOTTOM)\n\t\tButton(frame, text='Next>>', command=self.go_next_step3, width=16).pack(side=RIGHT)\n\t\tButton(frame, text='<<Back', command=self.go_back_step1, width=16).pack(side=LEFT)\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=LEFT)\n\t\tcolumn_head = ttk.Label(frame, text='Local Features')\n\t\tcolumn_head.pack(side=TOP)\n\t\tself.list_left = Listbox(frame, listvariable=self.leftbox, width=25, height=20)\n\t\tself.list_left.pack(side=LEFT)\n\n\t\tscrollbar = Scrollbar(frame, orient=\"vertical\")\n\t\tscrollbar.config(command=self.list_left.yview)\n\t\tscrollbar.pack(side=\"right\", fill=\"y\")\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=LEFT)\n\t\tButton(frame, text='->', command=self.move_to_right, width=7).pack(side=TOP)\n\t\tButton(frame, text='<-', command=self.move_to_left, width=7).pack(side=TOP)\n\t\tButton(frame, text='NAN', command=self.add_nan, width=7).pack(side=TOP)\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=LEFT)\n\t\tcolumn_head = ttk.Label(frame, text='Matched Features')\n\t\tcolumn_head.pack(side=TOP)\n\t\tself.list_right = Listbox(frame, listvariable=self.rightbox,height=20, 
width=25)\n\t\tself.list_right.pack(side=LEFT)\n\n\t\tscrollbar = Scrollbar(frame, orient=\"vertical\")\n\t\tscrollbar.config(command=self.list_right.yview)\n\t\tscrollbar.pack(side=\"right\", fill=\"y\")\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=RIGHT)\n\t\tcolumn_head = ttk.Label(frame, text='Required Features')\n\t\tcolumn_head.pack(side=TOP)\n\t\tself.list_rright = Listbox(frame, listvariable=self.rrightbox,height=20, width=25)\n\t\tself.list_rright.pack(side=LEFT)\n\n\t\tscrollbar = Scrollbar(frame, orient=\"vertical\")\n\t\tscrollbar.config(command=self.list_rright.yview)\n\t\tscrollbar.pack(side=\"right\", fill=\"y\")\n\n\tdef tab_model(self, root):\n\t\t\"\"\"\n\t\tCall localmodel.init() and localmodel.train()\n\t\tDisplay model accuracy\n\t\t\"\"\"\n\t\tframe = Frame(root)\n\t\tframe.pack(side=TOP)\n\t\tself.label_frame = ttk.LabelFrame(frame)\n\t\tself.label_frame.pack(side=TOP)\n\t\tself.model_label = ttk.Label(self.label_frame, textvariable=self.model_label_text)\n\t\tself.model_label.pack(side=TOP)\n\t\tself.label_img = ttk.Label(self.label_frame, image=self.img_wait)\n\t\tself.label_img.pack()\n\t\tframe = Frame(root)\n\t\tframe.pack(side=BOTTOM)\n\n\t\tButton(frame, text='Next>>', command=self.go_next_step4, width=16).pack(side=RIGHT)\n\t\tButton(frame, text='<<Back', command=self.go_back_step2, width=16).pack(side=LEFT)\n\n\t\tprint (\"MODEL TRAINED -> \")\n\n\t\tself.loca = LocalModelCommunication(data= self.dataframe)\n\t\ttraining_result = self.loca.chooseModel_with_crossValidation_and_train()\n\n\t\tprint (training_result)\n\n\t\tself.trainingdone()\n\n\n\n\tdef trainingdone(self):\n\t\tself.label_img.config(image=self.img_gif)\n\t\tself.label_img.pack()\n\n\tdef tab_connect(self, root):\n\t\t\"\"\"\n\t\tConnect to center server\n\t\t\"\"\"\n\t\tframe = Frame(root)\n\t\tframe.pack(side=TOP)\n\t\tlabel_frame = ttk.LabelFrame(frame)\n\t\tlabel_frame.pack(side=TOP)\n\t\tself.connect_label = ttk.Label(label_frame, textvariable=self.connect_label_text)\n\t\tself.connect_label.pack(side=TOP)\n\t\tlabel_img = ttk.Label(label_frame, image=self.img_wait)\n\t\tlabel_img.pack()\n\n\t\tframe = Frame(root)\n\t\tframe.pack(side=BOTTOM)\n\t\tButton(frame, text='Next>>', command=self.go_next_step5, width=16).pack(side=RIGHT)\n\t\tButton(frame, text='<<Back', command=self.go_back_step3, width=16).pack(side=LEFT)\n\n\t\t## cannot get fast responce! 
-> get false even if we are connected :]\n\t\tif self.loca.connectToCentral() == False :\n\t\t\tprint (\"not connected\")\n\t\telse :\n\t\t\tprint (\"connected\")\n\t\t'''\n\t\tself.root = Tk()\n\t\tself.root.geometry(\"700x500\")\n\t\tself.root.title(\"Doctor Application\")\n\t\tself.root.resizable(width=False, height=False)\n\n\t\tself.app = APP(root)\n\n\t\tself.root.mainloop()\n\t\t'''\n\n\tdef tab_wait(self, root):\n\t\t\"\"\"\n\t\tCall localmodel.predict()\n\t\t:return:\n\t\t\"\"\"\n\t\tframe = Frame(root)\n\t\tframe.pack(side=TOP)\n\t\tlabel_frame = ttk.LabelFrame(frame)\n\t\tlabel_frame.pack(side=TOP)\n\t\tlabel = ttk.Label(label_frame, text='TODO')\n\t\tlabel.pack(side=TOP)\n\nif __name__ == '__main__':\n root = Tk()\n root.geometry(\"700x500\")\n root.title(\"Modeling Tool GUI\")\n root.resizable(width=False, height=False)\n\n tabControl = ttk.Notebook(root)\n tab_step1 = ttk.Frame(tabControl)\n tabControl.add(tab_step1, text=\"Step 1: Import Data\")\n tabControl.pack(expand=1, fill=\"both\") # Pack to make visible\n\n gui = GUI()\n gui.tab_import(tab_step1, tabControl)\n\n root.mainloop()\n", "# Import all the useful libraries\nimport numpy as np\nimport pandas as pd\nimport fancyimpute\nfrom sklearn import model_selection\nfrom sklearn.model_selection import StratifiedKFold\n\n\n\nfrom sklearn.ensemble import AdaBoostClassifier # PROBABILITY\nfrom sklearn.tree import DecisionTreeClassifier # PROBABILITY\nfrom sklearn.neighbors import RadiusNeighborsClassifier\nfrom sklearn.linear_model import RidgeClassifier\nfrom sklearn.ensemble import GradientBoostingClassifier\nfrom sklearn.ensemble import RandomForestClassifier # PROBABILITY\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom sklearn.neighbors import KNeighborsClassifier # PROBABILITY\nfrom sklearn.linear_model import LogisticRegression # PROBABILITY\nfrom sklearn.naive_bayes import GaussianNB # PROBABILITY\nfrom sklearn.ensemble import ExtraTreesClassifier # PROBABILITY\nfrom sklearn.neighbors import KNeighborsClassifier # PROBABILITY\nfrom sklearn.ensemble import BaggingClassifier # PROBABILITY\n\nfrom imblearn.over_sampling import SMOTE\nfrom imblearn.over_sampling import ADASYN\nfrom imblearn.under_sampling import TomekLinks\n\n\n# MISSING PARTs\n# 1) send the distribution (mean and std) of the data if requested (for example, how the two classes are distrubuted over the age of the population (or any other feature))\n# 2) send other useful data ? ((if available) feature importance, decision_path)\n# ...\n\n# training data -> expected to be with all the listed features (IN ORDER -> like in the data we have). 
It is ok, if there are missing values\n\nclass LocalModel:\n\t\n\t# local model functions\n\t# train\n\t# predict \n \n\t\n\t# initialize the local model with the training data\n\n\tdef __init__(self, data = \"none\", target_name = \"AFclass\" , model_name = \"dt4\",random_state = 12345678, imputation_strategy = 'mice',balance_strategy = 'SMOTE'):\n\t\t# we train the model with all the available data \n\t\tself.target_name = target_name ## it is the name of the target column\n\t\tself.target = None ## it is the target vector\n\t\tself.data = data ## it is the complete dataset -> will be modified\n\t\tself.original_data = data ## store a copy of the original data -> never modified\n\t\tself.X = None ## it is the data except the target\n\t\tself.features = None ## available features\n\t\tself.imputation_strategy = imputation_strategy\n\t\tself.balance_strategy = balance_strategy\n\t\t# for cross-validation \n\t\tself.cv_x = None # data -> in principle equal to self.X\n\t\tself.cv_y = None # target -> in principle equal to self.target\n\t\tself.random_state = random_state # random state -> fixed for testing\n\t\tself.selected_model_name = 'dt4' # name of the model -> default fixed\n\t\tself.selected_model = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=15, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None, min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False) ## default model\n\t\tself.models = [] ## list of all the available models\n\t\t#if not isinstance(self, LocalModel):\n\t\t# self.chosen_model(model_name) # select the chosen model -> otherwise use the default one\n\t\t#self.check1, self.check2, self.check3 = self.fixDataset(imputation_strategy = imputation_strategy, balance_strategy = balance_strategy) ## fix data set before training -> clean data (remove unused columns, convert categorical attributes into numerical), recover missing values (use a strategy to impute the missing values), balance the data set\n\t\t#if isinstance(self, LocalModel):\n\t\t# self.chooseModel_with_crossValidation()\n\t\tself.localModelType = \"app\" ## gui or app -> gui can only respond to predictions , app can only send prediction requests or send data to central model\n\t\tif not str(self.data) == \"none\":\n\t\t\tself.localModelType = \"gui\"\n\t\t\tself.performLocalOperations()\n\n\tdef performLocalOperations(self):\n\t\tself.fixDataset(imputation_strategy = self.imputation_strategy, balance_strategy = self.balance_strategy) ## fix data set before training -> clean data (remove unused columns, convert categorical attributes into numerical), recover missing values (use a strategy to impute the missing values), balance the data set\n\t\t#self.train()\n\n\t# initiate the models\n\n\tdef chooseModel_with_crossValidation_and_train(self):\n\t\tif not str(self.data) == \"none\":\n\t\t\tself.models_definition(self.random_state)\n\t\t\tr = self.crossValidation(all_models = 1, k_fold = 10)\n\t\t\tfound = 0\n\t\t\tfor (n,i) in self.models: # n = name , i = model\n\t\t\t\tif n == r.iloc[0][0] and found == 0:\n\t\t\t\t\tfound = 1\n\t\t\t\t\tself.selected_model = i\n\t\t\t\t\tself.selected_model_name = n\n\t\t\tif found == 0:\n\t\t\t\tself.selected_model = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=15, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None,
min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False)\n\t\t\t\tself.selected_model_name = \"dt4\"\n\t\t\tself.train()\n\t\t\treturn r\n\t\telse:\n\t\t\tprint (\"no data\")\n\t\treturn \"no data\"\n\n\n\tdef models_definition(self,random_state):\n\t\t\n\t\t## here we can tune the parameters of the models\n\t \n\n\t\t#self.models.append((\"ada1\",AdaBoostClassifier(DecisionTreeClassifier(max_depth=1, random_state = self.random_state),algorithm=\"SAMME\", n_estimators=200)))\n\t\t#self.models.append((\"ada2\",AdaBoostClassifier(DecisionTreeClassifier(max_depth=3, random_state = self.random_state),algorithm=\"SAMME\", n_estimators=200)))\n\t\t#self.models.append((\"ada3\",AdaBoostClassifier(DecisionTreeClassifier(max_depth=5, random_state = self.random_state),algorithm=\"SAMME\", n_estimators=100)))\n\t\tself.models.append((\"ada4\",AdaBoostClassifier(DecisionTreeClassifier(max_depth=10, random_state = self.random_state),algorithm=\"SAMME\", n_estimators=300)))\n\t\t#self.models.append((\"ada5\",AdaBoostClassifier(DecisionTreeClassifier(max_depth=20, random_state = self.random_state),algorithm=\"SAMME\", n_estimators=100)))\n\t\t#self.models.append((\"ada6\",AdaBoostClassifier(RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=2, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False))))\n\t\t#self.models.append((\"ada7\",AdaBoostClassifier(RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=5, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False))))\n\t\t#self.models.append((\"ada8\",AdaBoostClassifier(RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=10, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False))))\n\n\t\t#self.models.append(RadiusNeighborsClassifier(radius=10.0, weights='uniform', algorithm='auto', leaf_size=30, p=2, metric='minkowski'))\n\t \n\t\tself.models.append((\"ridge1\", RidgeClassifier(alpha=1.0, fit_intercept=True, normalize=False, copy_X=True, max_iter=None, tol=0.001, class_weight=None, solver='auto', random_state=self.random_state)))\n\t\t\n\t\tparamsGB1 = {'n_estimators': 120, 'max_depth': 3, 'subsample': 0.5,'learning_rate': 0.01, 'min_samples_leaf': 1, 'random_state': self.random_state}\n\t\tparamsGB2 = {'n_estimators': 120, 'max_depth': 6, 'subsample': 0.5,'learning_rate': 0.05, 'min_samples_leaf': 1, 'random_state': self.random_state} \n\t\tparamsGB3 = {'n_estimators': 60, 'max_depth': 15, 'subsample': 0.5,'learning_rate': 0.01, 'min_samples_leaf': 1, 'random_state': self.random_state}\n\t\tparamsGB4 = {'n_estimators': 320, 'max_depth': 10, 'subsample': 0.5,'learning_rate': 0.005, 'min_samples_leaf': 1, 'random_state':
self.random_state}\n\t\t#self.models.append((\"gb1\",GradientBoostingClassifier(**paramsGB1)))\n\t\t#self.models.append((\"gb2\",GradientBoostingClassifier(**paramsGB2)))\n\t\t#self.models.append((\"gb3\",GradientBoostingClassifier(**paramsGB3)))\n\t\tself.models.append((\"gb4\",GradientBoostingClassifier(**paramsGB4)))\n\n\t\t#self.models.append((\"dt1\",DecisionTreeClassifier(random_state=self.random_state)))\n\t\t#self.models.append((\"dt2\",DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=3, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None, min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False)))\n\t\t#self.models.append((\"dt3\",DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=7, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None, min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False)))\n\t\tself.models.append((\"dt4\",DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=15, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None, min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False)))\n\t\tself.models.append((\"dt5\",DecisionTreeClassifier(criterion='entropy', splitter='best', max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None, min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False)))\n\n\n\t\t#self.models.append((\"rf1\",RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=2, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False)))\n\t\tself.models.append((\"rf2\",RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=5, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=20, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False)))\n\t\t#self.models.append((\"rf3\",RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=10, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=50, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False)))\n\t\t\n\t\tself.models.append((\"ld1\",LinearDiscriminantAnalysis(n_components=None, priors=None, shrinkage=None,solver='svd', store_covariance=False, tol=0.0001)))\n\t \n\t\tself.models.append((\"lr1\",LogisticRegression(penalty='l2', dual=False, tol=0.0001, C=1.0, fit_intercept=True, intercept_scaling=1, class_weight=None, random_state=self.random_state, solver='liblinear', max_iter=100, multi_class='ovr', verbose=0, warm_start=False, n_jobs=1)))\n\t \n\t\t#self.models.append((\"knn1\",KNeighborsClassifier(n_neighbors=5, weights='uniform', algorithm='auto', leaf_size=30, p=2, metric='minkowski', 
metric_params=None, n_jobs=1)))\n\t\tself.models.append((\"knn2\",KNeighborsClassifier(n_neighbors=10, weights='uniform', algorithm='auto', leaf_size=30, p=2, metric='minkowski', metric_params=None, n_jobs=1)))\n\t\t#self.models.append((\"knn3\",KNeighborsClassifier(n_neighbors=15, weights='uniform', algorithm='auto', leaf_size=30, p=2, metric='minkowski', metric_params=None, n_jobs=1)))\n\t\tself.models.append((\"knn4\",KNeighborsClassifier(n_neighbors=20, weights='distance', algorithm='auto', leaf_size=30, p=2, metric='minkowski', metric_params=None, n_jobs=1)))\n\t\t#self.models.append((\"knn5\",KNeighborsClassifier(n_neighbors=50, weights='distance', algorithm='auto', leaf_size=30, p=2, metric='minkowski', metric_params=None, n_jobs=1)))\n\t \n\t\tself.models.append((\"nb1\",GaussianNB()))\n \n\t\t#self.models.append((\"et1\",ExtraTreesClassifier(n_estimators=50, random_state=self.random_state))) \n\t\tself.models.append((\"et2\",ExtraTreesClassifier(n_estimators=100, random_state=self.random_state))) \n\t\tself.models.append((\"et3\",ExtraTreesClassifier(n_estimators=200, random_state=self.random_state))) \n\n\t\t#self.models.append((\"bag1\",BaggingClassifier(base_estimator=None, n_estimators=5, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag2\",BaggingClassifier(base_estimator=None, n_estimators=10, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag3\",BaggingClassifier(base_estimator=None, n_estimators=20, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag4\",BaggingClassifier(base_estimator=None, n_estimators=50, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag5\",BaggingClassifier(base_estimator=None, n_estimators=100, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\tself.models.append((\"bag6\",BaggingClassifier(base_estimator=None, n_estimators=150, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag7\",BaggingClassifier(base_estimator=None, n_estimators=200, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\tself.models.append((\"bag8\",BaggingClassifier(base_estimator=RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=2, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False), n_estimators=200, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, 
verbose=0)))\n\t\t#self.models.append((\"bag9\",BaggingClassifier(base_estimator=RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=5, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False), n_estimators=200, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag10\",BaggingClassifier(base_estimator=RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=10, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False), n_estimators=200, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\t\t#self.models.append((\"bag11\",BaggingClassifier(base_estimator=RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',max_depth=20, max_features='auto', max_leaf_nodes=None,min_impurity_decrease=0.0, min_impurity_split=None,min_samples_leaf=1, min_samples_split=2, min_weight_fraction_leaf=0.0, n_estimators=10, n_jobs=1,oob_score=False, random_state=self.random_state, verbose=0, warm_start=False), n_estimators=200, max_samples=1.0, max_features=1.0, bootstrap=True, bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=1, random_state=self.random_state, verbose=0)))\n\n\t\t## add other models ...\n\t\t\n\n\tdef chosen_model(self, name):\n\t\t# initialize the available models\n\t\tself.models_definition(self.random_state)\n\t\tfound = 0\n\t\tfor (n,i) in self.models: # n = name , i = model\n\t\t\tif n == name and found == 0:\n\t\t\t\tfound = 1\n\t\t\t\tself.selected_model = i\n\t\t\t\tself.selected_model_name = name\n\t\tif found == 0 :\n\t\t\t# feel free to modify the model.. 
if another is better\n\t\t\tself.selected_model = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=15, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=None, random_state=self.random_state, max_leaf_nodes=None, min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None, presort=False)\n\t\t\tself.selected_model_name = \"dt4\"\n\t\treturn\n\n\n\t## to choose the best model using cross validation\n\t## normally crossvalidate just the chosen model, if all_models = 1 -> crossvalidate all the models \n\tdef crossValidation(self, all_models = 0, k_fold = 10, random_state = 12345678):\n\t\t# cross validation\n\t\tif all_models == 1:\n\t\t\tprint (\"begin cross validation for all models\")\n\t\t\tevaluation = []\n\t\t\tcounter = 1\n\t\t\tnumberOfModels = len(self.models)\n\t\t\t#best = (\"BEST\", 0, 0)\n\t\t\tfor (name,i) in self.models:\n\t\t\t\tprint (round(counter / numberOfModels,3), \" is complete \\t\" )\n\t\t\t\te = model_selection.cross_val_score(i, self.cv_x, self.cv_y, cv=StratifiedKFold(n_splits=k_fold,random_state=random_state,shuffle=True))\n\t\t\t\tavg = round(np.average(e),4) * 100\n\t\t\t\tstd = round(np.std(e),4) * 100\n\t\t\t\tevaluation.append((name, avg, std))\n\t\t\t\tcounter = counter + 1\n\t\t\tevaluation.sort(key = lambda tup: tup[1], reverse = True)\n\t\t\tdf_cv = pd.DataFrame(evaluation)\n\t\t\tprint (\"end cross validation\")\n\t\t\treturn df_cv\n\t\telse:\n\t\t\te = model_selection.cross_val_score(self.selected_model, self.cv_x, self.cv_y, cv=StratifiedKFold(n_splits=k_fold,random_state=random_state,shuffle=True))\n\t\t\tt = pd.DataFrame([(self.selected_model_name, round(np.average(e),4) * 100 , round(np.std(e),4) * 100 )])\n\t\t\treturn t\n\t\treturn\n\n\tdef showData(self, lines = 5, original_data = 0):\n\t\tif original_data == 1:\n\t\t\tprint (self.original_data.head(lines))\n\t\telse :\n\t\t\tprint(self.data.head(lines))\n\t \n\n\t# remove unused features, convert categorical attributes to numerical ones\n\tdef cleanData(self):\n\t\tprint (\"START CLEANING\")\n\t\t# re-start from the original data\n\t\tself.data = self.original_data\n\t\tif 'Soggetti' in self.data.columns:\n\t\t\tself.data = self.data.drop('Soggetti', axis = 1)\n\t\tif 'PCneg' in self.data.columns:\n\t\t\tself.data = self.data.drop('PCneg', axis = 1)\n\t\tif 'IPG' in self.data.columns:\n\t\t\tself.data = self.data.drop('IPG', axis = 1)\n\t\tif 'sbjBeatConsidered' in self.data.columns:\n\t\t\tself.data = self.data.drop('sbjBeatConsidered', axis=1)\n\t\tif 'numRRaveraged' in self.data.columns:\n\t\t\tself.data = self.data.drop('numRRaveraged', axis=1)\n\n\n\t\t# convert categorical variables into numerical \n\t\tif 'patsex' in self.data.columns and (\"männlich\" in self.data[\"patsex\"].values or \"weiblich\" in self.data[\"patsex\"].values):\n\t\t\tself.data['patsex'] = self.data['patsex'].map({'männlich' : 1, 'weiblich' : 0})\n\n\t\tif 'AFclass' in self.data.columns and (\"persistierend (>7 Tage, EKV)\" in self.data[\"AFclass\"].values or \"paroxysmal\" in self.data[\"AFclass\"].values):\n\t\t\tself.data[\"AFclass\"] = self.data[\"AFclass\"].map({'persistierend (>7 Tage, EKV)' : 1, 'paroxysmal' : 0}) \n\n\t\t# extract features\n\t\tself.features = self.data.columns[self.data.columns != self.target_name]\n\t\tself.X = self.data[self.features]\n\t\tself.target = self.data[self.target_name]\n\t\tprint (\"END CLEANING\")\n\t\n\t# clean the test data -> first drop unused data -> make it \"compliant\" to the features of the dataset
\n\tdef cleanDataTest(self, test_x, features = \"self_features\"):\n\t\tprint (\"START TEST CLEANING\")\n\t\tprint(\"TEST_X : \", test_x.shape)\n\t\tprint (test_x)\n\n\t\t# convert categorical variables into numerical \n\t\tif 'patsex' in test_x.columns and (\"männlich\" in test_x[\"patsex\"].values or \"weiblich\" in test_x[\"patsex\"].values):\n\t\t\ttest_x['patsex'] = test_x['patsex'].map({'männlich' : 1, 'weiblich' : 0})\n\n\t\tif str(features) == \"self_features\":\n\t\t\tlist_of_features = self.features\n\t\telse :\n\t\t\tlist_of_features = features\n\n\t\t# drop all the columns that are not present in the training dataset\n\t\tfor i in test_x.columns:\n\t\t\tif i not in list_of_features:\n\t\t\t\ttest_x = test_x.drop(i, axis = 1)\n\n\t\t# add columns that are not present in the test set\n\t\tfor i in list_of_features:\n\t\t\tif i not in test_x.columns:\n\t\t\t\ttest_x[i] = np.nan\n\n\t\t## REORDER the features\n\t\ttest_x = test_x[list_of_features]\n\t\tprint (\"END TEST CLEANING\")\n\t\tprint(\"DATA shape : \", self.data.shape)\n\t\treturn test_x\n\n\t## data -> it is the dataset we want to 'recover'\n\t\n\tdef imputeData(self, dataframe,imputation_strategy = 'knn', features = \"self_features\" ):\n\t\ttry: \n\t\t\tif imputation_strategy == 'knn':\n\t\t\t\tx_complete_a = fancyimpute.KNN(15).complete(dataframe)\n\t\t## feel free to add other imputation methods \n\t\t# ... \n\t\t\telse : ## default case -> MICE impute method\n\t\t\t\tmice = fancyimpute.MICE(n_imputations=100, impute_type='col', n_nearest_columns=5, init_fill_method = \"mean\")\n\t\t\t\tx_complete_a = mice.complete(dataframe)\n\t\texcept:\n\t\t\tx_complete_a = dataframe\n\t\tprint (\"x_complete shape : \",x_complete_a.shape )\n\t\tif str(features) == \"self_features\":\n\t\t\tf = self.features\n\t\telse :\n\t\t\tf = features\n\t\tprint (\"FEATURES : \",f.size, f )\n\t\treturn pd.DataFrame(x_complete_a, columns = f)\n\n\tdef recoverMissing(self, data = 'trainData', imputation_strategy = 'mice'):\n\t\tprint (\"START RecoverMissing VALUES\")\n\t\tif str(data) == 'trainData':\n\t\t\tx_incomplete = self.data[self.features]\t\n\t\telse:\n\t\t\tx_incomplete = data[self.features]\n\t\t#print (x_incomplete)\n\t\t# create a united dataset -> suppose it is possible -> if we clean first -> then it is possible\n\n\t\tif str(data) != 'trainData':\n\n\t\t\tunited_df = pd.concat([x_incomplete, self.X])\n\t\t\tunited_complete = self.imputeData(united_df, features = x_incomplete.columns)\n\t\t\tx_complete = united_complete.iloc[:x_incomplete.shape[0], :x_incomplete.shape[1]]\n\t\t\t#print (\"united_complete shape : \",united_complete.shape )\n\n\t\telse :\n\t\t\tx_complete = self.imputeData(x_incomplete)\n\t\t'''\n\t\ttry: \n\t\t\tif imputation_strategy == 'knn':\n\t\t\t\tx_complete_a = fancyimpute.KNN(15).complete(x_incomplete)\n\t\t## feel free to add other imputation methods \n\t\t# ...
\n\t\t\telse : ## default case -> MICE impute method\n\t\t\t\tmice = fancyimpute.MICE(n_imputations=100, impute_type='col', n_nearest_columns=5)\n\t\t\t\tx_complete_a = mice.complete(x_incomplete)\n\t\texcept:\n\t\t\tx_complete_a = x_incomplete\n\t\t\n\t\tx_complete = pd.DataFrame(x_complete_a, columns = self.features)\n\t\t'''\n\t\tif str(data) == 'trainData':\n\t\t\tself.X = x_complete\n\t\treturn x_complete\n\t\n\tdef balanceDataSet(self, data = \"trainData\",target_name = \"AFclass\", balance_strategy = 'SMOTE'):\n\t\tif str(data) == \"trainData\":\n\t\t\tX = self.X\n\t\t\ty = self.data[self.target_name].as_matrix()\n\t\t\ttarget_name = self.target_name\n\t\telse :\n\t\t\tX = data[data.columns[data.columns != target_name]]\n\t\t\ty = data[target_name].as_matrix()\n\t\ty_new = pd.DataFrame(y)\n\t\ty_new = y_new.rename(columns = {y_new.columns[0] : target_name})\n\t\tData_complete = pd.concat([X,y_new], axis = 1)\n\t\tif balance_strategy == 'ADASYN':\n\t\t\ttry:\n\t\t\t\tprint (\"Try ADASYN\")\n\t\t\t\tX_resampled, y_resampled = ADASYN().fit_sample(X, y_new)\n\t\t\texcept:\n\t\t\t\tprint (\"ADASYN FAILED -> using SMOTE\")\n\t\t\t\tX_resampled, y_resampled = SMOTE().fit_sample(X, y_new)\n\n\t\t\t## feel free to add other balancing strategies\n\t\t\t# ...\n\t\telse : # default SMOTE\n\t\t\tX_resampled, y_resampled = SMOTE().fit_sample(X, y_new)\n\n\n\n\t\tX_final = pd.DataFrame(X_resampled, columns = self.features)\n\t\tY_final = pd.DataFrame(y_resampled)\n\t\tY_final = Y_final.rename(columns = {Y_final.columns[0] : self.target_name})\n\n\t\tData_final = pd.concat([X_final,Y_final], axis = 1)\n\t\tif str(data) == \"trainData\" :\n\t\t\tself.X = X_final\n\t\t\tself.target = Y_final\n\t\t\tself.cv_x = X_final\n\t\t\tself.cv_y = Y_final\n\t\t\tself.data = Data_final\n\t\treturn Data_final\n\n\n\t# clean the data, recover missing values, balance the dataset\n\tdef fixDataset(self, imputation_strategy = 'mice', balance_strategy = 'SMOTE'):\n\t\tprint (\"begin fixing dataset\")\n\t\tself.cleanData()\n\t\tcheck1 = self.data.copy()\n\t\tself.recoverMissing(imputation_strategy = imputation_strategy)\n\t\tcheck2 = self.X.copy()\n\t\tself.balanceDataSet(balance_strategy = balance_strategy)\n\t\tcheck3 = self.data.copy()\n\t\tprint (\"end fixing dataset\")\n\t\treturn (check1, check2, check3)\n\n\t# train the selected model\n\tdef train(self):\n\t\t## use all the available data -> we assume to know what is the best model -> otherwise use the crossvalidation function to choose a model\n\t\tprint (\"begin training\")\n\t\tself.selected_model.fit(self.X, self.target)\n\t\tprint (\"end training\")\n\n\n\t# predict using the trained model.
x_test is a vector \n\t# return the prediction for all values in the vector x_test, and all the other useful data (according to the selected_model used to predict)\n\tdef predict(self, test):\n\t\toriginal_x = test\n\t\ttrain = test.copy()\n\t\tx_test = self.cleanDataTest(test_x = train)\n\t\tx_test = self.recoverMissing(data = x_test)\n\t\tresult = x_test.copy()\n\t\tprediction = self.selected_model.predict(x_test)\n\t\tresult['prediction'] = prediction\n\t\t#decision_path = None\n\t\t#features_importance = None\n\t\t\n\t\t# hasattr already returns a bool; wrapping it in callable() always evaluated to False\n\t\tif hasattr(self.selected_model, \"predict_proba\"):\n\t\t\tpredict_proba_df = pd.DataFrame(self.selected_model.predict_proba(x_test), columns=self.selected_model.classes_)\n\t\t\tresult['predict_proba_zero'] = predict_proba_df[predict_proba_df.columns[0]]\n\t\t\tresult['predict_proba_uno'] = predict_proba_df[predict_proba_df.columns[1]]\n\t\t''' \n\t\tif hasattr(self.selected_model, \"predict_log_proba\"):\n\t\t\tpredict_log_proba_df = pd.DataFrame(self.selected_model.predict_log_proba(x_test), columns=self.selected_model.classes_)\n\t\t\tresult['predict_log_proba_zero'] = predict_log_proba_df[predict_log_proba_df.columns[0]]\n\t\t\tresult['predict_log_proba_uno'] = predict_log_proba_df[predict_log_proba_df.columns[1]]\n\t\t'''\n\t\treturn pd.DataFrame(result)\n\n\tdef splitDataframe(self, data, step = 20):\n\t\tsplits = []\n\t\ti = 0\n\t\tn = data.shape[0]\n\t\tif n > step:\n\t\t\twhile i < n:\n\t\t\t\tl = i + step\n\t\t\t\ttemp = data.iloc[i: l, :]\n\t\t\t\tsplits.append(temp)\n\t\t\t\ti += step\n\t\telse:\n\t\t\tsplits.append(data)\n\t\treturn splits\n" ]
[ [ "pandas.read_csv" ], [ "sklearn.ensemble.BaggingClassifier", "pandas.concat", "sklearn.naive_bayes.GaussianNB", "sklearn.linear_model.LogisticRegression", "sklearn.ensemble.RandomForestClassifier", "sklearn.ensemble.ExtraTreesClassifier", "sklearn.discriminant_analysis.LinearDiscriminantAnalysis", "pandas.DataFrame", "sklearn.neighbors.KNeighborsClassifier", "sklearn.model_selection.StratifiedKFold", "sklearn.tree.DecisionTreeClassifier", "numpy.std", "sklearn.ensemble.GradientBoostingClassifier", "numpy.average", "sklearn.linear_model.RidgeClassifier" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.1", "1.5", "1.2", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] } ]
sidneyp/bidirectional
[ "d3d1dbb727e5a25b4980646f1eb500245072f079", "d3d1dbb727e5a25b4980646f1eb500245072f079" ]
[ "cifar_cnn_three_conv.py", "mnist_nn_four_hidden.py" ]
[ "import tensorflow as tf\nimport keras\nfrom keras.datasets import cifar10\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.gridspec as gridspec\nimport os\nimport sys\nimport csv\nimport utils_csv\nimport utils_tf as utils\nfrom cleverhans.utils_tf import model_train, model_eval\nfrom cleverhans.attacks import FastGradientMethod\nfrom cleverhans.model import Model\nprint(\"Tensorflow version \" + tf.__version__)\n\nconfig_num = int(sys.argv[1]) if len(sys.argv) > 1 else 1 # Choose type of learning technique according to config_dict\nconfig_dict = {0: \"backprop\", 1: \"biprop\", 2: \"halfbiprop\", 3: \"nobias_backprop\", 4: \"nobias_biprop\", 5: \"nobias_halfbiprop\"}\n\nnum_classes = 10\n\nmodel_name = sys.argv[0].replace(\".py\", \"\") + \"_\" + config_dict[config_num]\nprint(\"Model name: \" + model_name)\n\n# load data\n# https://github.com/BIGBALLON/cifar-10-cnn/blob/master/1_Lecun_Network/LeNet_keras.py\n(x_train, y_train), (x_test, y_test) = cifar10.load_data()\ny_train = keras.utils.to_categorical(y_train, num_classes)\ny_test = keras.utils.to_categorical(y_test, num_classes)\nx_train = x_train.astype('float32')\nx_test = x_test.astype('float32')\nx_train /= 255\nx_test /= 255\n\n# for reproducibility\nnp.random.seed(0)\ntf.set_random_seed(0)\n\nsess = tf.InteractiveSession()\n\n# three convolutional layers with their channel counts, and a\n# fully connected layer (tha last layer has 10 softmax neurons)\nK = 4 # first convolutional layer output depth\nL = 8 # second convolutional layer output depth\nM = 12 # third convolutional layer\nN = 200 # fully connected layer\n\nwith tf.name_scope(\"input\"):\n # input X & output GX_: 28x28 grayscale images, the first dimension (None) will index the images in the mini-batch\n X = tf.placeholder(tf.float32, [None, 32, 32, 3])\n X_noisy = tf.placeholder(tf.float32, [None, 32, 32, 3])\n X_adv = tf.placeholder(tf.float32, [None, 32, 32, 3])\n\n GX_ = tf.placeholder(tf.float32, [None, 32, 32, 3])\n\n # output Y_ & input GY: labels for classification and generation\n Y_ = tf.placeholder(tf.float32, [None, num_classes])\n GY = tf.placeholder(tf.float32, [None, num_classes])\n\n # variable learning rate\n lr = tf.placeholder(tf.float32)\n\n # variable batch size\n BS = tf.placeholder(tf.int32)\n\n input_test_sum = tf.summary.image(\"input\", X, num_classes)\n input_noisy_sum = tf.summary.image(\"input-noisy\", X_noisy, num_classes)\n input_adv_sum = tf.summary.image(\"input-adv\", X_adv, num_classes)\n\nwith tf.name_scope(\"classifier-generator\"):\n C_W1 = utils.weight_variable([5, 5, 3, K], stddev=0.1, name=\"C_W1\")\n C_W2 = utils.weight_variable([5, 5, K, L], stddev=0.1, name=\"C_W2\")\n C_W3 = utils.weight_variable([4, 4, L, M], stddev=0.1, name=\"C_W3\")\n\n C_W4 = utils.weight_variable([8 * 8 * M, N], stddev=0.1, name=\"C_W4\")\n C_W5 = utils.weight_variable([N, num_classes], stddev=0.1, name=\"C_W5\")\n\ndef classifier(x, reuse=None):\n with tf.variable_scope(\"classifier\", reuse=reuse) as scope_c:\n # Variables for classifier\n C_B1 = utils.bias_variable([K], name=\"C_B1\")\n C_B2 = utils.bias_variable([L], name=\"C_B2\")\n C_B3 = utils.bias_variable([M], name=\"C_B3\")\n C_B4 = utils.bias_variable([N], name=\"C_B4\")\n C_B5 = utils.bias_variable([num_classes], name=\"C_B5\")\n\n stride = 1 # output is 32x32\n H1 = tf.nn.relu(tf.nn.conv2d(x, C_W1, strides=[1, stride, stride, 1], padding='SAME') + C_B1)\n stride = 2 # output is 16x16\n H2 = tf.nn.relu(tf.nn.conv2d(H1, C_W2, strides=[1, stride, stride, 1], 
padding='SAME') + C_B2)\n stride = 2 # output is 8x8\n H3 = tf.nn.relu(tf.nn.conv2d(H2, C_W3, strides=[1, stride, stride, 1], padding='SAME') + C_B3)\n\n # reshape the output from the third convolution for the fully connected layer\n HH3 = tf.reshape(H3, shape=[-1, 8 * 8 * M])\n\n H4 = tf.nn.relu(tf.matmul(HH3, C_W4) + C_B4)\n Ylogits = tf.matmul(H4, C_W5) + C_B5\n\n Ysigmoid = tf.nn.sigmoid(Ylogits)\n Ysoftmax = tf.nn.softmax(Ylogits)\n\n return Ysoftmax, Ysigmoid, Ylogits\n\nclass ClassifierModel(Model):\n def get_logits(self, x):\n Ysoftmax, Ysigmoid, Ylogits = classifier(x, reuse=True)\n return Ylogits\n\n# Generator of random input reuses weights of classifier\ndef generator(y, bs, reuse=None):\n with tf.variable_scope(\"generator\", reuse=reuse) as scope_g:\n # Variables for generator\n G_B1 = utils.bias_variable([3], name=\"G_B1\")\n G_B2 = utils.bias_variable([K], name=\"G_B2\")\n G_B3 = utils.bias_variable([L], name=\"G_B3\")\n G_B4 = utils.bias_variable([M*8*8], name=\"G_B4\")\n G_B5 = utils.bias_variable([N], name=\"G_B5\")\n\n GH4 = tf.nn.relu(tf.matmul(y, tf.transpose(C_W5)) + G_B5)\n GH3 = tf.nn.relu(tf.matmul(GH4, tf.transpose(C_W4)) + G_B4)\n GHH3 = tf.reshape(GH3, shape=[-1, 8, 8, M])\n stride = 2 # output is 16x16\n GH2 = tf.nn.relu(tf.nn.conv2d_transpose(GHH3, C_W3, output_shape=[bs, 16, 16, L], strides=[1, stride, stride, 1]) + G_B3) #deconv2 W3\n stride = 2 # output is 32x32\n GH1 = tf.nn.relu(tf.nn.conv2d_transpose(GH2, C_W2, output_shape=[bs, 32, 32, K], strides=[1, stride, stride, 1]) + G_B2)#deconv2 W2\n stride = 1 # output is 32x32\n GXlogits = tf.nn.conv2d_transpose(GH1, C_W1, output_shape=[bs, 32, 32, 3], strides=[1, stride, stride, 1]) + G_B1#deconv2 W1\n GXsigmoid = tf.nn.sigmoid(GXlogits)\n\n return GXsigmoid, GXlogits\n\ndef plot_generator(samples):\n if num_classes == 10:\n fig = plt.figure(figsize=(5, 2))\n gs = gridspec.GridSpec(2, 5)\n else:\n fig = plt.figure(figsize=(10, 10))\n gs = gridspec.GridSpec(10, 10)\n gs.update(wspace=0.05, hspace=0.05)\n for i, sample in enumerate(samples):\n ax = plt.subplot(gs[i])\n plt.axis('off')\n ax.set_xticklabels([])\n ax.set_yticklabels([])\n ax.set_aspect('equal')\n plt.imshow(sample.reshape((32,32,3)))\n\n return fig\n\nGXsigmoid, GXlogits = generator(GY, BS)\nGXsigmoid_test, GXlogits_test = generator(GY, BS, reuse=True)\n\nYsoftmax, Ysigmoid, Ylogits = classifier(X)\nmodel_classifier = ClassifierModel()\n\nYsoftmax_noisy, Ysigmoid_noisy, Ylogits_noisy = classifier(X_noisy, reuse=True)\nYsoftmax_adv, Ysigmoid_adv, Ylogits_adv = classifier(X_adv, reuse=True)\n\nwith tf.name_scope(\"loss\"):\n c_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=Ylogits, labels=Y_))\n\n g_loss = tf.reduce_mean(\n tf.nn.sigmoid_cross_entropy_with_logits(logits=GXlogits, labels=GX_))\n\n \"\"\" Summary \"\"\"\n g_loss_sum = tf.summary.scalar(\"g_loss\", g_loss)\n c_loss_sum = tf.summary.scalar(\"c_loss\", c_loss)\n\n# accuracy of the trained model, between 0 (worst) and 1 (best)\nwith tf.name_scope(\"accuracy\"):\n with tf.name_scope(\"correct_prediction\"):\n correct_prediction = tf.equal(tf.argmax(Ysoftmax, 1), tf.argmax(Y_, 1))\n with tf.name_scope(\"accuracy\"):\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n with tf.name_scope(\"correct_prediction_noisy\"):\n correct_prediction_noisy = tf.equal(tf.argmax(Ysoftmax_noisy, 1), tf.argmax(Y_, 1))\n with tf.name_scope(\"accuracy_noisy\"):\n accuracy_noisy = tf.reduce_mean(tf.cast(correct_prediction_noisy, tf.float32))\n with
tf.name_scope(\"correct_prediction_adv\"):\n correct_prediction_adv = tf.equal(tf.argmax(Ysoftmax_adv, 1), tf.argmax(Y_, 1))\n with tf.name_scope(\"accuracy_adv\"):\n accuracy_adv = tf.reduce_mean(tf.cast(correct_prediction_adv, tf.float32))\n\n \"\"\" Summary \"\"\"\n accuracy_sum = tf.summary.scalar(\"accuracy\", accuracy)\n accuracy_noisy_sum = tf.summary.scalar(\"accuracy_noisy\", accuracy_noisy)\n accuracy_adv_sum = tf.summary.scalar(\"accuracy_adv\", accuracy_adv)\n\nwith tf.name_scope(\"max_output\"):\n with tf.name_scope(\"max_output_test\"):\n max_output_sigmoid_test = tf.reduce_max(Ysigmoid)\n max_output_softmax_test = tf.reduce_max(Ysoftmax)\n with tf.name_scope(\"max_output_noise\"):\n max_output_sigmoid_noise = tf.reduce_max(Ysigmoid_noisy)\n max_output_softmax_noise = tf.reduce_max(Ysoftmax_noisy)\n with tf.name_scope(\"max_output_adv\"):\n max_output_sigmoid_adv = tf.reduce_max(Ysigmoid_adv)\n max_output_softmax_adv = tf.reduce_max(Ysoftmax_adv)\n\n \"\"\" Summary \"\"\"\n max_output_sigmoid_test_sum = tf.summary.scalar(\"max_output_sigmoid_test\", max_output_sigmoid_test)\n max_output_softmax_test_sum = tf.summary.scalar(\"max_output_softmax_test\", max_output_softmax_test)\n max_output_sigmoid_noise_sum = tf.summary.scalar(\"max_output_sigmoid_noise\", max_output_sigmoid_noise)\n max_output_softmax_noise_sum = tf.summary.scalar(\"max_output_softmax_noise\", max_output_softmax_noise)\n max_output_sigmoid_adv_sum = tf.summary.scalar(\"max_output_sigmoid_adv\", max_output_sigmoid_adv)\n max_output_softmax_adv_sum = tf.summary.scalar(\"max_output_softmax_adv\", max_output_softmax_adv)\n\nutils.show_all_variables()\nt_vars = tf.trainable_variables()\nc_vars = [var for var in t_vars if 'C_' in var.name]\\\n if config_num < 3 else [var for var in t_vars if 'C_W' in var.name]\ng_vars = [var for var in t_vars if 'C_W' in var.name or 'G_' in var.name]\\\n if config_num < 3 else c_vars\n\n# training step\nlearning_rate_dis = lr\nlearning_rate_gen = lr\n\nwith tf.name_scope(\"train\"):\n c_train = tf.train.AdamOptimizer(learning_rate_dis).minimize(c_loss, var_list=c_vars)\n g_train = tf.train.AdamOptimizer(learning_rate_gen).minimize(g_loss, var_list=g_vars)\n\n# final summary operations\ng_sum = tf.summary.merge([g_loss_sum])\nc_sum = tf.summary.merge([input_test_sum, accuracy_sum, c_loss_sum, max_output_sigmoid_test_sum, max_output_softmax_test_sum])\nnoise_sum = tf.summary.merge([max_output_sigmoid_noise_sum, max_output_softmax_noise_sum])\nnoisy_sum = tf.summary.merge([input_noisy_sum, accuracy_noisy_sum])\nadv_sum = tf.summary.merge([input_adv_sum, accuracy_adv_sum, max_output_sigmoid_adv_sum, max_output_softmax_adv_sum])\n\nfolder_out = 'out/' + model_name + '/'\nif not os.path.exists(folder_out):\n os.makedirs(folder_out)\n\nfolder_csv = 'csv/' + model_name + '/'\nif not os.path.exists(folder_csv):\n os.makedirs(folder_csv)\n\nfolder_logs = 'logs/' + model_name\nif not os.path.exists(folder_csv):\n os.makedirs(folder_logs)\n\nwriter = tf.summary.FileWriter(folder_logs, sess.graph)\n\nbatch_size = 100\nnum_train_images = x_train.shape[0]\nnum_batches = num_train_images // batch_size\nall_classes = np.eye(num_classes)\n\ncounter = 0\n\nfgsm_params = {'eps': 0.03,\n 'clip_min': 0.,\n 'clip_max': 1.}\n\nrandom_noise = np.random.random_sample(x_test.shape)\ntest_image_with_noise = np.clip(x_test + 0.1*random_noise, 0., 1.)\n\naccuracy_list = []\nsigmoid_list = []\nsoftmax_list = []\n\n# initialize all variables\ntf.global_variables_initializer().run()\n\nfor i in range(50001):\n if 
i % num_batches == 0:\n idx_train = np.arange(x_train.shape[0])\n np.random.shuffle(idx_train)\n x_train, y_train = x_train[idx_train], y_train[idx_train]\n \n idx = i % num_batches\n batch_X = x_train[idx*batch_size:(idx+1)*batch_size]\n batch_Y = y_train[idx*batch_size:(idx+1)*batch_size]\n\n # learning rate decay\n max_learning_rate = 0.003\n min_learning_rate = 0.0001\n decay_speed = 2000.0\n learning_rate = min_learning_rate + (max_learning_rate - min_learning_rate) * np.exp(-i/decay_speed)\n\n if i % 500 == 0 or i == 50000:\n counter += 1\n # Saves generated images\n samples = sess.run(GXsigmoid_test, feed_dict={GY: all_classes, BS: num_classes})\n fig = plot_generator(samples)\n plt.savefig(folder_out+\"gen_\"+str(i).zfill(6)+'.png', bbox_inches='tight')\n plt.close(fig)\n\n attack_fgsm = FastGradientMethod(model_classifier, sess=sess)\n adv_x_np = attack_fgsm.generate_np(x_test, **fgsm_params)\n fig = plot_generator(adv_x_np[:num_classes])\n plt.savefig(folder_out+\"adv_\"+str(i).zfill(6)+'.png', bbox_inches='tight')\n plt.close(fig)\n\n accu_test, c_loss_test, sigmoid_test, softmax_test, sum_c = sess.run([accuracy, c_loss, max_output_sigmoid_test, max_output_softmax_test, c_sum], {X: x_test, Y_: y_test})\n writer.add_summary(sum_c, i)\n g_loss_test, sum_g = sess.run([g_loss, g_sum], {GY: batch_Y, GX_: batch_X, BS: batch_size})\n writer.add_summary(sum_g, i)\n\n print(str(i) + \": epoch \" + str(i*batch_size//x_train.shape[0]+1)\\\n + \" - test loss class: \" + str(c_loss_test) + \" test loss gen: \" + str(g_loss_test))\n print(\"Real test images - Sigmoid: \" + str(sigmoid_test) + \"\\tSoftmax: \" + str(softmax_test) + \"\\taccuracy: \"+ str(accu_test))\n\n sigmoid_random, softmax_random, sum_random = sess.run([max_output_sigmoid_noise, max_output_softmax_noise, noise_sum], {X_noisy: random_noise})\n writer.add_summary(sum_random, i)\n accu_random, sum_noisy = sess.run([accuracy_noisy, noisy_sum], {X_noisy: test_image_with_noise, Y_: y_test})\n writer.add_summary(sum_noisy, i)\n print(\"Random noise images - Sigmoid: \" + str(sigmoid_random) + \"\\tSoftmax: \" + str(softmax_random) + \"\\taccuracy: \"+ str(accu_random))\n\n accu_adv, sigmoid_adv, softmax_adv, sum_adv = sess.run([accuracy_adv, max_output_sigmoid_adv, max_output_softmax_adv, adv_sum], {X_adv: adv_x_np, Y_: y_test})\n writer.add_summary(sum_adv, i)\n print(\"Adversarial examples - Sigmoid: \" + str(sigmoid_adv) + \"\\tSoftmax: \" + str(softmax_adv) + \"\\taccuracy: \"+ str(accu_adv))\n print()\n accuracy_list.append([i, accu_test, accu_random, accu_adv, counter])\n sigmoid_list.append([i, sigmoid_test, sigmoid_random, sigmoid_adv, counter])\n softmax_list.append([i, softmax_test, softmax_random, softmax_adv, counter])\n\n sess.run(c_train, {X: batch_X, Y_: batch_Y, lr: learning_rate})\n if config_num == 1 or (config_num == 2 and i < 25000) or\\\n config_num == 4 or (config_num == 5 and i < 25000):\n sess.run(g_train, {GY: batch_Y, GX_: batch_X, lr: learning_rate, BS: batch_size})\n\nwriter.close()\n\n# Save data in csv\nwith open(folder_csv+\"accuracy.csv\", \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(accuracy_list)\n\nwith open(folder_csv+\"sigmoid.csv\", \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(sigmoid_list)\n\nwith open(folder_csv+\"softmax.csv\", \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(softmax_list)\n\n# Load data in csv\naccu_data = 
utils_csv.get_data_csv_file(folder_csv+\"accuracy.csv\")\nsigmoid_data = utils_csv.get_data_csv_file(folder_csv+\"sigmoid.csv\")\nsoftmax_data = utils_csv.get_data_csv_file(folder_csv+\"softmax.csv\")\n\n# Print best values\nutils_csv.print_best(accu_data, sigmoid_data, softmax_data, folder_csv+\"summary.txt\")\n", "import tensorflow as tf\nfrom tensorflow.examples.tutorials.mnist import input_data\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.gridspec as gridspec\nimport os\nimport sys\nimport csv\nimport utils_csv\nimport utils_tf as utils\nfrom cleverhans.utils_tf import model_train, model_eval\nfrom cleverhans.attacks import FastGradientMethod\nfrom cleverhans.model import Model\nprint(\"Tensorflow version \" + tf.__version__)\n\nconfig_num = int(sys.argv[1]) if len(sys.argv) > 1 else 1 # Choose type of learning technique according to config_dict\nconfig_dict = {0: \"backprop\", 1: \"biprop\", 2: \"halfbiprop\", 3: \"nobias_backprop\", 4: \"nobias_biprop\", 5: \"nobias_halfbiprop\"}\n\nmodel_name = sys.argv[0].replace(\".py\", \"\") + \"_\" + config_dict[config_num]\nprint(\"Model name: \" + model_name)\n\n# for reproducibility\nnp.random.seed(0)\ntf.set_random_seed(0)\n\n# Download images and labels into mnist.test (10K images+labels) and mnist.train (60K images+labels)\nmnist = input_data.read_data_sets(\"data/mnist\", one_hot=True, reshape=False, validation_size=0)\n\nsess = tf.InteractiveSession()\n\n# four hidden layers and their number of neurons\nL = 200\nM = 100\nN = 60\nO = 30\n\nwith tf.name_scope(\"input\"):\n # input X & output GX_: 28x28 grayscale images, the first dimension (None) will index the images in the mini-batch\n X = tf.placeholder(tf.float32, [None, 28, 28, 1])\n X_noisy = tf.placeholder(tf.float32, [None, 28, 28, 1])\n X_adv = tf.placeholder(tf.float32, [None, 28, 28, 1])\n\n GX_ = tf.placeholder(tf.float32, [None, 28, 28, 1])\n\n # output Y_ & input GY: labels for classification and generation\n Y_ = tf.placeholder(tf.float32, [None, 10])\n GY = tf.placeholder(tf.float32, [None, 10])\n\n input_test_sum = tf.summary.image(\"input\", X, 10)\n input_noisy_sum = tf.summary.image(\"input-noisy\", X_noisy, 10)\n input_adv_sum = tf.summary.image(\"input-adv\", X_adv, 10)\n\nwith tf.name_scope(\"classifier-generator\"):\n # Weights for classifier and generator\n C_W1 = utils.weight_variable([784, L], stddev=0.1, name=\"C_W1\")\n C_W2 = utils.weight_variable([L, M], stddev=0.1, name=\"C_W2\")\n C_W3 = utils.weight_variable([M, N], stddev=0.1, name=\"C_W3\")\n C_W4 = utils.weight_variable([N, O], stddev=0.1, name=\"C_W4\")\n C_W5 = utils.weight_variable([O, 10], stddev=0.1, name=\"C_W5\")\n\ndef classifier(x, reuse=None):\n with tf.variable_scope(\"classifier\", reuse=reuse) as scope_c:\n # Variables for classifier\n C_B1 = utils.bias_variable([L], name=\"C_B1\")\n C_B2 = utils.bias_variable([M], name=\"C_B2\")\n C_B3 = utils.bias_variable([N], name=\"C_B3\")\n C_B4 = utils.bias_variable([O], name=\"C_B4\")\n C_B5 = utils.bias_variable([10], name=\"C_B5\")\n\n XX = tf.reshape(x, [-1, 784])\n H1 = tf.nn.sigmoid(tf.matmul(XX, C_W1) + C_B1)\n H2 = tf.nn.sigmoid(tf.matmul(H1, C_W2) + C_B2)\n H3 = tf.nn.sigmoid(tf.matmul(H2, C_W3) + C_B3)\n H4 = tf.nn.sigmoid(tf.matmul(H3, C_W4) + C_B4)\n Ylogits = tf.matmul(H4, C_W5) + C_B5\n\n Ysigmoid = tf.nn.sigmoid(Ylogits)\n Ysoftmax = tf.nn.softmax(Ylogits)\n\n return Ysoftmax, Ysigmoid, Ylogits\n\n\nclass ClassifierModel(Model):\n def get_logits(self, x):\n Ysoftmax, Ysigmoid, Ylogits = classifier(x, 
reuse=True)\n return Ylogits\n\n# Generator of random input reuses weights of classifier\ndef generator(y, reuse=None):\n with tf.variable_scope(\"generator\", reuse=reuse) as scope_g:\n # Variables for generator\n G_B1 = utils.bias_variable([784], name=\"G_B1\")\n G_B2 = utils.bias_variable([L], name=\"G_B2\")\n G_B3 = utils.bias_variable([M], name=\"G_B3\")\n G_B4 = utils.bias_variable([N], name=\"G_B4\")\n G_B5 = utils.bias_variable([O], name=\"G_B5\")\n\n GH4 = tf.nn.sigmoid(tf.matmul(y, tf.transpose(C_W5)) + G_B5)\n GH3 = tf.nn.sigmoid(tf.matmul(GH4, tf.transpose(C_W4)) + G_B4)\n GH2 = tf.nn.sigmoid(tf.matmul(GH3, tf.transpose(C_W3)) + G_B3)\n GH1 = tf.nn.sigmoid(tf.matmul(GH2, tf.transpose(C_W2)) + G_B2)\n GX = tf.matmul(GH1, tf.transpose(C_W1)) + G_B1\n GXlogits = tf.reshape(GX, [-1, 28, 28, 1])\n GXsigmoid = tf.nn.sigmoid(GXlogits)\n\n return GXsigmoid, GXlogits\n\ndef plot_generator(samples):\n fig = plt.figure(figsize=(5, 2))\n gs = gridspec.GridSpec(2, 5)\n gs.update(wspace=0.05, hspace=0.05)\n for i, sample in enumerate(samples):\n ax = plt.subplot(gs[i])\n plt.axis('off')\n ax.set_xticklabels([])\n ax.set_yticklabels([])\n ax.set_aspect('equal')\n plt.imshow(sample.reshape((28,28)), cmap='gray')\n\n return fig\n\ndef plot_first_hidden(weights):\n max_abs_val = max(abs(np.max(weights)), abs(np.min(weights)))\n fig = plt.figure(figsize=(20, 10))\n gs = gridspec.GridSpec(10, 20)\n gs.update(wspace=0.1, hspace=0.1)\n\n for i, weight in enumerate(np.transpose(weights)):\n ax = plt.subplot(gs[i])\n ax.set_xticklabels([])\n ax.set_yticklabels([])\n ax.set_aspect('equal')\n im = plt.imshow(weight.reshape((28,28)), cmap=\"seismic_r\", vmin=-max_abs_val, vmax=max_abs_val)\n\n # Adding colorbar\n # https://stackoverflow.com/questions/13784201/matplotlib-2-subplots-1-colorbar\n fig.subplots_adjust(right=0.8)\n cbar_ax = fig.add_axes([0.85, 0.15, 0.015, 0.7])\n fig.colorbar(im, cax=cbar_ax, ticks=[-max_abs_val, 0, max_abs_val])\n\n return fig\n\nGXsigmoid, GXlogits = generator(GY)\nGXsigmoid_test, GXlogits_test = generator(GY, reuse=True)\n\nYsoftmax, Ysigmoid, Ylogits = classifier(X)\nmodel_classifier = ClassifierModel()\n\nYsoftmax_noisy, Ysigmoid_noisy, Ylogits_noisy = classifier(X_noisy, reuse=True)\nYsoftmax_adv, Ysigmoid_adv, Ylogits_adv = classifier(X_adv, reuse=True)\n\nwith tf.name_scope(\"loss\"):\n c_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=Ylogits, labels=Y_))\n\n g_loss = tf.reduce_mean(\n tf.nn.sigmoid_cross_entropy_with_logits(logits=GXlogits, labels=GX_))\n\n \"\"\" Summary \"\"\"\n g_loss_sum = tf.summary.scalar(\"g_loss\", g_loss)\n c_loss_sum = tf.summary.scalar(\"c_loss\", c_loss)\n\n# accuracy of the trained model, between 0 (worst) and 1 (best)\nwith tf.name_scope(\"accuracy\"):\n with tf.name_scope(\"correct_prediction\"):\n correct_prediction = tf.equal(tf.argmax(Ysoftmax, 1), tf.argmax(Y_, 1))\n with tf.name_scope(\"accuracy\"):\n accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n with tf.name_scope(\"correct_prediction_noisy\"):\n correct_prediction_noisy = tf.equal(tf.argmax(Ysoftmax_noisy, 1), tf.argmax(Y_, 1))\n with tf.name_scope(\"accuracy_noisy\"):\n accuracy_noisy = tf.reduce_mean(tf.cast(correct_prediction_noisy, tf.float32))\n with tf.name_scope(\"correct_prediction_adv\"):\n correct_prediction_adv = tf.equal(tf.argmax(Ysoftmax_adv, 1), tf.argmax(Y_, 1))\n with tf.name_scope(\"accuracy_adv\"):\n accuracy_adv = tf.reduce_mean(tf.cast(correct_prediction_adv, tf.float32))\n\n \"\"\" Summary \"\"\"\n accuracy_sum
= tf.summary.scalar(\"accuracy\", accuracy)\n accuracy_noisy_sum = tf.summary.scalar(\"accuracy_noisy\", accuracy_noisy)\n accuracy_adv_sum = tf.summary.scalar(\"accuracy_adv\", accuracy_adv)\n\nwith tf.name_scope(\"max_output\"):\n with tf.name_scope(\"max_output_test\"):\n max_output_sigmoid_test = tf.reduce_max(Ysigmoid)\n max_output_softmax_test = tf.reduce_max(Ysoftmax)\n with tf.name_scope(\"max_output_noise\"):\n max_output_sigmoid_noise = tf.reduce_max(Ysigmoid_noisy)\n max_output_softmax_noise = tf.reduce_max(Ysoftmax_noisy)\n with tf.name_scope(\"max_output_adv\"):\n max_output_sigmoid_adv = tf.reduce_max(Ysigmoid_adv)\n max_output_softmax_adv = tf.reduce_max(Ysoftmax_adv)\n\n \"\"\" Summary \"\"\"\n max_output_sigmoid_test_sum = tf.summary.scalar(\"max_output_sigmoid_test\", max_output_sigmoid_test)\n max_output_softmax_test_sum = tf.summary.scalar(\"max_output_softmax_test\", max_output_softmax_test)\n max_output_sigmoid_noise_sum = tf.summary.scalar(\"max_output_sigmoid_noise\", max_output_sigmoid_noise)\n max_output_softmax_noise_sum = tf.summary.scalar(\"max_output_softmax_noise\", max_output_softmax_noise)\n max_output_sigmoid_adv_sum = tf.summary.scalar(\"max_output_sigmoid_adv\", max_output_sigmoid_adv)\n max_output_softmax_adv_sum = tf.summary.scalar(\"max_output_softmax_adv\", max_output_softmax_adv)\n\nutils.show_all_variables()\nt_vars = tf.trainable_variables()\nc_vars = [var for var in t_vars if 'C_' in var.name]\\\n if config_num < 3 else [var for var in t_vars if 'C_W' in var.name]\ng_vars = [var for var in t_vars if 'C_W' in var.name or 'G_' in var.name]\\\n if config_num < 3 else c_vars\n\n# training step\nlearning_rate_dis = 0.003 if config_num == 0 or config_num == 3 else 0.03\nlearning_rate_gen = 0.01\n\nwith tf.name_scope(\"train\"):\n c_train = tf.train.AdamOptimizer(learning_rate_dis).minimize(c_loss, var_list=c_vars)\n g_train = tf.train.AdamOptimizer(learning_rate_gen).minimize(g_loss, var_list=g_vars)\n\n# final summary operations\ng_sum = tf.summary.merge([g_loss_sum])\nc_sum = tf.summary.merge([input_test_sum, accuracy_sum, c_loss_sum, max_output_sigmoid_test_sum, max_output_softmax_test_sum])\nnoise_sum = tf.summary.merge([max_output_sigmoid_noise_sum, max_output_softmax_noise_sum])\nnoisy_sum = tf.summary.merge([input_noisy_sum, accuracy_noisy_sum])\nadv_sum = tf.summary.merge([input_adv_sum, accuracy_adv_sum, max_output_sigmoid_adv_sum, max_output_softmax_adv_sum])\n\nfolder_out = 'out/' + model_name + '/'\nif not os.path.exists(folder_out):\n os.makedirs(folder_out)\n\nfolder_csv = 'csv/' + model_name + '/'\nif not os.path.exists(folder_csv):\n os.makedirs(folder_csv)\n\nfolder_logs = 'logs/' + model_name\nif not os.path.exists(folder_csv):\n os.makedirs(folder_logs)\n\nwriter = tf.summary.FileWriter(folder_logs, sess.graph)\n\nbatch_size = 100\nnum_train_images = mnist.train.images.shape[0]\nnum_batches = num_train_images // batch_size\nall_classes = np.eye(10)\n\ncounter = 0\n\nfgsm_params = {'eps': 0.3,\n 'clip_min': 0.,\n 'clip_max': 1.}\n\nrandom_noise = np.random.random_sample(mnist.test.images.shape)\ntest_image_with_noise = np.clip(mnist.test.images + 0.1*random_noise, 0., 1.)\n\naccuracy_list = []\nsigmoid_list = []\nsoftmax_list = []\n\n# initialize all variables\ntf.global_variables_initializer().run()\n\nfor i in range(50001):\n batch_X, batch_Y = mnist.train.next_batch(batch_size)\n\n if i % 500 == 0 or i == 50000:\n counter += 1\n # Saves generated images\n samples = sess.run(GXsigmoid_test, feed_dict={GY: all_classes})\n fig = 
plot_generator(samples)\n plt.savefig(folder_out+\"gen_\"+str(i).zfill(6)+'.png', bbox_inches='tight')\n plt.close(fig)\n\n fig = plot_first_hidden(C_W1.eval(session=sess))\n plt.savefig(folder_out+\"hidden_\"+str(i).zfill(6)+'.png', bbox_inches='tight')\n plt.close(fig)\n\n attack_fgsm = FastGradientMethod(model_classifier, sess=sess)\n adv_x_np = attack_fgsm.generate_np(mnist.test.images, **fgsm_params)\n fig = plot_generator(adv_x_np[:10])\n plt.savefig(folder_out+\"adv_\"+str(i).zfill(6)+'.png', bbox_inches='tight')\n plt.close(fig)\n\n accu_test, c_loss_test, sigmoid_test, softmax_test, sum_c = sess.run([accuracy, c_loss, max_output_sigmoid_test, max_output_softmax_test, c_sum], {X: mnist.test.images, Y_: mnist.test.labels})\n writer.add_summary(sum_c, i)\n g_loss_test, sum_g = sess.run([g_loss, g_sum], {GY: batch_Y, GX_: batch_X})\n writer.add_summary(sum_g, i)\n\n print(str(i) + \": epoch \" + str(i*batch_size//mnist.train.images.shape[0]+1)\\\n + \" - test loss class: \" + str(c_loss_test) + \" test loss gen: \" + str(g_loss_test))\n print(\"Real test images - Sigmoid: \" + str(sigmoid_test) + \"\\tSoftmax: \" + str(softmax_test) + \"\\taccuracy: \"+ str(accu_test))\n\n sigmoid_random, softmax_random, sum_random = sess.run([max_output_sigmoid_noise, max_output_softmax_noise, noise_sum], {X_noisy: random_noise})\n writer.add_summary(sum_random, i)\n accu_random, sum_noisy = sess.run([accuracy_noisy, noisy_sum], {X_noisy: test_image_with_noise, Y_: mnist.test.labels})\n writer.add_summary(sum_noisy, i)\n print(\"Random noise images - Sigmoid: \" + str(sigmoid_random) + \"\\tSoftmax: \" + str(softmax_random) + \"\\taccuracy: \"+ str(accu_random))\n\n accu_adv, sigmoid_adv, softmax_adv, sum_adv = sess.run([accuracy_adv, max_output_sigmoid_adv, max_output_softmax_adv, adv_sum], {X_adv: adv_x_np, Y_: mnist.test.labels})\n writer.add_summary(sum_adv, i)\n print(\"Adversarial examples - Sigmoid: \" + str(sigmoid_adv) + \"\\tSoftmax: \" + str(softmax_adv) + \"\\taccuracy: \"+ str(accu_adv))\n print()\n accuracy_list.append([i, accu_test, accu_random, accu_adv, counter])\n sigmoid_list.append([i, sigmoid_test, sigmoid_random, sigmoid_adv, counter])\n softmax_list.append([i, softmax_test, softmax_random, softmax_adv, counter])\n\n sess.run(c_train, {X: batch_X, Y_: batch_Y})\n if config_num == 1 or (config_num == 2 and i < 25000) or\\\n config_num == 4 or (config_num == 5 and i < 25000):\n sess.run(g_train, {GY: batch_Y, GX_: batch_X})\n\nwriter.close()\n\n# Save data in csv\nwith open(folder_csv+\"accuracy.csv\", \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(accuracy_list)\n\nwith open(folder_csv+\"sigmoid.csv\", \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(sigmoid_list)\n\nwith open(folder_csv+\"softmax.csv\", \"w\") as output:\n writer = csv.writer(output, lineterminator='\\n')\n writer.writerows(softmax_list)\n\n# Load data in csv\naccu_data = utils_csv.get_data_csv_file(folder_csv+\"accuracy.csv\")\nsigmoid_data = utils_csv.get_data_csv_file(folder_csv+\"sigmoid.csv\")\nsoftmax_data = utils_csv.get_data_csv_file(folder_csv+\"softmax.csv\")\n\n# Print best values\nutils_csv.print_best(accu_data, sigmoid_data, softmax_data, folder_csv+\"summary.txt\")\n" ]
[ [ "tensorflow.nn.softmax_cross_entropy_with_logits", "tensorflow.cast", "numpy.random.random_sample", "tensorflow.nn.conv2d_transpose", "tensorflow.nn.sigmoid_cross_entropy_with_logits", "tensorflow.train.AdamOptimizer", "numpy.exp", "tensorflow.summary.scalar", "tensorflow.nn.conv2d", "numpy.clip", "tensorflow.summary.image", "numpy.eye", "numpy.arange", "matplotlib.pyplot.subplot", "tensorflow.name_scope", "matplotlib.gridspec.GridSpec", "matplotlib.pyplot.axis", "tensorflow.trainable_variables", "matplotlib.pyplot.close", "tensorflow.argmax", "matplotlib.pyplot.figure", "tensorflow.matmul", "tensorflow.nn.sigmoid", "tensorflow.InteractiveSession", "tensorflow.placeholder", "tensorflow.global_variables_initializer", "tensorflow.set_random_seed", "tensorflow.summary.merge", "tensorflow.reduce_max", "tensorflow.nn.softmax", "tensorflow.summary.FileWriter", "tensorflow.transpose", "numpy.random.seed", "tensorflow.reshape", "numpy.random.shuffle", "tensorflow.variable_scope" ], [ "tensorflow.nn.softmax_cross_entropy_with_logits", "tensorflow.cast", "numpy.random.random_sample", "tensorflow.nn.sigmoid_cross_entropy_with_logits", "numpy.max", "tensorflow.train.AdamOptimizer", "tensorflow.summary.scalar", "numpy.clip", "tensorflow.summary.image", "numpy.eye", "matplotlib.pyplot.subplot", "tensorflow.name_scope", "matplotlib.gridspec.GridSpec", "matplotlib.pyplot.axis", "tensorflow.trainable_variables", "matplotlib.pyplot.close", "tensorflow.argmax", "tensorflow.examples.tutorials.mnist.input_data.read_data_sets", "matplotlib.pyplot.figure", "tensorflow.matmul", "tensorflow.nn.sigmoid", "tensorflow.InteractiveSession", "numpy.min", "tensorflow.placeholder", "tensorflow.global_variables_initializer", "numpy.transpose", "tensorflow.set_random_seed", "tensorflow.summary.merge", "tensorflow.reduce_max", "tensorflow.nn.softmax", "tensorflow.summary.FileWriter", "tensorflow.transpose", "numpy.random.seed", "tensorflow.reshape", "tensorflow.variable_scope" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] } ]
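The record above closes a TF1-style training loop that tracks classifier accuracy on clean, random-noise, and FGSM-adversarial MNIST inputs, logging results to TensorBoard and CSV. The adversarial inputs produced by cleverhans' FastGradientMethod amount to one signed-gradient step; a minimal NumPy sketch of that idea follows, where `grad_fn` and `eps` are illustrative assumptions, not values taken from the record:

import numpy as np

def fgsm_perturb(x, grad_fn, eps=0.25):
    # grad_fn is a hypothetical callable returning dLoss/dInput for the
    # batch x; in the record above cleverhans computes this internally.
    grad = grad_fn(x)
    x_adv = x + eps * np.sign(grad)   # one step along the gradient sign
    return np.clip(x_adv, 0.0, 1.0)   # keep pixels in the valid [0, 1] range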
kathryn-garside/PyDMD-fork
[ "0158c4144019f0899ce34ec44286b0f700c56b38", "0158c4144019f0899ce34ec44286b0f700c56b38", "0158c4144019f0899ce34ec44286b0f700c56b38" ]
[ "pydmd/hankeldmd.py", "tests/test_dmd.py", "pydmd/dmd_modes_tuner.py" ]
[ "\"\"\"\nDerived module from dmdbase.py for hankel dmd.\n\nReference:\n- H. Arbabi, I. Mezic, Ergodic theory, dynamic mode decomposition, and\ncomputation of spectral properties of the Koopman operator. SIAM Journal on\nApplied Dynamical Systems, 2017, 16.4: 2096-2126.\n\"\"\"\nfrom copy import copy\n\nimport numpy as np\n\nfrom .dmdbase import DMDBase\nfrom .dmd import DMD\n\n\nclass HankelDMD(DMDBase):\n \"\"\"\n Hankel Dynamic Mode Decomposition\n\n :param svd_rank: the rank for the truncation; If 0, the method computes the\n optimal rank and uses it for truncation; if positive interger, the\n method uses the argument for the truncation; if float between 0 and 1,\n the rank is the number of the biggest singular values that are needed\n to reach the 'energy' specified by `svd_rank`; if -1, the method does\n not compute truncation.\n :type svd_rank: int or float\n :param int tlsq_rank: rank truncation computing Total Least Square. Default\n is 0, that means no truncation.\n :param bool exact: flag to compute either exact DMD or projected DMD.\n Default is False.\n :param opt: argument to control the computation of DMD modes amplitudes.\n See :class:`DMDBase`. Default is False.\n :type opt: bool or int\n :param rescale_mode: Scale Atilde as shown in\n 10.1016/j.jneumeth.2015.10.010 (section 2.4) before computing its\n eigendecomposition. None means no rescaling, 'auto' means automatic\n rescaling using singular values, otherwise the scaling factors.\n :type rescale_mode: {'auto'} or None or numpy.ndarray\n :param bool forward_backward: If True, the low-rank operator is computed\n like in fbDMD (reference: https://arxiv.org/abs/1507.02264). Default is\n False.\n :param int d: the new order for spatial dimension of the input snapshots.\n Default is 1.\n :param sorted_eigs: Sort eigenvalues (and modes/dynamics accordingly) by\n magnitude if `sorted_eigs='abs'`, by real part (and then by imaginary\n part to break ties) if `sorted_eigs='real'`. Default: False.\n :type sorted_eigs: {'real', 'abs'} or False\n :param reconstruction_method: Method used to reconstruct the snapshots of\n the dynamical system from the multiple versions available due to how\n HankelDMD is conceived. If `'first'` (default) the first version\n available is selected (i.e. the nearest to the 0-th row in the\n augmented matrix). If `'mean'` we compute the element-wise mean. 
If\n `reconstruction_method` is an array of float values we compute the\n weighted average (for each snapshot) using the given values as weights\n (the number of weights must be equal to `d`).\n :type reconstruction_method: {'first', 'mean'} or array-like\n \"\"\"\n\n def __init__(\n self,\n svd_rank=0,\n tlsq_rank=0,\n exact=False,\n opt=False,\n rescale_mode=None,\n forward_backward=False,\n d=1,\n sorted_eigs=False,\n reconstruction_method=\"first\",\n ):\n super().__init__(\n svd_rank=svd_rank,\n tlsq_rank=tlsq_rank,\n exact=exact,\n opt=opt,\n rescale_mode=rescale_mode,\n sorted_eigs=sorted_eigs,\n )\n self._d = d\n\n if isinstance(reconstruction_method, list):\n if len(reconstruction_method) != d:\n raise ValueError(\n \"The length of the array of weights must be equal to d\"\n )\n elif isinstance(reconstruction_method, np.ndarray):\n if (\n reconstruction_method.ndim > 1\n or reconstruction_method.shape[0] != d\n ):\n raise ValueError(\n \"The length of the array of weights must be equal to d\"\n )\n self._reconstruction_method = reconstruction_method\n\n self._sub_dmd = DMD(\n svd_rank=svd_rank,\n tlsq_rank=tlsq_rank,\n exact=exact,\n opt=opt,\n rescale_mode=rescale_mode,\n forward_backward=forward_backward,\n sorted_eigs=sorted_eigs,\n )\n\n @property\n def d(self):\n \"\"\"The new order for spatial dimension of the input snapshots.\"\"\"\n return self._d\n\n def _hankel_first_occurrence(self, time):\n r\"\"\"\n For a given `t` such that there is :math:`k \\in \\mathbb{N}` such that\n :math:`t = t_0 + k dt`, return the index of the first column in Hankel\n pseudo matrix (see also :func:`_pseudo_hankel_matrix`) which contains\n the snapshot corresponding to `t`.\n\n :param time: The time corresponding to the requested snapshot.\n :return: The index of the first appearance of `time` in the columns of\n Hankel pseudo matrix.\n :rtype: int\n \"\"\"\n return max(\n 0,\n (time - self.original_time[\"t0\"]) // self.dmd_time[\"dt\"]\n - (self.original_time[\"t0\"] + self.d - 1),\n )\n\n def _update_sub_dmd_time(self):\n \"\"\"\n Update the time dictionaries (`dmd_time` and `original_time`) of\n the auxiliary DMD instance `HankelDMD._sub_dmd` after an update of the\n time dictionaries of this higher-level `HankelDMD` instance.\n \"\"\"\n self._sub_dmd.dmd_time[\"t0\"] = self._hankel_first_occurrence(\n self.dmd_time[\"t0\"]\n )\n self._sub_dmd.dmd_time[\"tend\"] = self._hankel_first_occurrence(\n self.dmd_time[\"tend\"]\n )\n\n def reconstructions_of_timeindex(self, timeindex=None):\n \"\"\"\n Build a collection of all the available versions of the given\n `timeindex`. The indexing of time instants is the same as the one used\n for :func:`reconstructed_data`. For each time instant there are between\n one and `d` versions. If `timeindex` is `None` the function\n returns the whole collection, for all the time instants.\n\n :param int timeindex: The index of the time snapshot.\n :return: a collection of all the available versions for the given\n time snapshot, or for all the time snapshots if `timeindex` is\n `None` (in the second case, time varies along the first dimension\n of the array returned).\n :rtype: numpy.ndarray or list\n \"\"\"\n self._update_sub_dmd_time()\n\n rec = self._sub_dmd.reconstructed_data\n space_dim = rec.shape[0] // self.d\n time_instants = rec.shape[1] + self.d - 1\n\n # for each time instant, we collect all its appearances. 
each\n # snapshot appears at most d times (for instance, the first appears\n # only once).\n reconstructed_snapshots = np.full(\n (time_instants, self.d, space_dim), np.nan, dtype=rec.dtype\n )\n\n c_idxes = (\n np.array(range(self.d))[:, None]\n .repeat(2, axis=1)[None, :]\n .repeat(rec.shape[1], axis=0)\n )\n c_idxes[:, :, 0] += np.array(range(rec.shape[1]))[:, None]\n\n reconstructed_snapshots[c_idxes[:, :, 0], c_idxes[:, :, 1]] = np.array(\n np.swapaxes(np.split(rec.T, self.d, axis=1), 0, 1)\n )\n\n if timeindex is None:\n return reconstructed_snapshots\n\n return reconstructed_snapshots[timeindex]\n\n def _first_reconstructions(self, reconstructions):\n \"\"\"Return the first occurrence of each snapshot available in the given\n matrix (which must be the result of `self._sub_dmd.reconstructed_data`,\n or have the same shape).\n\n :param reconstructions: A matrix of (higher-order) snapshots having\n shape `(space*self.d, time_instants)`\n :type reconstructions: np.ndarray\n :return: The first snapshot that occurs in `reconstructions` for each\n available time instant.\n :rtype: np.ndarray\n \"\"\"\n first_nonmasked_idx = np.repeat(\n np.array(range(reconstructions.shape[0]))[:, None], 2, axis=1\n )\n first_nonmasked_idx[self.d - 1 :, 1] = self.d - 1\n\n return reconstructions[\n first_nonmasked_idx[:, 0], first_nonmasked_idx[:, 1]\n ].T\n\n @property\n def reconstructed_data(self):\n self._update_sub_dmd_time()\n\n rec = self.reconstructions_of_timeindex()\n rec = np.ma.array(rec, mask=np.isnan(rec))\n\n if self._reconstruction_method == \"first\":\n result = self._first_reconstructions(rec)\n elif self._reconstruction_method == \"mean\":\n result = np.mean(rec, axis=1).T\n elif isinstance(self._reconstruction_method, (np.ndarray, list)):\n result = np.average(\n rec, axis=1, weights=self._reconstruction_method\n ).T\n else:\n raise ValueError(\n \"The reconstruction method wasn't recognized: {}\".format(\n self._reconstruction_method\n )\n )\n\n # we want to return only the requested timesteps\n time_index = min(\n self.d - 1,\n int(\n (self.dmd_time[\"t0\"] - self.original_time[\"t0\"])\n // self.dmd_time[\"dt\"]\n ),\n )\n result = result[:, time_index : time_index + len(self.dmd_timesteps)]\n\n return result.filled(fill_value=0)\n\n def _pseudo_hankel_matrix(self, X):\n \"\"\"\n Method for arranging the input snapshots `X` into the (pseudo) Hankel\n matrix. 
The attribute `d` controls the shape of the output matrix.\n :Example:\n\n >>> from pydmd import HankelDMD\n >>> dmd = HankelDMD(d=2)\n >>> a = np.array([[1, 2, 3, 4, 5]])\n >>> dmd._pseudo_hankel_matrix(a)\n array([[1, 2, 3, 4],\n [2, 3, 4, 5]])\n >>> dmd = pydmd.hankeldmd.HankelDMD(d=4)\n >>> dmd._pseudo_hankel_matrix(a)\n array([[1, 2],\n [2, 3],\n [3, 4],\n [4, 5]])\n\n \"\"\"\n return np.concatenate(\n [X[:, i : X.shape[1] - self.d + i + 1] for i in range(self.d)],\n axis=0,\n )\n\n @property\n def modes(self):\n return self._sub_dmd.modes\n\n @property\n def eigs(self):\n return self._sub_dmd.eigs\n\n @property\n def amplitudes(self):\n return self._sub_dmd.amplitudes\n\n @property\n def operator(self):\n return self._sub_dmd.operator\n\n @property\n def svd_rank(self):\n return self._sub_dmd.svd_rank\n\n @property\n def modes_activation_bitmask(self):\n return self._sub_dmd.modes_activation_bitmask\n\n @modes_activation_bitmask.setter\n def modes_activation_bitmask(self, value):\n self._sub_dmd.modes_activation_bitmask = value\n\n # due to how we implemented HankelDMD we need an alternative implementation\n # of __getitem__\n def __getitem__(self, key):\n \"\"\"\n Restrict the DMD modes used by this instance to a subset of indexes\n specified by keys. The value returned is a shallow copy of this DMD\n instance, with a different value in :func:`modes_activation_bitmask`.\n Therefore assignments to attributes are not reflected into the original\n instance.\n\n However the DMD instance returned should not be used for low-level\n manipulations on DMD modes, since the underlying DMD operator is shared\n with the original instance. For this reasons modifications to NumPy\n arrays may result in unwanted and unspecified situations which should\n be avoided in principle.\n\n :param key: An index (integer), slice or list of indexes.\n :type key: int or slice or list or np.ndarray\n :return: A shallow copy of this DMD instance having only a subset of\n DMD modes which are those indexed by `key`.\n :rtype: HankelDMD\n \"\"\"\n\n sub_dmd_copy = copy(self._sub_dmd)\n sub_dmd_copy.allocate_proxy()\n\n shallow_copy = copy(self)\n shallow_copy._sub_dmd = sub_dmd_copy\n return DMDBase.__getitem__(shallow_copy, key)\n\n def fit(self, X):\n \"\"\"\n Compute the Dynamic Modes Decomposition to the input data.\n\n :param X: the input snapshots.\n :type X: numpy.ndarray or iterable\n \"\"\"\n snp, self._snapshots_shape = self._col_major_2darray(X)\n self._snapshots = self._pseudo_hankel_matrix(snp)\n self._sub_dmd.fit(self._snapshots)\n\n # Default timesteps\n n_samples = snp.shape[1]\n self._set_initial_time_dictionary(\n {\"t0\": 0, \"tend\": n_samples - 1, \"dt\": 1}\n )\n\n return self\n", "from builtins import range\nfrom unittest import TestCase\nfrom pydmd.dmd import DMD\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\n\n# 15 snapshot with 400 data. The matrix is 400x15 and it contains\n# the following data: f1 + f2 where\n# f1 = lambda x,t: sech(x+3)*(1.*np.exp(1j*2.3*t))\n# f2 = lambda x,t: (sech(x)*np.tanh(x))*(2.*np.exp(1j*2.8*t))\nsample_data = np.load('tests/test_datasets/input_sample.npy')\n\n\ndef create_noisy_data():\n mu = 0.\n sigma = 0. 
# noise standard deviation\n m = 100 # number of snapshot\n noise = np.random.normal(mu, sigma, m) # gaussian noise\n A = np.array([[1., 1.], [-1., 2.]])\n A /= np.sqrt(3)\n n = 2\n X = np.zeros((n, m))\n X[:, 0] = np.array([0.5, 1.])\n # evolve the system and perturb the data with noise\n for k in range(1, m):\n X[:, k] = A.dot(X[:, k - 1])\n X[:, k - 1] += noise[k - 1]\n return X\n\n\nnoisy_data = create_noisy_data()\n\n\nclass TestDmd(TestCase):\n def test_shape(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n assert dmd.modes.shape[1] == sample_data.shape[1] - 1\n\n def test_truncation_shape(self):\n dmd = DMD(svd_rank=3)\n dmd.fit(X=sample_data)\n assert dmd.modes.shape[1] == 3\n\n def test_rank(self):\n dmd = DMD(svd_rank=0.9)\n dmd.fit(X=sample_data)\n assert len(dmd.eigs) == 2\n\n def test_Atilde_shape(self):\n dmd = DMD(svd_rank=3)\n dmd.fit(X=sample_data)\n assert dmd.atilde.shape == (dmd.svd_rank, dmd.svd_rank)\n\n def test_Atilde_values(self):\n dmd = DMD(svd_rank=2)\n dmd.fit(X=sample_data)\n exact_atilde = np.array(\n [[-0.70558526 + 0.67815084j, 0.22914898 + 0.20020143j],\n [0.10459069 + 0.09137814j, -0.57730040 + 0.79022994j]])\n np.testing.assert_allclose(exact_atilde, dmd.atilde)\n\n def test_eigs_1(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n assert len(dmd.eigs) == 14\n\n def test_eigs_2(self):\n dmd = DMD(svd_rank=5)\n dmd.fit(X=sample_data)\n assert len(dmd.eigs) == 5\n\n def test_eigs_3(self):\n dmd = DMD(svd_rank=2)\n dmd.fit(X=sample_data)\n expected_eigs = np.array([\n -8.09016994e-01 + 5.87785252e-01j, -4.73868662e-01 + 8.80595532e-01j\n ])\n np.testing.assert_almost_equal(dmd.eigs, expected_eigs, decimal=6)\n\n def test_dynamics_1(self):\n dmd = DMD(svd_rank=5)\n dmd.fit(X=sample_data)\n assert dmd.dynamics.shape == (5, sample_data.shape[1])\n\n def test_dynamics_2(self):\n dmd = DMD(svd_rank=1)\n dmd.fit(X=sample_data)\n expected_dynamics = np.array([[\n -2.20639502 - 9.10168802e-16j, 1.55679980 - 1.49626864e+00j,\n -0.08375915 + 2.11149018e+00j, -1.37280962 - 1.54663768e+00j,\n 2.01748787 + 1.60312745e-01j, -1.53222592 + 1.25504678e+00j,\n 0.23000498 - 1.92462280e+00j, 1.14289644 + 1.51396355e+00j,\n -1.83310653 - 2.93174173e-01j, 1.49222925 - 1.03626336e+00j,\n -0.35015209 + 1.74312867e+00j, -0.93504202 - 1.46738182e+00j,\n 1.65485808 + 4.01263449e-01j, -1.43976061 + 8.39117825e-01j,\n 0.44682540 - 1.56844403e+00j\n ]])\n np.testing.assert_allclose(dmd.dynamics, expected_dynamics)\n\n def test_dynamics_opt_1(self):\n dmd = DMD(svd_rank=5, opt=True)\n dmd.fit(X=sample_data)\n assert dmd.dynamics.shape == (5, sample_data.shape[1])\n\n def test_dynamics_opt_2(self):\n dmd = DMD(svd_rank=1, opt=True)\n dmd.fit(X=sample_data)\n expected_dynamics = np.array([[\n -4.56004133 - 6.48054238j, 7.61228319 + 1.4801793j,\n -6.37489962 + 4.11788355j, 1.70548899 - 7.22866146j,\n 3.69875496 + 6.25701574j, -6.85298745 - 1.90654427j,\n 6.12829151 - 3.30212967j, -2.08469012 + 6.48584004j,\n -2.92745126 - 5.99004747j, 6.12772217 + 2.24123565j,\n -5.84352626 + 2.57413711j, 2.37745273 - 5.77906544j,\n 2.24158249 + 5.68989493j, -5.44023459 - 2.49457492j,\n 5.53024740 - 1.92916437j\n ]])\n np.testing.assert_allclose(dmd.dynamics, expected_dynamics)\n\n def test_reconstructed_data(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n dmd_data = dmd.reconstructed_data\n np.testing.assert_allclose(dmd_data, sample_data)\n\n def test_original_time(self):\n dmd = DMD(svd_rank=2)\n dmd.fit(X=sample_data)\n expected_dict = {'dt': 1, 't0': 0, 'tend': 14}\n 
np.testing.assert_equal(dmd.original_time, expected_dict)\n\n def test_original_timesteps(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n np.testing.assert_allclose(dmd.original_timesteps,\n np.arange(sample_data.shape[1]))\n\n def test_dmd_time_1(self):\n dmd = DMD(svd_rank=2)\n dmd.fit(X=sample_data)\n expected_dict = {'dt': 1, 't0': 0, 'tend': 14}\n np.testing.assert_equal(dmd.dmd_time, expected_dict)\n\n def test_dmd_time_2(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n dmd.dmd_time['t0'] = 10\n dmd.dmd_time['tend'] = 14\n expected_data = sample_data[:, -5:]\n np.testing.assert_allclose(dmd.reconstructed_data, expected_data)\n\n def test_dmd_time_3(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n dmd.dmd_time['t0'] = 8\n dmd.dmd_time['tend'] = 11\n expected_data = sample_data[:, 8:12]\n np.testing.assert_allclose(dmd.reconstructed_data, expected_data)\n\n def test_dmd_time_4(self):\n dmd = DMD(svd_rank=3)\n dmd.fit(X=sample_data)\n dmd.dmd_time['t0'] = 20\n dmd.dmd_time['tend'] = 20\n expected_data = np.array([[-7.29383297e+00 - 4.90248179e-14j],\n [-5.69109796e+00 - 2.74068833e+00j],\n [3.38410649e-83 + 3.75677740e-83j]])\n np.testing.assert_almost_equal(dmd.dynamics, expected_data, decimal=6)\n\n def test_plot_eigs_1(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n dmd.plot_eigs(show_axes=True, show_unit_circle=True)\n plt.close()\n\n def test_plot_eigs_2(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n dmd.plot_eigs(show_axes=False, show_unit_circle=False)\n plt.close()\n\n def test_plot_eigs_3(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n dmd.plot_eigs(show_axes=False, show_unit_circle=True, filename='eigs.png')\n self.addCleanup(os.remove, 'eigs.png')\n\n def test_plot_modes_1(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n with self.assertRaises(ValueError):\n dmd.plot_modes_2D()\n\n def test_plot_modes_2(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n dmd.plot_modes_2D((1, 2, 5), x=np.arange(20), y=np.arange(20))\n plt.close()\n\n def test_plot_modes_3(self):\n dmd = DMD()\n snapshots = [snap.reshape(20, 20) for snap in sample_data.T]\n dmd.fit(X=snapshots)\n dmd.plot_modes_2D()\n plt.close()\n\n def test_plot_modes_4(self):\n dmd = DMD()\n snapshots = [snap.reshape(20, 20) for snap in sample_data.T]\n dmd.fit(X=snapshots)\n dmd.plot_modes_2D(index_mode=1)\n plt.close()\n\n def test_plot_modes_5(self):\n dmd = DMD()\n snapshots = [snap.reshape(20, 20) for snap in sample_data.T]\n dmd.fit(X=snapshots)\n dmd.plot_modes_2D(index_mode=1, filename='tmp.png')\n self.addCleanup(os.remove, 'tmp.1.png')\n\n def test_plot_snapshots_1(self):\n dmd = DMD()\n dmd.fit(X=sample_data)\n with self.assertRaises(ValueError):\n dmd.plot_snapshots_2D()\n\n def test_plot_snapshots_2(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n dmd.plot_snapshots_2D((1, 2, 5), x=np.arange(20), y=np.arange(20))\n plt.close()\n\n def test_plot_snapshots_3(self):\n dmd = DMD()\n snapshots = [snap.reshape(20, 20) for snap in sample_data.T]\n dmd.fit(X=snapshots)\n dmd.plot_snapshots_2D()\n plt.close()\n\n def test_plot_snapshots_4(self):\n dmd = DMD()\n snapshots = [snap.reshape(20, 20) for snap in sample_data.T]\n dmd.fit(X=snapshots)\n dmd.plot_snapshots_2D(index_snap=2)\n plt.close()\n\n def test_plot_snapshots_5(self):\n dmd = DMD()\n snapshots = [snap.reshape(20, 20) for snap in sample_data.T]\n dmd.fit(X=snapshots)\n dmd.plot_snapshots_2D(index_snap=2, filename='tmp.png')\n self.addCleanup(os.remove, 'tmp.2.png')\n\n def test_tdmd_plot(self):\n dmd = DMD(tlsq_rank=3)\n 
dmd.fit(X=sample_data)\n dmd.plot_eigs(show_axes=False, show_unit_circle=False)\n plt.close()\n\n # we check that modes are the same vector multiplied by a coefficient\n # when we rescale\n def test_rescale_mode_auto_same_modes(self):\n dmd_no_rescale = DMD(svd_rank=2, opt=True, rescale_mode=None)\n dmd_no_rescale.fit(X=sample_data)\n\n dmd_auto_rescale = DMD(svd_rank=2, opt=True, rescale_mode='auto')\n dmd_auto_rescale.fit(X=sample_data)\n\n def normalize(vector):\n return vector / np.linalg.norm(vector)\n\n dmd_rescale_normalized_modes = np.apply_along_axis(normalize, 0,\n dmd_auto_rescale.modes)\n dmd_no_rescale_normalized_modes = np.apply_along_axis(normalize, 0,\n dmd_no_rescale.modes)\n\n np.testing.assert_almost_equal(dmd_no_rescale_normalized_modes,\n dmd_rescale_normalized_modes, decimal=3)\n\n # we check that modes are the same vector multiplied by a coefficient\n # when we rescale\n def test_rescale_mode_custom_same_modes(self):\n dmd_no_rescale = DMD(svd_rank=2, opt=True, rescale_mode=None)\n dmd_no_rescale.fit(X=sample_data)\n\n dmd_rescale = DMD(svd_rank=2, opt=True, rescale_mode=\n np.linspace(5,10, 2))\n dmd_rescale.fit(X=sample_data)\n\n def normalize(vector):\n return vector / np.linalg.norm(vector)\n\n dmd_rescale_normalized_modes = np.apply_along_axis(normalize, 0,\n dmd_rescale.modes)\n dmd_no_rescale_normalized_modes = np.apply_along_axis(normalize, 0,\n dmd_no_rescale.modes)\n\n np.testing.assert_almost_equal(dmd_no_rescale_normalized_modes,\n dmd_rescale_normalized_modes, decimal=3)\n\n def test_rescale_mode_same_evolution(self):\n dmd_no_rescale = DMD(svd_rank=5, opt=True, rescale_mode=None)\n dmd_no_rescale.fit(X=sample_data)\n dmd_no_rescale.dmd_time['tend'] *= 2\n\n dmd_rescale = DMD(svd_rank=5, opt=True, rescale_mode=\n np.linspace(5,10, 5))\n dmd_rescale.fit(X=sample_data)\n dmd_rescale.dmd_time['tend'] *= 2\n\n np.testing.assert_almost_equal(dmd_rescale.reconstructed_data,\n dmd_no_rescale.reconstructed_data, decimal=6)\n\n def test_rescale_mode_coefficients_count_check(self):\n dmd_rescale = DMD(svd_rank=5, opt=True, rescale_mode=\n np.linspace(5,10, 6))\n with self.assertRaises(ValueError):\n dmd_rescale.fit(X=sample_data)\n\n def test_predict(self):\n def f1(x,t):\n return 1./np.cosh(x+3)*np.exp(2.3j*t)\n\n def f2(x,t):\n return 2./np.cosh(x)*np.tanh(x)*np.exp(2.8j*t)\n\n x = np.linspace(-2, 2, 4)\n t = np.linspace(0, 4*np.pi, 10)\n\n xgrid, tgrid = np.meshgrid(x, t)\n\n X1 = f1(xgrid, tgrid)\n X2 = f2(xgrid, tgrid)\n X = X1 + X2\n\n dmd = DMD()\n dmd.fit(X.T)\n\n expected = np.array([\n [ 0.35407111+0.31966903j, 0.0581077 -0.51616519j,\n -0.4936891 +0.36476117j, 0.70397844+0.05332291j,\n -0.56648961-0.50687223j, 0.15372065+0.74444603j,\n 0.30751808-0.63550106j, -0.5633934 +0.24365451j,\n 0.47550633+0.20903766j, -0.0985528 -0.46673545j],\n [ 0.52924739+0.47782492j, 0.08685642-0.77153733j,\n -0.73794122+0.54522635j, 1.05227097+0.07970435j,\n -0.8467597 -0.7576467j , 0.22977376+1.11275987j,\n 0.4596623 -0.94991449j, -0.84213164+0.3642023j ,\n 0.71076254+0.3124588j , -0.14731169-0.69765229j],\n [-0.49897731-0.45049592j, -0.0818887 +0.72740958j,\n 0.69573498-0.51404236j, -0.99208678-0.0751457j ,\n 0.79832963+0.71431342j, -0.21663195-1.04911604j,\n -0.43337211+0.89558454j, 0.79396628-0.3433719j ,\n -0.67011078-0.29458785j, 0.13888626+0.65775036j],\n [-0.2717424 -0.2453395j , -0.04459648+0.39614632j,\n 0.37889637-0.2799468j , -0.54028918-0.04092425j,\n 0.43476929+0.38901417j, -0.11797748-0.57134724j,\n -0.23601389+0.48773418j, 0.43239301-0.18699989j,\n - 
0.36494147 - 0.16043216j, 0.07563728 + 0.35821j]\n ])\n\n np.testing.assert_almost_equal(dmd.predict(X.T), expected, decimal=6)\n\n def test_predict_exact(self):\n dmd = DMD(exact=True)\n expected = np.load('tests/test_datasets/input_sample_predict_exact.npy')\n\n np.testing.assert_almost_equal(dmd\n .fit(sample_data)\n .predict(sample_data[:,20:40]), expected, decimal=6)\n\n def test_predict_nexact(self):\n dmd = DMD(exact=False)\n expected = np.load('tests/test_datasets/input_sample_predict_nexact.npy')\n\n np.testing.assert_almost_equal(dmd\n .fit(sample_data)\n .predict(sample_data[:, 10:30]), expected, decimal=6)\n\n\n def test_advanced_snapshot_parameter(self):\n dmd = DMD(svd_rank=0.99)\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=0.99, opt=-1)\n dmd2.fit(sample_data)\n\n np.testing.assert_almost_equal(dmd2.reconstructed_data.real,\n dmd.reconstructed_data.real, decimal=6)\n\n\n def test_sorted_eigs_default(self):\n dmd = DMD()\n assert dmd.operator._sorted_eigs == False\n\n def test_sorted_eigs_set_real(self):\n dmd = DMD(sorted_eigs='real')\n assert dmd.operator._sorted_eigs == 'real'\n\n def test_sorted_eigs_abs_right_eigs(self):\n dmd = DMD(svd_rank=20, sorted_eigs='abs')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n assert len(dmd.eigs) == len(dmd2.eigs)\n assert set(dmd.eigs) == set(dmd2.eigs)\n\n previous = dmd.eigs[0]\n for eig in dmd.eigs[1:]:\n assert abs(previous) <= abs(eig)\n previous = eig\n\n def test_sorted_eigs_abs_right_eigenvectors(self):\n dmd = DMD(svd_rank=20, sorted_eigs='abs')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n eigenvector = dmd2.operator.eigenvectors.T[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n assert all(dmd.operator.eigenvectors.T[idx_new] == eigenvector)\n break\n\n def test_sorted_eigs_abs_right_modes(self):\n dmd = DMD(svd_rank=20, sorted_eigs='abs')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n mode = dmd2.modes.T[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n np.testing.assert_almost_equal(dmd.modes.T[idx_new], mode,\n decimal=6)\n break\n\n def test_sorted_eigs_real_right_eigs(self):\n dmd = DMD(svd_rank=20, sorted_eigs='real')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n assert len(dmd.eigs) == len(dmd2.eigs)\n assert set(dmd.eigs) == set(dmd2.eigs)\n\n previous = complex(dmd.eigs[0])\n for eig in dmd.eigs[1:]:\n x = complex(eig)\n assert x.real > previous.real or (x.real == previous.real and x.imag >= previous.imag)\n previous = x\n\n def test_sorted_eigs_real_right_eigenvectors(self):\n dmd = DMD(svd_rank=20, sorted_eigs='real')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n eigenvector = dmd2.operator.eigenvectors.T[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n assert all(dmd.operator.eigenvectors.T[idx_new] == eigenvector)\n break\n\n def test_sorted_eigs_real_right_modes(self):\n dmd = DMD(svd_rank=20, sorted_eigs='real')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n mode = dmd2.modes.T[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n np.testing.assert_almost_equal(dmd.modes.T[idx_new], mode,\n decimal=6)\n break\n\n def test_sorted_eigs_dynamics(self):\n dmd = 
DMD(svd_rank=20, sorted_eigs='abs')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n dynamic = dmd2.dynamics[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n np.testing.assert_almost_equal(dmd.dynamics[idx_new],\n dynamic, decimal=6)\n break\n\n def test_sorted_eigs_frequency(self):\n dmd = DMD(svd_rank=20, sorted_eigs='abs')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n frq = dmd2.frequency[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n np.testing.assert_almost_equal(dmd.frequency[idx_new],\n frq, decimal=6)\n break\n\n def test_sorted_eigs_amplitudes(self):\n dmd = DMD(svd_rank=20, sorted_eigs='abs')\n dmd.fit(sample_data)\n\n dmd2 = DMD(svd_rank=20)\n dmd2.fit(sample_data)\n\n for idx, eig in enumerate(dmd2.eigs):\n amp = dmd2.amplitudes[idx]\n for idx_new, eig_new in enumerate(dmd.eigs):\n if eig_new == eig:\n np.testing.assert_almost_equal(dmd.amplitudes[idx_new],\n amp, decimal=6)\n break\n\n def test_save(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n dmd.save('pydmd.test')\n\n def test_load(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n dmd.save('pydmd.test2')\n loaded_dmd = DMD.load('pydmd.test2')\n np.testing.assert_array_equal(dmd.reconstructed_data,\n loaded_dmd.reconstructed_data)\n\n def test_load(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n dmd.save('pydmd.test2')\n loaded_dmd = DMD.load('pydmd.test2')\n assert isinstance(loaded_dmd, DMD)\n\n def test_get_bitmask_default(self):\n dmd = DMD(svd_rank=10)\n dmd.fit(X=sample_data)\n assert np.all(dmd.modes_activation_bitmask == True)\n\n def test_set_bitmask(self):\n dmd = DMD(svd_rank=3)\n dmd.fit(X=sample_data)\n\n new_bitmask = np.full(len(dmd.amplitudes), True, dtype=bool)\n new_bitmask[[0]] = False\n dmd.modes_activation_bitmask = new_bitmask\n\n assert dmd.modes_activation_bitmask[0] == False\n assert np.all(dmd.modes_activation_bitmask[1:] == True)\n\n def test_not_fitted_get_bitmask_raises(self):\n dmd = DMD(svd_rank=3)\n with self.assertRaises(RuntimeError):\n print(dmd.modes_activation_bitmask)\n\n def test_not_fitted_set_bitmask_raises(self):\n dmd = DMD(svd_rank=3)\n with self.assertRaises(RuntimeError):\n dmd.modes_activation_bitmask = np.full(3, True, dtype=bool)\n\n def test_raise_wrong_dtype_bitmask(self):\n dmd = DMD(svd_rank=3)\n dmd.fit(X=sample_data)\n with self.assertRaises(RuntimeError):\n dmd.modes_activation_bitmask = np.full(3, 0.1)\n\n def test_fitted(self):\n dmd = DMD(svd_rank=3)\n assert not dmd.fitted\n dmd.fit(X=sample_data)\n assert dmd.fitted\n\n def test_bitmask_amplitudes(self):\n dmd = DMD(svd_rank=10)\n dmd.fit(X=sample_data)\n\n old_n_amplitudes = dmd.amplitudes.shape[0]\n retained_amplitudes = np.delete(dmd.amplitudes, [0,-1])\n\n new_bitmask = np.full(dmd.amplitudes.shape[0], True, dtype=bool)\n new_bitmask[[0,-1]] = False\n dmd.modes_activation_bitmask = new_bitmask\n\n assert dmd.amplitudes.shape[0] == old_n_amplitudes - 2\n np.testing.assert_almost_equal(dmd.amplitudes, retained_amplitudes)\n\n def test_bitmask_eigs(self):\n dmd = DMD(svd_rank=10)\n dmd.fit(X=sample_data)\n\n old_n_eigs = dmd.eigs.shape[0]\n retained_eigs = np.delete(dmd.eigs, [0,-1])\n\n new_bitmask = np.full(dmd.amplitudes.shape[0], True, dtype=bool)\n new_bitmask[[0,-1]] = False\n dmd.modes_activation_bitmask = new_bitmask\n\n assert dmd.eigs.shape[0] == old_n_eigs - 2\n 
np.testing.assert_almost_equal(dmd.eigs, retained_eigs)\n\n def test_bitmask_modes(self):\n dmd = DMD(svd_rank=10)\n dmd.fit(X=sample_data)\n\n old_n_modes = dmd.modes.shape[1]\n retained_modes = np.delete(dmd.modes, [0,-1], axis=1)\n\n new_bitmask = np.full(dmd.amplitudes.shape[0], True, dtype=bool)\n new_bitmask[[0,-1]] = False\n dmd.modes_activation_bitmask = new_bitmask\n\n assert dmd.modes.shape[1] == old_n_modes - 2\n np.testing.assert_almost_equal(dmd.modes, retained_modes)\n\n def test_reconstructed_data(self):\n dmd = DMD(svd_rank=10)\n dmd.fit(X=sample_data)\n\n new_bitmask = np.full(dmd.amplitudes.shape[0], True, dtype=bool)\n new_bitmask[[0,-1]] = False\n dmd.modes_activation_bitmask = new_bitmask\n\n dmd.reconstructed_data\n assert True\n\n def test_getitem_modes(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n old_n_modes = dmd.modes.shape[1]\n\n assert dmd[[0,-1]].modes.shape[1] == 2\n np.testing.assert_almost_equal(dmd[[0,-1]].modes, dmd.modes[:,[0,-1]])\n\n assert dmd.modes.shape[1] == old_n_modes\n\n assert dmd[1::2].modes.shape[1] == old_n_modes // 2\n np.testing.assert_almost_equal(dmd[1::2].modes, dmd.modes[:,1::2])\n\n assert dmd.modes.shape[1] == old_n_modes\n\n assert dmd[[1,3]].modes.shape[1] == 2\n np.testing.assert_almost_equal(dmd[[1,3]].modes, dmd.modes[:,[1,3]])\n\n assert dmd.modes.shape[1] == old_n_modes\n\n assert dmd[2].modes.shape[1] == 1\n np.testing.assert_almost_equal(np.squeeze(dmd[2].modes), dmd.modes[:,2])\n\n assert dmd.modes.shape[1] == old_n_modes\n\n def test_getitem_raises(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n\n with self.assertRaises(ValueError):\n dmd[[0,1,1,0,1]]\n with self.assertRaises(ValueError):\n dmd[[True, True, False, True]]\n with self.assertRaises(ValueError):\n dmd[1.0]\n\n # this is a test for the correctness of the amplitudes saved in the Proxy\n # between DMDBase and the modes activation bitmask. if this test fails\n # you probably need to call allocate_proxy once again after you compute\n # the final value of the amplitudes\n def test_correct_amplitudes(self):\n dmd = DMD(svd_rank=-1)\n dmd.fit(X=sample_data)\n np.testing.assert_array_almost_equal(dmd.amplitudes, dmd._b)\n", "\"\"\"\nA module which contains several functions to tune (i.e. improve) DMD instances\nthrough the \"manual\" modification of DMD modes.\n\"\"\"\nfrom copy import deepcopy\nfrom functools import partial\n\nimport numpy as np\n\n\ndef select_modes(\n dmd,\n criteria,\n in_place=True,\n return_indexes=False,\n nullify_amplitudes=False,\n):\n \"\"\"\n Select the DMD modes by using the given `criteria`.\n `criteria` is a function which takes as input the DMD\n object itself and return a numpy.ndarray of boolean where `False`\n indicates that the corresponding mode will be discarded.\n The class :class:`ModesSelectors` contains some pre-packed selector\n functions.\n\n Example:\n\n .. code-block:: python\n\n >>> dmd = ...\n >>> def stable_modes(dmd):\n >>> toll = 1e-3\n >>> return np.abs(np.abs(dmd.eigs) - 1) < toll\n >>> select_modes(dmd, stable_modes)\n\n :param pydmd.DMDBase dmd: An instance of DMD from which we want to delete\n modes according to some criteria.\n :param callable criteria: The function used to select the modes. Must\n return a boolean array (whose length is the number of DMD modes in\n `dmd`) such that `True` items correspond to retained DMD modes, while\n `False` items correspond to deleted modes.\n :param bool in_place: If `True`, the given DMD instance will be modified\n according to the given `criteria`. 
Otherwise, a new instance will be\n created (via `copy.deepcopy`).\n :param bool return_indexes: If `True`, this function returns the indexes\n corresponding to DMD modes cut using the given `criteria` (default\n `False`).\n :param bool nullify_amplitudes: If `True`, the amplitudes associated with\n DMD modes to be removed are set to 0, therefore the number of DMD\n modes remains constant. If `False` (default) DMD modes are actually\n removed, therefore the number of DMD modes in the instance decreases.\n :returns: If `return_indexes` is `True`, the returned value is a tuple\n whose items are:\n\n 0. The modified DMD instance;\n 1. The indexes (on the old DMD instance) corresponding to DMD modes\n cut.\n\n Otherwise, the returned value is the modified DMD instance.\n \"\"\"\n if not in_place:\n dmd = deepcopy(dmd)\n\n selected_indexes = np.where(criteria(dmd))[0]\n\n all_indexes = set(np.arange(len(dmd.eigs)))\n cut_indexes = np.array(list(all_indexes - set(selected_indexes)))\n\n if len(cut_indexes) > 0:\n tmp = np.array(dmd.modes_activation_bitmask)\n tmp[cut_indexes] = False\n dmd.modes_activation_bitmask = tmp\n\n if return_indexes:\n return dmd, cut_indexes\n return dmd\n\n\ndef stabilize_modes(\n dmd, inner_radius, outer_radius=np.inf, in_place=True, return_indexes=False\n):\n \"\"\"\n Stabilize modes in a circular sector of radius [`inner_radius`,\n `outer_radius`].\n\n Stabilizing a mode means that the corresponding eigenvalue is divided\n by its module (i.e. normalized) in order to make the associated\n dynamic a trigonometric function with respect to the time (since the\n eigenvalue is projected on the unit circle). At the same time, the\n corresponding mode amplitude is multiplied by the former module of the\n eigenvalue, in order to \"recover\" the correctness of the result in the\n first time instants.\n\n This approach may give better results in the prediction when one or\n more eigenvalues are strongly unstable (i.e. the corresponding DMD mode\n \"explodes\" several instants after the known time frame).\n\n In order to stabilize an unbounded (above) circular sector, the\n parameter `outer_radius` should be set to `np.inf` (default).\n\n :param pydmd.DMDBase dmd: An instance of DMD which we want to stabilize.\n :param float inner_radius: The inner radius of the circular sector to\n be stabilized.\n :param float outer_radius: The outer radius of the circular sector to\n be stabilized.\n :param bool in_place: If `True`, the given DMD instance will be modified\n according to the given `criteria`. Otherwise, a new instance will be\n created (via `copy.deepcopy`).\n :param bool return_indexes: If `True`, this function returns the indexes\n corresponding to DMD modes stabilized (default `False`).\n :returns: If `return_indexes` is `True`, the returned value is a tuple\n whose items are:\n\n 0. The modified DMD instance;\n 1. 
The indexes (on the old DMD instance) corresponding to DMD modes\n stabilized.\n\n Otherwise, the returned value is the modified DMD instance.\n \"\"\"\n if not in_place:\n dmd = deepcopy(dmd)\n\n eigs_module = np.abs(dmd.eigs)\n\n # indexes associated with eigenvalues that must be stabilized\n fixable_eigs_indexes = np.logical_and(\n inner_radius < eigs_module,\n eigs_module < outer_radius,\n )\n\n dmd.amplitudes[fixable_eigs_indexes] *= np.abs(\n dmd.eigs[fixable_eigs_indexes]\n )\n dmd.eigs[fixable_eigs_indexes] /= np.abs(dmd.eigs[fixable_eigs_indexes])\n\n if return_indexes:\n stabilized_indexes = np.where(fixable_eigs_indexes)[0]\n return dmd, stabilized_indexes\n return dmd\n\n\nclass ModesSelectors:\n \"\"\"\n A container class which defines some static methods for pre-packed\n modes selectors functions to be used in `select_modes`.\n\n For instance, to select the first `x` modes by integral contributions:\n\n Example:\n\n .. code-block:: python\n\n >>> from pydmd.dmd_modes_tuner import ModesSelectors, select_modes\n >>> select_modes(dmd, ModesSelectors.integral_contribution(x))\n\n Most private static methods in this class are \"non-partialized\", which\n means that they also take the parameters that characterize the selector.\n By contrast, public static method are ready mode selector, whose only\n parameter is the DMD instance on which that selector should be applied, and\n are the output of a call to `functools.partial` applied to a\n non-partialized selector. This mechanism is employed to reduce the\n boilerplate code needed while applying a selector.\n \"\"\"\n\n @staticmethod\n def _threshold(dmd, low_threshold, up_threshold):\n \"\"\"\n Non-partialized function of the modes selector `threshold`.\n\n :param DMDBase dmd: An instance of DMDBase.\n :param float low_threshold: The minimum accepted module of an\n eigenvalue.\n :param float up_threshold: The maximum accepted module of an\n eigenvalue.\n :return np.ndarray: An array of bool, where each \"True\" index means\n that the corresponding DMD mode is selected.\n \"\"\"\n eigs_module = np.abs(dmd.eigs)\n\n return np.logical_and(\n eigs_module < up_threshold,\n eigs_module > low_threshold,\n )\n\n @staticmethod\n def threshold(low_threshold, up_threshold):\n \"\"\"\n Retain only DMD modes associated with an eigenvalue whose module is\n between `low_threshold` and `up_threshold` (inclusive on both sides).\n\n :param float low_threshold: The minimum accepted module of an\n eigenvalue.\n :param float up_threshold: The maximum accepted module of an\n eigenvalue.\n :return np.ndarray: An array of bool, where each \"True\" index means\n that the corresponding DMD mode is selected.\n \"\"\"\n return partial(\n ModesSelectors._threshold,\n low_threshold=low_threshold,\n up_threshold=up_threshold,\n )\n\n @staticmethod\n def _stable_modes(\n dmd,\n max_distance_from_unity_inside,\n max_distance_from_unity_outside,\n ):\n \"\"\"\n Non-partialized function of the modes selector `stable_modes`.\n\n :param DMDBase dmd: An instance of DMDBase.\n :param float max_distance_from_unity_inside: The maximum distance\n from the unit circle for points inside it.\n :param float max_distance_from_unity_outside: The maximum distance\n from the unit circle for points outside it.\n :return np.ndarray: An array of bool, where each \"True\" index means\n that the corresponding DMD mode is selected.\n \"\"\"\n return ModesSelectors._threshold(\n dmd,\n 1 - max_distance_from_unity_inside,\n 1 + max_distance_from_unity_outside,\n )\n\n @staticmethod\n def 
stable_modes(\n max_distance_from_unity=None,\n max_distance_from_unity_inside=None,\n max_distance_from_unity_outside=None,\n ):\n \"\"\"\n Select all the modes corresponding to eigenvalues whose distance\n from the unit circle is less than or equal to a specified threshold. It\n is possible to specify the distance separately for eigenvalues inside\n and outside the unit circle, but you cannot set clashing\n thresholds.\n\n The following are allowed combinations of parameters:\n\n .. code-block:: python\n\n >>> # the maximum allowed distance from the unit circle (both\n ... # inside and outside) is 1.e-3.\n >>> stable_modes(max_distance_from_unity=1.e-3)\n >>> # the maximum allowed distance from the unit circle is 1.e-3\n ... # inside and 1.e-4 outside.\n >>> stable_modes(max_distance_from_unity_inside=1.e-3,\n ... max_distance_from_unity_outside=1.e-4)\n >>> # the maximum allowed distance from the unit circle is 1.e-4\n ... # outside and unspecified (i.e. infinity) inside.\n >>> stable_modes(max_distance_from_unity_outside=1.e-4)\n\n Since `max_distance_from_unity` controls both inside and outside\n distance, you cannot set also `max_distance_from_unity_inside` or\n `max_distance_from_unity_outside` simultaneously:\n\n >>> # this is not allowed\n >>> stable_modes(max_distance_from_unity=1.e-3,\n ... max_distance_from_unity_inside=1.e-4)\n\n For code clarity reasons, the snippet above would have failed even if\n `max_distance_from_unity_inside=1.e-3`.\n\n :param float max_distance_from_unity: The maximum distance from the\n unit circle. Defaults to `None`.\n :param float max_distance_from_unity_inside: The maximum distance\n from the unit circle for points inside it. Defaults to `None`.\n :param float max_distance_from_unity_outside: The maximum distance\n from the unit circle for points outside it. 
Defaults to `None`.\n :return callable: A function which can be used as the parameter\n of `select_modes` to select DMD modes according to\n the criteria of stability.\n \"\"\"\n\n if max_distance_from_unity and max_distance_from_unity_inside:\n raise ValueError(\n \"\"\"Only one between `max_distance_from_unity`\nand `max_distance_from_unity_inside` can be not `None`\"\"\"\n )\n if max_distance_from_unity and max_distance_from_unity_outside:\n raise ValueError(\n \"\"\"Only one between `max_distance_from_unity`\nand `max_distance_from_unity_outside` can be not `None`\"\"\"\n )\n\n if max_distance_from_unity:\n max_distance_from_unity_outside = max_distance_from_unity\n max_distance_from_unity_inside = max_distance_from_unity\n\n if max_distance_from_unity_outside is None:\n max_distance_from_unity_outside = float(\"inf\")\n if max_distance_from_unity_inside is None:\n max_distance_from_unity_inside = float(\"inf\")\n\n if max_distance_from_unity_outside == float(\n \"inf\"\n ) and max_distance_from_unity_inside == float(\"inf\"):\n raise ValueError(\n \"\"\"The combination of parameters does not make sense\"\"\"\n )\n\n return partial(\n ModesSelectors._stable_modes,\n max_distance_from_unity_inside=max_distance_from_unity_inside,\n max_distance_from_unity_outside=max_distance_from_unity_outside,\n )\n\n @staticmethod\n def _compute_integral_contribution(mode, dynamic):\n \"\"\"\n Compute the integral contribution across time of the given DMD mode,\n given the mode and its dynamic, as shown in\n http://dx.doi.org/10.1016/j.euromechflu.2016.11.015\n\n :param numpy.ndarray mode: The DMD mode.\n :param numpy.ndarray dynamic: The dynamic of the given DMD mode, as\n returned by `dmd.dynamics[mode_index]`.\n :return float: the integral contribution of the given DMD mode.\n \"\"\"\n return pow(np.linalg.norm(mode), 2) * sum(np.abs(dynamic))\n\n @staticmethod\n def _integral_contribution(dmd, n):\n \"\"\"\n Non-partialized function of the modes selector `integral_contribution`.\n\n :param DMDBase dmd: An instance of DMDBase.\n :param int n: The number of DMD modes to be selected.\n :return np.ndarray: An array of bool, where each \"True\" index means\n that the corresponding DMD mode is selected.\n \"\"\"\n\n # temporary reset dmd_time to original_time\n temp = dmd.dmd_time\n dmd._dmd_time = dmd.original_time\n\n dynamics = dmd.dynamics\n modes = dmd.modes\n\n # reset dmd_time\n dmd._dmd_time = temp\n\n n_of_modes = modes.shape[1]\n integral_contributions = [\n ModesSelectors._compute_integral_contribution(*tp)\n for tp in zip(modes.T, dynamics)\n ]\n\n indexes_first_n = np.array(integral_contributions).argsort()[-n:]\n\n truefalse_array = np.array([False for _ in range(n_of_modes)])\n truefalse_array[indexes_first_n] = True\n return truefalse_array\n\n @staticmethod\n def integral_contribution(n):\n \"\"\"\n Reference: http://dx.doi.org/10.1016/j.euromechflu.2016.11.015\n\n :param int n: The number of DMD modes to be selected.\n :return callable: A function which can be used as the parameter\n of `select_modes` to select DMD modes according to\n the criteria of integral contribution.\n \"\"\"\n return partial(ModesSelectors._integral_contribution, n=n)\n\n\nselectors = {\n \"module_threshold\": ModesSelectors.threshold,\n \"stable_modes\": ModesSelectors.stable_modes,\n \"integral_contribution\": ModesSelectors.integral_contribution,\n}\n\n\nclass ModesTuner:\n \"\"\"Class for semi-automatic tuning of DMD modes.\n\n This class generates a new instance from the instance passed to the\n 
constructor, and modifies that one whenever one of the tuning methods\n is called. Therefore there is no need to worry about subsequent\n unwanted changes in the given instance.\n\n `ModesTuner` provides a simplified interface to the tuning functions\n :func:`select_modes` and :func:`stabilize_modes`, but in order to\n have more control over what is happening (i.e. when to use in-place\n tuning, or to check which modes have been changed) you may prefer to\n use them instead.\n\n :param dmds: One or more instances of DMD.\n :type dmds: list or pydmd.DMDBase\n :param bool in_place: If `True`, this tuner works directly on the given\n DMD instance.\n \"\"\"\n\n def __init__(self, dmds, in_place=False):\n # if True, we return a list since we received a list in the constructor\n self._init_received_list = isinstance(dmds, list)\n\n dmds = dmds if self._init_received_list else [dmds]\n self._dmds = dmds if in_place else list(map(deepcopy, dmds))\n\n def subset(self, indexes):\n \"\"\"\n Generate a temporary instance of `ModesTuner` which operates on a\n subset of the DMD instances held by this `ModesTuner`.\n\n :param list indexes: List of indexes of the DMD instances to be put\n into the subset.\n :return ModesTuner: A `ModesTuner` which operates \"in place\" on the\n DMD instances held by the caller `ModesTuner`.\n \"\"\"\n if not self._init_received_list:\n raise ValueError(\"Cannot index a single DMD instance.\")\n\n return ModesTuner([self._dmds[i] for i in indexes], in_place=True)\n\n def get(self):\n \"\"\"Returns the private DMD instance(s) that `ModesTuner` is working on.\n Be aware that those instances are the internal instances owned by\n `ModesTuner`, therefore they are going to be modified by\n subsequent calls to tuning methods.\n\n :return: The private DMD instance owned by `ModesTuner`, or a list of\n DMD instances depending on the parameter received by the\n constructor of this instance.\n :rtype: list or pydmd.DMDBase\n \"\"\"\n\n if self._init_received_list:\n return self._dmds\n return self._dmds[0]\n\n def copy(self):\n \"\"\"Returns a deep copy of the private DMD instance(s) that `ModesTuner`\n is working on. They are not going to be modified by subsequent calls to\n tuning methods, and therefore provide a secure \"snapshot\" of the DMD(s).\n\n :return: A copy of the private DMD instance owned by `ModesTuner`, or a\n list of copies depending on the parameter received by the\n constructor of this instance.\n :rtype: list or pydmd.DMDBase\n \"\"\"\n\n if self._init_received_list:\n return list(map(deepcopy, self._dmds))\n return deepcopy(self._dmds[0])\n\n def select(self, criteria, nullify_amplitudes=False, **kwargs):\n r\"\"\"\n Select the DMD modes by using the given `criteria`, which can be either\n a string or a function. You can choose pre-packed criteria by passing\n one of the allowed string values for criteria. In this case you need to\n pass (as keyword arguments) the arguments needed to construct the\n criteria (see example below).\n\n Allowed string values for `criteria`:\n\n * `'module_threshold'`: Retain modes such that the module of the corresponding eigenvalue is included in the interval [`low_threshold`, `up_threshold`] (cfr. :func:`ModesSelectors.threshold`);\n * `'stable_modes'`: Retain modes such that the corresponding eigenvalue is not far from the unit circle (cfr. :func:`ModesSelectors.stable_modes`);\n * `'integral_contribution'`: Retain the first `n` modes in terms of integral contribution (cfr. 
:func:`ModesSelectors.integral_contribution`).\n\n You might want to read the documentation of\n :class:`ModesSelectors` in order to get detailed info regarding the\n behavior of each argument.\n\n Example:\n\n .. code-block:: python\n\n >>> from pydmd.dmd_modes_tuner import ModesTuner\n >>> mtuner = ModesTuner(dmd)\n >>> mtuner.select('stable_modes', max_distance_from_unity_inside=1.e-1,\n max_distance_from_unity_outside=1.e-3)\n\n :param criteria: Criteria used to select DMD modes. The allowed strings\n are `module_threshold`, `stable_modes` and `integral_contribution`.\n If `criteria` is a function it must take an instance of DMD as the\n only parameter.\n :type criteria: str or callable\n :param bool nullify_amplitudes: If `True`, the amplitudes associated\n with DMD modes to be removed are set to 0, therefore the number of\n DMD modes remains constant. If `False` (default) DMD modes are\n actually removed, therefore the number of DMD modes in the instance\n decreases.\n :param \\**kwargs: Parameters passed to the chosen criteria (if\n `criteria` is a string).\n :return ModesTuner: This instance of `ModesTuner` in order to allow\n chaining multiple operations.\n \"\"\"\n\n if isinstance(criteria, str):\n if criteria not in selectors:\n raise ValueError(\"Could't find the specified criteria\")\n criteria = selectors[criteria](**kwargs)\n if not callable(criteria):\n raise ValueError(\n \"\"\"You should provide a criteria to select DMD\nmodes (either a string or a function)\"\"\"\n )\n\n for dmd in self._dmds:\n select_modes(dmd, criteria, nullify_amplitudes=nullify_amplitudes)\n return self\n\n def stabilize(self, inner_radius, outer_radius=np.inf):\n \"\"\"\n Stabilize modes in a circular sector of radius [`inner_radius`,\n `outer_radius`].\n\n Stabilizing a mode means that the corresponding eigenvalue is divided\n by its module (i.e. normalized) in order to make the associated\n dynamic a trigonometric function with respect to the time (since the\n eigenvalue is projected on the unit circle). At the same time, the\n corresponding mode amplitude is multiplied by the former module of the\n eigenvalue, in order to \"recover\" the correctness of the result in the\n first time instants.\n\n This approach may give better results in the prediction when one or\n more eigenvalues are strongly unstable (i.e. the corresponding DMD mode\n \"explodes\" several instants after the known time frame).\n\n In order to stabilize an unbounded (above) circular sector, the\n parameter `outer_radius` should be set to `np.inf` (default).\n\n :param float inner_radius: The inner radius of the circular sector to\n be stabilized.\n :param float outer_radius: The outer radius of the circular sector to\n be stabilized.\n :return ModesTuner: This instance of `ModesTuner` in order to allow\n chaining multiple operations.\n \"\"\"\n\n for dmd in self._dmds:\n stabilize_modes(dmd, inner_radius, outer_radius)\n return self\n" ]
[ [ "numpy.split", "numpy.isnan", "numpy.full", "numpy.mean", "numpy.average" ], [ "numpy.sqrt", "numpy.linspace", "numpy.squeeze", "numpy.all", "numpy.exp", "numpy.testing.assert_equal", "numpy.arange", "numpy.full", "numpy.testing.assert_almost_equal", "numpy.apply_along_axis", "matplotlib.pyplot.close", "numpy.load", "numpy.zeros", "numpy.testing.assert_array_almost_equal", "numpy.cosh", "numpy.delete", "numpy.testing.assert_allclose", "numpy.meshgrid", "numpy.array", "numpy.tanh", "numpy.linalg.norm", "numpy.testing.assert_array_equal", "numpy.random.normal" ], [ "numpy.abs", "numpy.logical_and", "numpy.linalg.norm", "numpy.array", "numpy.where" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
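The HankelDMD source in the record above reduces to one core construction: `_pseudo_hankel_matrix` stacks `d` time-shifted copies of the snapshot matrix so that standard DMD can run on the augmented data. Here is a self-contained NumPy sketch of that slicing; the standalone function name is illustrative, while the logic mirrors the method shown above:

import numpy as np

def pseudo_hankel(X, d):
    # A (space x time) matrix becomes (space*d x time-d+1): column j
    # concatenates snapshots j, j+1, ..., j+d-1 of the original data.
    return np.concatenate(
        [X[:, i:X.shape[1] - d + i + 1] for i in range(d)], axis=0
    )

a = np.array([[1, 2, 3, 4, 5]])
print(pseudo_hankel(a, 2))  # [[1 2 3 4]
                            #  [2 3 4 5]]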
tkelestemur/pfrl
[ "388855fb30313185d43ae0d0f4b694be647a5c43" ]
[ "pfrl/policies/softmax_policy.py" ]
[ "import torch\nfrom torch import nn\nfrom torch.distributions import Categorical\n\n\nclass SoftmaxCategoricalHead(nn.Module):\n def forward(self, logits):\n return torch.distributions.Categorical(logits=logits)\n\n\n# class MultiSoftmaxCategoricalHead(nn.Module):\n# def forward(self, logits):\n# return Independent(Categorical(logits=logits), reinterpreted_batch_ndims=1)\n\n\nclass MultiCategorical():\n def __init__(self, dims=None, logits=None):\n self.dims = dims\n logits = torch.split(logits, tuple(dims), dim=1)\n self.dists = [Categorical(logits=logits_dim) for logits_dim in logits]\n\n def log_prob(self, actions):\n actions = torch.unbind(actions, dim=1)\n logprobs = torch.stack([\n dist.log_prob(action) for dist, action in zip(self.dists, actions)\n ], dim=1)\n return logprobs.sum(dim=1)\n\n def entropy(self):\n return torch.stack([dist.entropy() for dist in self.dists], dim=1).sum(dim=1)\n\n def sample(self):\n return torch.stack([dist.sample() for dist in self.dists], dim=1)\n\n def mode(self):\n return torch.stack([\n torch.argmax(dist.probs, dim=1) for dist in self.dists\n ], dim=1)\n\n\nclass MultiSoftmaxCategoricalHead(nn.Module):\n def __init__(self, dims=None):\n self.dims = dims\n super().__init__()\n\n def forward(self, logits):\n return MultiCategorical(dims=self.dims, logits=logits)\n" ]
[ [ "torch.argmax", "torch.distributions.Categorical", "torch.unbind" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
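The pfrl record above factorizes a multi-discrete action space into independent Categorical distributions, one per slice of a single logits tensor; per-slice log-probabilities are summed because the factors are independent. A short usage sketch follows, assuming the classes from softmax_policy.py above are importable at the path implied by the record, with illustrative dims and batch size:

import torch
from pfrl.policies.softmax_policy import MultiSoftmaxCategoricalHead  # module path assumed from the record's file_path

head = MultiSoftmaxCategoricalHead(dims=(3, 4))  # two sub-actions with 3 and 4 choices
logits = torch.randn(5, 7)                       # batch of 5, 3 + 4 = 7 logits per sample
dist = head(logits)
actions = dist.sample()        # shape (5, 2): one column per sub-action
logp = dist.log_prob(actions)  # shape (5,): log-probs summed over the two factors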
LaudateCorpus1/sunpy
[ "f7bdf22e5229a577c5851c1e05502f0d68b4b369", "f7bdf22e5229a577c5851c1e05502f0d68b4b369", "f7bdf22e5229a577c5851c1e05502f0d68b4b369" ]
[ "sunpy/coordinates/wcs_utils.py", "sunpy/tests/helpers.py", "sunpy/map/compositemap.py" ]
[ "import numpy as np\n\nimport astropy.units as u\nimport astropy.wcs.utils\nfrom astropy.coordinates import (\n ITRS,\n BaseCoordinateFrame,\n CartesianRepresentation,\n SkyCoord,\n SphericalRepresentation,\n)\nfrom astropy.wcs import WCS\n\nfrom sunpy import log\nfrom .frames import (\n BaseCoordinateFrame,\n Heliocentric,\n HeliographicCarrington,\n HeliographicStonyhurst,\n Helioprojective,\n SunPyBaseCoordinateFrame,\n)\n\n__all__ = ['solar_wcs_frame_mapping', 'solar_frame_to_wcs_mapping']\n\ntry:\n # TODO: Remove vendored version after Astropy 5.0\n from astropy.wcs.utils import obsgeo_to_frame\nexcept ImportError:\n def obsgeo_to_frame(obsgeo, obstime):\n \"\"\"\n Convert a WCS obsgeo property into an `~builtin_frames.ITRS` coordinate frame.\n\n Parameters\n ----------\n obsgeo : array-like\n A shape ``(6, )`` array representing ``OBSGEO-[XYZ], OBSGEO-[BLH]`` as\n returned by ``WCS.wcs.obsgeo``.\n\n obstime : time-like\n The time associated with the coordinate; it will be passed to\n `~.builtin_frames.ITRS` as the obstime keyword.\n\n Returns\n -------\n `~.builtin_frames.ITRS`\n An `~.builtin_frames.ITRS` coordinate frame\n representing the coordinates.\n\n Notes\n -----\n\n The obsgeo array as accessed on a `.WCS` object is a length 6 numpy array\n where the first three elements are the coordinate in a cartesian\n representation and the second 3 are the coordinate in a spherical\n representation.\n\n This function prioritizes reading the cartesian coordinates, and will only\n read the spherical coordinates if the cartesian coordinates are either all\n zero or any of the cartesian coordinates are non-finite.\n\n In the case where both the spherical and cartesian coordinates have some\n non-finite values the spherical coordinates will be returned with the\n non-finite values included.\n\n \"\"\"\n if (obsgeo is None\n or len(obsgeo) != 6\n or np.all(np.array(obsgeo) == 0)\n or np.all(~np.isfinite(obsgeo))\n ): # NOQA\n raise ValueError(f\"Can not parse the 'obsgeo' location ({obsgeo}). 
\"\n \"obsgeo should be a length 6 non-zero, finite numpy array\")\n\n # If the cartesian coords are zero or have NaNs in them use the spherical ones\n if np.all(obsgeo[:3] == 0) or np.any(~np.isfinite(obsgeo[:3])):\n data = SphericalRepresentation(*(obsgeo[3:] * (u.deg, u.deg, u.m)))\n\n # Otherwise we assume the cartesian ones are valid\n else:\n data = CartesianRepresentation(*obsgeo[:3] * u.m)\n\n return ITRS(data, obstime=obstime)\n\n\ndef solar_wcs_frame_mapping(wcs):\n \"\"\"\n This function registers the coordinates frames to their FITS-WCS coordinate\n type values in the `astropy.wcs.utils.wcs_to_celestial_frame` registry.\n\n Parameters\n ----------\n wcs : astropy.wcs.WCS\n\n Returns\n -------\n astropy.coordinates.BaseCoordinateFrame\n \"\"\"\n\n if hasattr(wcs, \"coordinate_frame\"):\n return wcs.coordinate_frame\n\n dateobs = wcs.wcs.dateobs or None\n\n # Get observer coordinate from the WCS auxillary information\n required_attrs = {HeliographicStonyhurst: ['hgln_obs', 'hglt_obs', 'dsun_obs'],\n HeliographicCarrington: ['crln_obs', 'hglt_obs', 'dsun_obs']}\n\n # Get rsun from the WCS auxillary information\n rsun = wcs.wcs.aux.rsun_ref\n if rsun is not None:\n rsun *= u.m\n\n # TODO: remove these errors in sunpy 4.1\n bad_attrs = [f'.{attr}' for attr in ['rsun', 'heliographic_observer']\n if hasattr(wcs, attr)]\n if len(bad_attrs):\n raise ValueError(f\"The {' and '.join(bad_attrs)} attribute(s) on a WCS \"\n \"are no longer supported.\")\n\n observer = None\n for frame, attr_names in required_attrs.items():\n attrs = [getattr(wcs.wcs.aux, attr_name) for attr_name in attr_names]\n if all([attr is not None for attr in attrs]):\n kwargs = {'obstime': dateobs}\n if rsun is not None:\n kwargs['rsun'] = rsun\n if issubclass(frame, HeliographicCarrington):\n kwargs['observer'] = 'self'\n\n observer = frame(attrs[0] * u.deg,\n attrs[1] * u.deg,\n attrs[2] * u.m,\n **kwargs)\n\n # Read the observer out of obsgeo for ground based observers\n if observer is None:\n try:\n observer = obsgeo_to_frame(wcs.wcs.obsgeo, dateobs)\n observer = SkyCoord(observer, rsun=rsun)\n except ValueError as e:\n # The helper function assumes you know the obsgeo coords you are\n # parsing are good, we are not sure, so catch the error.\n\n # This approach could lead to an invalid observer (i.e. 
one of the\n # coords being NaN), but only if the WCS has been constructed like that.\n log.debug(f\"Could not parse obsgeo coordinates from WCS:\\n{e}\")\n\n # Collect all of the possible frame attributes, although some may be removed later\n frame_args = {'obstime': dateobs}\n if observer is not None:\n frame_args['observer'] = observer\n if rsun is not None:\n frame_args['rsun'] = rsun\n\n frame_class = _sunpy_frame_class_from_ctypes(wcs.wcs.ctype)\n\n if frame_class:\n if frame_class == HeliographicStonyhurst:\n frame_args.pop('observer', None)\n if frame_class == Heliocentric:\n frame_args.pop('rsun', None)\n\n return frame_class(**frame_args)\n\n\ndef _sunpy_frame_class_from_ctypes(ctypes):\n # Truncate the ctype to the first four letters\n ctypes = {c[:4] for c in ctypes}\n\n mapping = {\n Helioprojective: {'HPLN', 'HPLT'},\n HeliographicStonyhurst: {'HGLN', 'HGLT'},\n HeliographicCarrington: {'CRLN', 'CRLT'},\n Heliocentric: {'SOLX', 'SOLY'},\n }\n\n for frame_class, ctype_pair in mapping.items():\n if ctype_pair <= ctypes:\n return frame_class\n\n\ndef _set_wcs_aux_obs_coord(wcs, obs_frame):\n \"\"\"\n Set (in-place) observer coordinate information on a WCS.\n\n Parameters\n ----------\n wcs : astropy.wcs.WCS\n obs_frame : astropy.coordinates.SkyCoord, astropy.coordinates.CoordinateFrame\n \"\"\"\n # Sometimes obs_coord can be a SkyCoord, so convert down to a frame\n if hasattr(obs_frame, 'frame'):\n obs_frame = obs_frame.frame\n\n if isinstance(obs_frame, HeliographicStonyhurst):\n wcs.wcs.aux.hgln_obs = obs_frame.lon.to_value(u.deg)\n elif isinstance(obs_frame, HeliographicCarrington):\n wcs.wcs.aux.crln_obs = obs_frame.lon.to_value(u.deg)\n else:\n raise ValueError('obs_coord must be in a Stonyhurst or Carrington frame')\n # These two keywords are the same for Carrington and Stonyhurst\n wcs.wcs.aux.hglt_obs = obs_frame.lat.to_value(u.deg)\n wcs.wcs.aux.dsun_obs = obs_frame.radius.to_value(u.m)\n\n\ndef solar_frame_to_wcs_mapping(frame, projection='TAN'):\n \"\"\"\n For a given frame, this function returns the corresponding WCS object.\n It registers the WCS coordinates types from their associated frame in the\n `astropy.wcs.utils.celestial_frame_to_wcs` registry.\n\n Parameters\n ----------\n frame : astropy.coordinates.BaseCoordinateFrame\n projection : str, optional\n\n Returns\n -------\n astropy.wcs.WCS\n \"\"\"\n wcs = WCS(naxis=2)\n\n if hasattr(frame, 'rsun'):\n wcs.wcs.aux.rsun_ref = frame.rsun.to_value(u.m)\n\n if hasattr(frame, 'observer') and frame.observer is not None:\n if isinstance(frame.observer, BaseCoordinateFrame):\n observer = frame.observer\n elif frame.observer == 'self':\n observer = frame\n _set_wcs_aux_obs_coord(wcs, observer)\n\n if isinstance(frame, SunPyBaseCoordinateFrame):\n\n if frame.obstime:\n wcs.wcs.dateobs = frame.obstime.utc.isot\n\n if isinstance(frame, Helioprojective):\n xcoord = 'HPLN' + '-' + projection\n ycoord = 'HPLT' + '-' + projection\n wcs.wcs.cunit = ['arcsec', 'arcsec']\n elif isinstance(frame, Heliocentric):\n xcoord = 'SOLX'\n ycoord = 'SOLY'\n wcs.wcs.cunit = ['deg', 'deg']\n elif isinstance(frame, HeliographicCarrington):\n xcoord = 'CRLN' + '-' + projection\n ycoord = 'CRLT' + '-' + projection\n wcs.wcs.cunit = ['deg', 'deg']\n elif isinstance(frame, HeliographicStonyhurst):\n xcoord = 'HGLN' + '-' + projection\n ycoord = 'HGLT' + '-' + projection\n wcs.wcs.cunit = ['deg', 'deg']\n\n else:\n return None\n\n wcs.wcs.ctype = [xcoord, ycoord]\n\n return 
wcs\n\n\nastropy.wcs.utils.WCS_FRAME_MAPPINGS.append([solar_wcs_frame_mapping])\nastropy.wcs.utils.FRAME_WCS_MAPPINGS.append([solar_frame_to_wcs_mapping])\n", "import sys\nimport platform\nimport warnings\nfrom pathlib import Path\nfrom functools import wraps\n\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nimport pkg_resources\nimport pytest\n\nimport astropy\nfrom astropy.wcs.wcs import FITSFixedWarning\n\nimport sunpy.map\n\n__all__ = ['skip_windows', 'skip_glymur', 'skip_ana', 'skip_32bit',\n 'warnings_as_errors', 'asdf_entry_points']\n\n# SunPy's JPEG2000 capabilities rely on the glymur library.\n# First we check to make sure that glymur imports correctly before proceeding.\ntry:\n import glymur\nexcept ImportError:\n SKIP_GLYMUR = True\nelse:\n # See if we have a C backend\n if glymur.lib.openjp2.OPENJP2:\n SKIP_GLYMUR = False\n else:\n SKIP_GLYMUR = True\n\ntry:\n from sunpy.io import _pyana # NOQA\nexcept ImportError:\n SKIP_ANA = True\nelse:\n SKIP_ANA = False\n\nif sys.maxsize > 2**32:\n SKIP_32 = False\nelse:\n SKIP_32 = True\n\nskip_windows = pytest.mark.skipif(platform.system() == 'Windows', reason=\"Windows.\")\nskip_glymur = pytest.mark.skipif(SKIP_GLYMUR, reason=\"Glymur can not be imported.\")\nskip_ana = pytest.mark.skipif(SKIP_ANA, reason=\"ANA is not available.\")\nskip_32bit = pytest.mark.skipif(SKIP_32, reason=\"Fails on a 32 bit system.\")\n\n\n# Skip if the SunPy ASDF entry points are missing.\nasdf_entry_points = pytest.mark.skipif(not list(pkg_resources.iter_entry_points('asdf_extensions', 'sunpy')),\n reason=\"No SunPy ASDF entry points.\")\n\n\[email protected]\ndef warnings_as_errors(request):\n warnings.simplefilter('error')\n\n request.addfinalizer(lambda *args: warnings.resetwarnings())\n\n\nnew_hash_library = {}\n\n\ndef get_hash_library_name():\n \"\"\"\n Generate the hash library name for this env.\n \"\"\"\n import mpl_animators\n animators_version = \"dev\" if \"+\" in mpl_animators.__version__ else mpl_animators.__version__.replace('.', '')\n ft2_version = f\"{mpl.ft2font.__freetype_version__.replace('.', '')}\"\n mpl_version = \"dev\" if \"+\" in mpl.__version__ else mpl.__version__.replace('.', '')\n astropy_version = \"dev\" if \"dev\" in astropy.__version__ else astropy.__version__.replace('.', '')\n return f\"figure_hashes_mpl_{mpl_version}_ft_{ft2_version}_astropy_{astropy_version}_animators_{animators_version}.json\"\n\n\ndef figure_test(test_function):\n \"\"\"\n A decorator for a test that verifies the hash of the current figure or the\n returned figure, with the name of the test function as the hash identifier\n in the library. 
A PNG is also created in the 'result_image' directory,\n    which is created in the current working directory.\n\n    All such decorated tests are marked with `pytest.mark.mpl_image` for convenient filtering.\n\n    Examples\n    --------\n    @figure_test\n    def test_simple_plot():\n        plt.plot([0,1])\n    \"\"\"\n    hash_library_name = get_hash_library_name()\n    hash_library_file = Path(__file__).parent / hash_library_name\n\n    @pytest.mark.remote_data\n    @pytest.mark.mpl_image_compare(hash_library=hash_library_file,\n                                   savefig_kwargs={'metadata': {'Software': None}},\n                                   style='default')\n    @wraps(test_function)\n    def test_wrapper(*args, **kwargs):\n        ret = test_function(*args, **kwargs)\n        if ret is None:\n            ret = plt.gcf()\n        return ret\n    return test_wrapper\n\n\ndef no_vso(f):\n    \"\"\"\n    Disable the VSO client from returning results via Fido during this test.\n    \"\"\"\n    from sunpy.net import Fido\n    from sunpy.net.vso import VSOClient\n\n    @wraps(f)\n    def wrapper(*args, **kwargs):\n        Fido.registry[VSOClient] = lambda *args: False\n        res = f(*args, **kwargs)\n        Fido.registry[VSOClient] = VSOClient._can_handle_query\n        return res\n\n    return wrapper\n\n\ndef fix_map_wcs(smap):\n    # Helper function to fix a WCS and silence the warnings\n    with warnings.catch_warnings():\n        warnings.filterwarnings('ignore', category=FITSFixedWarning)\n        wcs = smap.wcs\n        wcs.fix()\n    return sunpy.map.Map(smap.data, wcs)\n", "\"\"\"A Composite Map class\n\nAuthor: `Keith Hughitt <[email protected]>`\n\"\"\"\nimport matplotlib.pyplot as plt\nfrom matplotlib.collections import Collection, QuadMesh\nfrom matplotlib.contour import ContourSet, QuadContourSet\nfrom matplotlib.image import AxesImage, _ImageBase\n\nimport astropy.units as u\n\nfrom sunpy.map import GenericMap\nfrom sunpy.util import expand_list, get_keywords, get_set_methods\nfrom sunpy.util.decorators import add_common_docstring\nfrom sunpy.visualization import axis_labels_from_ctype, peek_show, wcsaxes_compat\n\n__all__ = ['CompositeMap']\n\n__author__ = \"Keith Hughitt\"\n__email__ = \"[email protected]\"\n\n# Valid keyword arguments for each plotting method\nACCEPTED_IMSHOW_KWARGS = get_keywords(\n    [GenericMap.plot, plt.Axes.imshow, AxesImage.__init__, _ImageBase.__init__]\n) | get_set_methods(AxesImage)\n\nACCEPTED_PCOLORMESH_KWARGS = (get_keywords(\n    [GenericMap.plot, plt.Axes.pcolormesh, QuadMesh.__init__, Collection.__init__]\n) | get_set_methods(QuadMesh)) - {\n    'color', 'ec', 'edgecolor', 'facecolor', 'linestyle', 'linestyles',\n    'linewidth', 'linewidths', 'ls', 'lw'\n}\n\nACCEPTED_CONTOUR_KWARGS = get_keywords(\n    [GenericMap.draw_contours, ContourSet.__init__, QuadContourSet._process_args]\n)\n\n\nclass CompositeMap:\n    \"\"\"\n    CompositeMap(map1 [,map2,..])\n\n    A Composite Map class\n\n    Parameters\n    ----------\n    args : [`~sunpy.map.Map` | string]\n        One or more maps or filepaths\n\n    Examples\n    --------\n    >>> import sunpy.map\n    >>> import sunpy.data.sample  # doctest: +REMOTE_DATA\n    >>> comp_map = sunpy.map.Map(sunpy.data.sample.AIA_171_IMAGE,\n    ...                          sunpy.data.sample.EIT_195_IMAGE,\n    ...                          
composite=True)  # doctest: +REMOTE_DATA\n    >>> comp_map.add_map(sunpy.map.Map(sunpy.data.sample.RHESSI_IMAGE))  # doctest: +REMOTE_DATA\n    >>> comp_map.peek()  # doctest: +SKIP\n\n    \"\"\"\n\n    def __init__(self, *args, **kwargs):\n        self._maps = expand_list(args)\n\n        for m in self._maps:\n            if not isinstance(m, GenericMap):\n                raise ValueError(\n                    'CompositeMap expects pre-constructed map objects.')\n\n        # Default alpha and zorder values\n        alphas = [1] * len(self._maps)\n        zorders = list(range(0, 10 * len(self._maps), 10))\n        levels = [False] * len(self._maps)\n\n        # Set z-order and alpha values for the map\n        for i, m in enumerate(self._maps):\n            m.zorder = zorders[i]\n            m.alpha = alphas[i]\n            m.levels = levels[i]\n\n    def add_map(self, amap, zorder=None, alpha=1, levels=False):\n        \"\"\"Adds a map to the CompositeMap.\n\n        Parameters\n        ----------\n        amap : `~sunpy.map.GenericMap` or subclass\n            Map instance to be added\n        zorder : `int`\n            The index to use when determining where the map should lie along\n            the z-axis; maps with higher z-orders appear above maps with lower\n            z-orders.\n        alpha : `float`\n            Opacity at which the map should be displayed. An alpha value of 0\n            results in a fully transparent image while an alpha value of 1\n            results in a fully opaque image. Values in between result in semi-\n            transparent images.\n        levels : array-like or `bool`\n            Contour levels to use when the map is plotted as contours;\n            `False` (the default) plots the map as an image.\n        \"\"\"\n        if zorder is None:\n            zorder = max([m.zorder for m in self._maps]) + 10\n\n        amap.zorder = zorder\n        amap.alpha = alpha\n        amap.levels = levels\n\n        self._maps.append(amap)\n\n    def remove_map(self, index):\n        \"\"\"Removes and returns the map with the given index.\n\n        Parameters\n        ----------\n        index : `int`\n            The index of the map in the composite map.\n\n        Returns\n        -------\n        `sunpy.map.CompositeMap`\n            A composite map with the map indexed by 'index' removed from the\n            composite map.\n        \"\"\"\n        return self._maps.pop(index)\n\n    def list_maps(self):\n        \"\"\"Prints a list of the currently included maps.\"\"\"\n        print([m.__class__ for m in self._maps])\n\n    def get_map(self, index):\n        \"\"\"Returns the map with given index \"\"\"\n        return self._maps[index]\n\n    def get_alpha(self, index=None):\n        \"\"\"\n        Returns the alpha-channel value for a layer in the composite image.\n        \"\"\"\n        if index is None:\n            return [_map.alpha for _map in self._maps]\n        else:\n            return self._maps[index].alpha\n\n    def get_levels(self, index=None):\n        \"\"\"Returns the list of contour levels for a map within the\n        composite.\n\n        Parameters\n        ----------\n        index : {`int` | None}\n            The index of the map in the composite map.\n\n        Returns\n        -------\n        `list`\n            A list of the contour levels of map at index 'index' in the\n            composite map. If index is None, then the contour levels of all\n            the maps are returned as a list of lists.\n        \"\"\"\n        if index is None:\n            return [_map.levels for _map in self._maps]\n        else:\n            return self._maps[index].levels\n\n    def get_plot_settings(self, index=None):\n        \"\"\"Returns the plot settings for a map within the composite map.\n\n        Parameters\n        ----------\n        index : {`int` | None}\n            The index of the map in the composite map.\n\n        Returns\n        -------\n        {`dict` | `list`}\n            The plot settings of the map(s) in the composite map. 
If None\n then the plot settings of all the maps are returned in a list.\n \"\"\"\n\n if index is None:\n return [_map.plot_settings for _map in self._maps]\n else:\n return self._maps[index].plot_settings\n\n def get_zorder(self, index=None):\n \"\"\"Returns the layering preference (z-order) for a map within the\n composite.\n\n Parameters\n ----------\n index : {`int` | None}\n The index of the map in the composite map.\n\n Returns\n -------\n {`float` | `list`}\n The layering order (z-order) of the map(s) in the composite\n map. If None then the layering order of all the maps is returned in\n a list.\n \"\"\"\n if index is None:\n return [_map.zorder for _map in self._maps]\n else:\n return self._maps[index].zorder\n\n def set_alpha(self, index, alpha):\n \"\"\"Sets the alpha-channel value for a layer in the composite image.\n\n Parameters\n ----------\n index : `int`\n The index of the map in the composite map.\n\n alpha : `float`\n A float in the range 0 to 1. Increasing values of alpha decrease\n the transparency of the layer (0 is complete transparency, 1\n indicates the layer will be completely opaque).\n\n Returns\n -------\n `~sunpy.map.CompositeMap`\n A composite map with alpha-channel value 'alpha' at layer 'index'.\n \"\"\"\n if 0 <= alpha <= 1:\n self._maps[index].alpha = alpha\n else:\n raise OutOfRangeAlphaValue(\"Alpha value must be between 0 and 1.\")\n\n def set_levels(self, index, levels, percent=False):\n \"\"\"\n Sets the contour levels for a layer in the composite image.\n\n Parameters\n ----------\n index : `int`\n The index of the map in the composite map.\n\n levels : array-like\n The contour levels.\n\n percent : `bool`\n If True, the input 'levels' are interpreted as percentages relative\n to the maximum value of the data in layer 'index' of the composite\n map. 
If False, the contour levels are set directly from 'levels'.\n\n        Returns\n        -------\n        `~sunpy.map.CompositeMap`\n            A composite map with contour levels 'levels' at layer 'index'.\n        \"\"\"\n        if percent is False:\n            self._maps[index].levels = levels\n        else:\n            self._maps[index].levels = u.Quantity(levels, u.percent)\n\n    def set_plot_settings(self, index, plot_settings):\n        \"\"\"Sets the plot settings for a layer in the composite image.\n\n        Parameters\n        ----------\n        index : `int`\n            The index of the map in the composite map.\n\n        plot_settings : `dict`\n            A dictionary of plot settings to apply to the layer.\n\n        Returns\n        -------\n        `~sunpy.map.CompositeMap`\n            A composite map with plot settings 'plot_settings' at layer\n            'index'.\n        \"\"\"\n        self._maps[index].plot_settings = plot_settings\n\n    def set_zorder(self, index, zorder):\n        \"\"\"Set the layering order (z-order) for a map within the\n        composite.\n\n        Parameters\n        ----------\n        index : `int`\n            The index of the map in the composite map.\n\n        zorder : `int`\n            The layer order.\n\n        Returns\n        -------\n        `~sunpy.map.CompositeMap`\n            A composite map with the map at layer 'index' having layering order\n            'zorder'.\n        \"\"\"\n        self._maps[index].zorder = zorder\n\n    def draw_limb(self, index=None, axes=None, **kwargs):\n        \"\"\"Draws a circle representing the solar limb.\n\n        Parameters\n        ----------\n        index : `int`\n            Map index to use to plot limb.\n\n        axes : `matplotlib.axes.Axes` or None\n            Axes to plot limb on or None to use current axes.\n\n        Returns\n        -------\n        `matplotlib.axes.Axes`\n\n        Notes\n        -----\n        Keyword arguments are passed onto `sunpy.map.mapbase.GenericMap.draw_limb`.\n        \"\"\"\n        if index is None:\n            for i, amap in enumerate(self._maps):\n                if hasattr(amap, 'rsun_obs'):\n                    index = i\n                    break\n\n        # Check `index is None` first so the map list is never indexed with None\n        if index is None or not hasattr(self._maps[index], 'rsun_obs'):\n            raise ValueError(\"Specified index does not have all\"\n                             \" the required attributes to draw limb.\")\n\n        return self._maps[index].draw_limb(axes=axes, **kwargs)\n\n    @u.quantity_input\n    def draw_grid(self, index=None, axes=None, grid_spacing: u.deg = 20*u.deg, **kwargs):\n        \"\"\"Draws a grid over the surface of the Sun.\n\n        Parameters\n        ----------\n        index: int\n            Index to determine which map to use to draw grid.\n        axes: `~matplotlib.axes.Axes` or None\n            Axes to plot limb on or None to use current axes.\n        grid_spacing : `float`\n            Spacing (in degrees) for longitude and latitude grid.\n\n        Returns\n        -------\n        `matplotlib.axes.Axes` object\n\n        Notes\n        -----\n        Keyword arguments are passed onto `sunpy.map.mapbase.GenericMap.draw_grid`.\n        \"\"\"\n        needed_attrs = ['rsun_meters', 'dsun', 'heliographic_latitude',\n                        'heliographic_longitude']\n        if index is None:\n            for i, amap in enumerate(self._maps):\n                if all([hasattr(amap, k) for k in needed_attrs]):\n                    index = i\n                    break\n\n        # Check `index is None` first so the map list is never indexed with None\n        if index is None or not all([hasattr(self._maps[index], k)\n                                     for k in needed_attrs]):\n            raise ValueError(\"Specified index does not have all\"\n                             \" the required attributes to draw grid.\")\n\n        ax = self._maps[index].draw_grid(axes=axes, grid_spacing=grid_spacing, **kwargs)\n        return ax\n\n    @add_common_docstring(\n        ACCEPTED_IMSHOW_KWARGS=sorted(ACCEPTED_IMSHOW_KWARGS),\n        ACCEPTED_PCOLORMESH_KWARGS=sorted(ACCEPTED_PCOLORMESH_KWARGS),\n        ACCEPTED_CONTOUR_KWARGS=sorted(ACCEPTED_CONTOUR_KWARGS)\n    )\n    def plot(self, axes=None, annotate=True,\n             title=\"SunPy Composite Plot\", **matplot_args):\n        \"\"\"Plots the composite map object by calling :meth:`~sunpy.map.GenericMap.plot`\n        or :meth:`~sunpy.map.GenericMap.draw_contours`.\n\n        
By default, each map is plotted as an image. If a given map has levels\n        defined (via :meth:`~sunpy.map.CompositeMap.set_levels`), that map will instead\n        be plotted as contours.\n\n        Parameters\n        ----------\n\n        axes: `~matplotlib.axes.Axes` or None\n            If provided the image will be plotted on the given axes. Else the\n            current matplotlib axes will be used.\n\n        annotate : `bool`\n            If True, the data is plotted at its natural scale, with\n            title and axis labels.\n\n        title : `str`\n            Title of the composite map.\n\n        **matplot_args : `dict`\n            Any additional Matplotlib arguments that should be used\n            when plotting.\n\n        Returns\n        -------\n        ret : `list`\n            List of axes image or quad contour sets that have been plotted.\n\n        Notes\n        -----\n        Images are plotted using either `~matplotlib.axes.Axes.imshow` or\n        `~matplotlib.axes.Axes.pcolormesh`, and contours are plotted using\n        `~matplotlib.axes.Axes.contour`.\n        The Matplotlib arguments accepted by the plotting method are passed to it.\n        (For compatibility reasons, we enforce a more restrictive set of\n        accepted `~matplotlib.axes.Axes.pcolormesh` arguments.)\n        If any Matplotlib arguments are not used by any plotting method,\n        a ``TypeError`` will be raised.\n        The ``sunpy.map.compositemap`` module includes variables which list the\n        full set of arguments passed to each plotting method. These are:\n\n        >>> import sunpy.map.compositemap\n        >>> sorted(sunpy.map.compositemap.ACCEPTED_IMSHOW_KWARGS)\n        {ACCEPTED_IMSHOW_KWARGS}\n        >>> sorted(sunpy.map.compositemap.ACCEPTED_PCOLORMESH_KWARGS)\n        {ACCEPTED_PCOLORMESH_KWARGS}\n        >>> sorted(sunpy.map.compositemap.ACCEPTED_CONTOUR_KWARGS)\n        {ACCEPTED_CONTOUR_KWARGS}\n\n        If a transformation is required to overlay the maps with the correct\n        alignment, the plot limits may need to be manually set because\n        Matplotlib autoscaling may not work as intended.\n        \"\"\"\n\n        # If axes are not provided, create a WCSAxes based on the first map\n        if not axes:\n            axes = wcsaxes_compat.gca_wcs(self._maps[0].wcs)\n\n        if annotate:\n            axes.set_xlabel(axis_labels_from_ctype(self._maps[0].coordinate_system[0],\n                                                   self._maps[0].spatial_units[0]))\n            axes.set_ylabel(axis_labels_from_ctype(self._maps[0].coordinate_system[1],\n                                                   self._maps[0].spatial_units[1]))\n            axes.set_title(title)\n\n        # Checklist to determine unused keywords in `matplot_args`\n        unused_kwargs = set(matplot_args.keys())\n\n        # Define a list of plotted objects\n        ret = []\n        # Plot layers of composite map\n        for m in self._maps:\n            # Parameters for plotting\n            params = {\n                \"alpha\": m.alpha,\n                \"zorder\": m.zorder,\n            }\n            params.update(matplot_args)\n\n            # The request to show a map layer rendered as a contour is indicated by a\n            # non False levels property.\n            if m.levels is False:\n                # We tell GenericMap.plot() that we need to autoalign the map\n                if wcsaxes_compat.is_wcsaxes(axes):\n                    params['autoalign'] = True\n\n                # Filter `matplot_args`\n                if params.get('autoalign', None) in (True, 'pcolormesh'):\n                    accepted_kwargs = ACCEPTED_PCOLORMESH_KWARGS\n                else:\n                    accepted_kwargs = ACCEPTED_IMSHOW_KWARGS\n                for item in matplot_args.keys():\n                    if item not in accepted_kwargs:\n                        del params[item]\n                    else:  # mark as used\n                        unused_kwargs -= {item}\n\n                params['annotate'] = False\n                ret.append(m.plot(**params))\n            else:\n                # Filter `matplot_args`\n                for item in matplot_args.keys():\n                    if item not in ACCEPTED_CONTOUR_KWARGS:\n                        del params[item]\n                    else:  # mark as used\n                        unused_kwargs -= {item}\n\n                ret.append(m.draw_contours(m.levels, **params))\n\n            # Set the label of the first line so a legend can be created\n            
ret[-1].collections[0].set_label(m.name)\n\n        if len(unused_kwargs) > 0:\n            raise TypeError(f'plot() got unexpected keyword arguments {unused_kwargs}')\n\n        # Adjust axes extents to include all data\n        axes.axis('image')\n\n        # Set current image (makes colorbar work)\n        plt.sci(ret[0])\n        return ret\n\n    @peek_show\n    def peek(self, colorbar=True, draw_limb=True, draw_grid=False, **matplot_args):\n        \"\"\"\n        Displays a graphical overview of the data in this object for user evaluation.\n        For the creation of plots, users should instead use the `~sunpy.map.CompositeMap.plot`\n        method and Matplotlib's pyplot framework.\n\n        Parameters\n        ----------\n        colorbar : `bool` or `int`\n            Whether to display a colorbar next to the plot.\n            If specified as an integer a colorbar is plotted for that index.\n\n        draw_limb : `bool`\n            If true, draws a circle representing the solar limb.\n\n        draw_grid : `bool`\n            If true, draws a grid over the surface of the Sun.\n\n        **matplot_args : dict\n            Any additional Matplotlib imshow arguments that should be used\n            when plotting.\n        \"\"\"\n\n        # Create a figure and add title and axes\n        figure = plt.figure()\n\n        axes = figure.add_subplot(111, projection=self._maps[0])\n\n        ret = self.plot(axes=axes, **matplot_args)\n\n        if not isinstance(colorbar, bool) and isinstance(colorbar, int):\n            figure.colorbar(ret[colorbar])\n        elif colorbar:\n            plt.colorbar()\n        if draw_limb:\n            self.draw_limb(axes=axes)\n\n        if isinstance(draw_grid, bool):\n            if draw_grid:\n                self.draw_grid(axes=axes)\n\n        elif isinstance(draw_grid, (int, float)):\n            self.draw_grid(axes=axes, grid_spacing=draw_grid)\n        else:\n            raise TypeError(\"draw_grid should be bool, int or float\")\n\n        return figure\n\n\nclass OutOfRangeAlphaValue(ValueError):\n    \"\"\"Exception to raise when an alpha value outside of the range 0-1 is\n    requested.\n    \"\"\"\n" ]
[ [ "numpy.all", "numpy.array", "numpy.isfinite" ], [ "matplotlib.__version__.replace", "matplotlib.pyplot.gcf", "matplotlib.ft2font.__freetype_version__.replace" ], [ "matplotlib.pyplot.colorbar", "matplotlib.pyplot.sci", "matplotlib.pyplot.figure" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
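The vendored `obsgeo_to_frame` in the record above prefers the cartesian OBSGEO-[XYZ] triplet and only falls back to the spherical OBSGEO-[BLH] values when the cartesian ones are all zero or non-finite. A minimal pure-numpy sketch of just that selection rule (no astropy needed; the sample values below are made up):

import numpy as np

def pick_obsgeo_representation(obsgeo):
    # Mirror the validation in the vendored obsgeo_to_frame above
    obsgeo = np.asarray(obsgeo, dtype=float)
    if obsgeo.shape != (6,) or np.all(obsgeo == 0) or np.all(~np.isfinite(obsgeo)):
        raise ValueError(f"Can not parse the 'obsgeo' location ({obsgeo}).")
    # The cartesian triplet wins unless it is all zero or contains NaN/inf
    if np.all(obsgeo[:3] == 0) or np.any(~np.isfinite(obsgeo[:3])):
        return 'spherical', obsgeo[3:]
    return 'cartesian', obsgeo[:3]

# Cartesian part zeroed out, so the spherical half is selected
print(pick_obsgeo_representation([0, 0, 0, 10.0, 52.2, 100.0]))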
icane/demographic-indicators
[ "b1c394a4497e8e4c0189bf4c0518ce38fb873d4c" ]
[ "etl/deaths.py" ]
[ "\"\"\"Deaths indicators.\"\"\"\n\nfrom etl.common import to_json_stat, write_to_file\n\nfrom etl.config_deaths import deaths_cfg as cfg\n\nfrom etlstat.extractor.extractor import xlsx\n\nimport json\n\nimport pandas as pd\n\n\ndef transform(df, periods, prefix=''):\n \"\"\"Slice dataframe. Generate time period column.\n \n df (dataframe): dataset\n periods (int): number of time periods\n prefix (str): prefix for time periods\n \"\"\"\n for i in range(0, len(df)):\n period1 = str(df.loc[i, 'Año'])\n period2 = '{:0>2}'.format(df.loc[i, 'Mes'])\n df.loc[i, 'period'] = prefix + period1 + '-' + period2\n\n df.drop(columns={'Año', 'Mes'}, axis=1, inplace=True)\n df.rename(columns={'period': 'Mes'}, inplace=True)\n df = df.tail(periods)\n df = df.round(2)\n return df\n\ndef replace_month(json_str):\n \"\"\"Replace month number by its name.\"\"\"\n json_str = json_str.replace('-01\"', '-Ene\"')\n json_str = json_str.replace('-02\"', '-Feb\"')\n json_str = json_str.replace('-03\"', '-Mar\"')\n json_str = json_str.replace('-04\"', '-Abr\"')\n json_str = json_str.replace('-05\"', '-May\"')\n json_str = json_str.replace('-06\"', '-Jun\"')\n json_str = json_str.replace('-07\"', '-Jul\"')\n json_str = json_str.replace('-08\"', '-Ago\"')\n json_str = json_str.replace('-09\"', '-Sep\"')\n json_str = json_str.replace('-10\"', '-Oct\"')\n json_str = json_str.replace('-11\"', '-Nov\"')\n json_str = json_str.replace('-12\"', '-Dic\"')\n return json_str\n\n# Read input files\ndata = xlsx(cfg.path.input)\n\n# Datasets\ndf_global = pd.DataFrame()\nindicators = []\nfor key in cfg.series:\n print(key)\n variables = [\n 'Año', 'Mes',\n cfg.series[key].variables[0],\n cfg.series[key].moving_avg[0]]\n if (len(cfg.series[key].variables) == 2):\n variables.append(cfg.series[key].variables[1])\n variables.append(cfg.series[key].moving_avg[1])\n df = data[cfg.file]\\\n [cfg.series[key].sheet][variables].copy()\n\n # Drop NA rows, if any\n df.dropna(axis=0, how='all', inplace=True)\n\n # Rename variables\n df.rename(\n columns={\n cfg.series[key].variables[0]: 'Cantabria',\n cfg.series[key].moving_avg[0]: 'Cantabria_MM'},\n inplace=True)\n if (len(cfg.series[key].variables) == 2):\n df.rename(\n columns={\n cfg.series[key].variables[1]: 'España',\n cfg.series[key].moving_avg[1]: 'España_MM'}, \n inplace=True)\n\n # Remove .0 from Año and Mes\n df['Año'] = df['Año'].astype(str).replace('\\.0', '', regex=True)\n df['Mes'] = df['Mes'].astype(str).replace('\\.0', '', regex=True)\n\n # Merge global dataset\n df_cant = df[['Año', 'Mes', 'Cantabria']].copy()\n df_cant = transform(df_cant, cfg.periods.global_deaths, 'Cantabria - ')\n df_cant.set_index('Mes', inplace=True)\n df_cant = df_cant.transpose()\n df_cant.insert(0, 'Categoria', cfg.series[key].category)\n df_cant[' - Indicadores'] = cfg.series[key].label\n if (len(cfg.series[key].variables) == 2):\n df_esp = df[['Año', 'Mes', 'España']].copy()\n df_esp = transform(df_esp, cfg.periods.global_deaths, 'España - ')\n df_esp.set_index('Mes', inplace=True)\n df_esp = df_esp.transpose()\n df_esp[' - Indicadores'] = cfg.series[key].label\n df_cant = df_cant.merge(df_esp, on=' - Indicadores')\n\n indicators.append(df_cant)\n\n # Generate JSON-Stat dataset\n df = transform(df, cfg.periods.deaths)\n vars = ['Cantabria', 'Cantabria_MM']\n if (len(cfg.series[key].variables) == 2):\n vars.append('España')\n vars.append('España_MM')\n json_file = to_json_stat(\n df,\n ['Mes'],\n vars,\n cfg.series[key].source)\n json_obj = json.loads(json_file)\n 
json_obj['dimension']['Variables']['category']['unit'] = \\\n cfg.series[key].unit\n json_obj['note'] = cfg.series[key].note\n json_file = json.dumps(json_obj)\n json_file = replace_month(json_file)\n write_to_file(json_file, cfg.path.output + cfg.series[key].json)\n\n# Generate CSV global dataset\ndf_global = pd.concat(indicators, axis=0, verify_integrity=False)\ndf_global.to_csv(cfg.path.output + cfg.globals.csv, index=False)\n\nprint('\\nEnd of process. Files generated successfully.')\n" ]
[ [ "pandas.concat", "pandas.DataFrame" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] } ]
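In `etl/deaths.py` above, `transform` builds the period label by joining the year with a zero-padded month and then renames that column back to 'Mes'. A standalone sketch of the labelling step on hypothetical toy data:

import pandas as pd

df = pd.DataFrame({'Año': ['2019', '2019'], 'Mes': ['1', '12']})

for i in range(len(df)):
    period1 = str(df.loc[i, 'Año'])
    period2 = '{:0>2}'.format(df.loc[i, 'Mes'])  # zero-pad the month to two digits
    df.loc[i, 'period'] = 'Cantabria - ' + period1 + '-' + period2

print(df['period'].tolist())  # ['Cantabria - 2019-01', 'Cantabria - 2019-12']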
codecakes/random_games
[ "1e670021ec97a196726e937e658878dc63ba9d34" ]
[ "probability_combinatorics/linear_regression.py" ]
[ "from math import sqrt\nfrom itertools import izip\nfrom numpy import mean\n\nfrom py_variance_std import t_percentile\n\ndef calc_slope(r, sdy, sdx): return r * (float(sdy)/sdx)\n\ndef line_fitting(x_arr, y_arr):\n    \"\"\"\n    using straight line y = mx + c;\n    m (of sample data points) = Covariance(X,Y)/Covariance(X,X) =\n    E[(X - E(X))(Y - E(Y))]/E[(X - E(X))^2]\n    Another way: Look at calc_slope given STD Y and STD X and r\n    \"\"\"\n    xbar = mean(x_arr)\n    ybar = mean(y_arr)\n    xsqr_bar = mean([i**2 for i in x_arr])\n    xybar = mean([i*j for i,j in izip(x_arr, y_arr)])\n    #calculate the slope m\n    m = (xbar*ybar - xybar)/(xbar**2 - xsqr_bar)\n    #calculate the y intercept\n    c = ybar - m*xbar\n    return ybar,m,xbar,c\n\ndef trace_line(x_arr, y_arr, x_start = 0):\n    y, m, x, c = line_fitting(x_arr, y_arr)\n    return [(i, (m*i)+c) for i in [x_start]+list(x_arr)]\n\ndef line_error(**params):\n    \"\"\"\n    The least squares estimates represent the minimum value;\n    http://www.pmean.com/10/LeastSquares.html\n    params: x_arr, y_arr, m,c\n    \"\"\"\n    if 'x_arr' in params and 'y_arr' in params:\n        if ('m' in params and 'c' in params):\n            m,c = params['m'], params['c']\n        else:\n            y, m, x, c = line_fitting(params['x_arr'], params['y_arr'])\n        #return squared difference between y,actual and y,calculated/predicted\n        return [(yi - ((m*xi)+c))**2 for yi,xi in izip(params['y_arr'], params['x_arr'])]\n\n\ndef std_error_y_estimate(n, y_line_error_var):\n    \"\"\"\n    To construct a confidence interval for the slope of the regression line, we need to know the standard error of the sampling distribution of the slope;\n\n    n: total samples in x or y;\n    y_line_error_var: sum(line_error(**params))\n\n    df = n-2 since two parameters are estimated in linear regression.\n    #calculate \\summ(yi - y_cap)^2 variance\n    line_error_var = line_error(**params)\n    \"\"\"\n    return sqrt(float(y_line_error_var)/(n-2))\n\ndef x_line_std(x_arr):\n    xbar = mean(x_arr)\n    return sqrt(sum([(xi - xbar)**2 for xi in x_arr]))\n\ndef std_error_linear(se_y, x_line_std):\n    \"\"\"\n    se_y: from std_error_y_estimate(n, y_line_error_var)\n    #calculate x - xbar variance and then STD\n    xbar = mean(x_arr)\n    x_line_std: x_line_std(x_arr, xbar)\n    \"\"\"\n    return se_y/x_line_std\n\ndef find_std_err_linear(x_arr, y_arr, n_sample):\n    #Find SE of SEy/SEx\n    #find descriptive params\n    ybar,m,xbar,c = line_fitting(x_arr, y_arr)\n    #find error in x\n    se_x = x_line_std(x_arr)\n    #find error in y\n    y_line_error = sum(line_error(**dict(x_arr=x_arr, y_arr=y_arr, m=m, c=c)))\n    se_y = std_error_y_estimate(n_sample, y_line_error)\n    #return standard error\n    return std_error_linear(se_y, se_x)\n\ndef r_squared(x_arr, y_arr):\n    \"\"\"\n    Equivalent to squaring the r value from scipy.stats.pearsonr,\n    using functions in this module: linear_regression.py.\n\n    Also called Coefficient of Determination.\n    It simply means total_variation_line: how well the best fit line\n    fits (or misses) the scattered points. High value means good fit.\n    How much % is explained by the Fitted Line.\n    High R^2 = good model, probably profitable,\n    Low R^2 = bad model, probably dangerous\n    \"\"\"\n    y, m, x, c = line_fitting(x_arr, y_arr)\n    total_var_y = ([(i-y)**2 for i in y_arr]) #(y-ybar)^2\n    #print sum(total_var_y)\n    #\\summ(yi - mxi * c)^2/\\summ(yi - ybar)^2\n    variation_not_by_line = float(sum(line_error(x_arr=x_arr, y_arr=y_arr, m=m, c=c)))/sum(total_var_y)\n    #R squared\n    return 1 - variation_not_by_line #total variation in x, variation in line\n\ndef calc_tscore_from_r(r2,n):\n    \"\"\"\n    Hypothesis Testing if relationship is due to sampling error.\n    r2: coefficient of determination (R^2)\n    n: number of elements in a sample\n    Returns: t score\n    For looking at critical t val and comparing the t score,\n    df = n-2 since there are 2 variables for correlation under test.\n    \"\"\"\n    return sqrt(r2*float(n-2)/(1 - r2))\n\ndef calc_p_from_tval_from_r(r,n, one_tailed= 0 ):\n    return t_percentile(calc_tscore_from_r(r,n), n-2, one_tailed= one_tailed)\n\ndef margin_error_linear(tscore, se): return tscore * se\n\ndef ci_linear(slope, tscore, se):\n    margin_error = margin_error_linear(tscore, se)\n    return (slope - margin_error, slope + margin_error)\n" ]
[ [ "numpy.mean" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
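`line_fitting` above derives the slope from sample means, m = (xbar*ybar - mean(x*y)) / (xbar**2 - mean(x**2)), and the intercept from c = ybar - m*xbar. A quick numpy check of that closed form on toy points lying exactly on y = 2x + 1:

import numpy as np

x = np.array([0.0, 1.0, 2.0, 3.0])
y = 2.0 * x + 1.0

# Same algebra as line_fitting above
m = (x.mean() * y.mean() - (x * y).mean()) / (x.mean() ** 2 - (x ** 2).mean())
c = y.mean() - m * x.mean()
print(m, c)  # 2.0 1.0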
LeoRoweBrown/ckvpy
[ "fff27847f5577750ae5860e3fdff81877fa4455a" ]
[ "tools/photon_yield.py" ]
[ "import numpy as np\nfrom scipy.integrate import simps\nimport scipy.constants as const\n\ndef compute(theta_in, f, beta, L, n=None):\n    \"\"\"compute number of photons due to Frank-Tamm and Fresnel equations\n    theta_in (ndarray/list[float]): Angles in chosen wavelength range\n    f (ndarray/list[float]): Frequencies in chosen wavelength range\n    beta (float): Ratio of electron speed to speed of light\n    L (float): path length in the radiating medium\n    n (ndarray/list[float]): Refractive index in chosen wavelength range\n\n    TODO: replace n = 1/(beta*np.cos(theta_in)) with actual n_eff\n    \"\"\"\n    if n is None:\n        print(\"Using Cherenkov angle to derive n instead of d(omega)/dk\")\n        n = 1/(beta*np.cos(theta_in))\n    r_s = np.absolute(\n        (n*np.cos(theta_in) - np.sqrt(1-(n*np.sin(theta_in)**2.)))/ \\\n        (n*np.cos(theta_in) + np.sqrt(1-(n*np.sin(theta_in)**2.)))\n        )\n    r_p = np.absolute(\n        (n*np.sqrt(1-(n*np.sin(theta_in)**2.)) - np.cos(theta_in))/ \\\n        (n*np.sqrt(1-(n*np.sin(theta_in)**2.)) + np.cos(theta_in))\n        )\n    r_eff = (r_p + r_s)/2.\n    # print(r_eff)\n    t_eff = 1-r_eff\n    print(\"Transmission coeff:\", t_eff)\n    # derive angles inside medium with snell's law for Fresnel equation\n    # theta_in = np.arcsin(n*np.sin(theta))\n    # n_photons = \\\n    #     (const.fine_structure/(const.hbar*const.c**2.))*\\\n    #     simps((1-1./(beta**2.*n**2.))*t_eff, x=const.h*f)\n    # need even spaced intervals -> interpolate\n    # integral is over f\n    f_interp = np.linspace(np.min(f), np.max(f), num=30)\n    theta_interp = np.interp(f_interp, f, theta_in)\n    t_eff_interp = np.interp(f_interp, f, t_eff)\n    n_photons = \\\n        L*(const.fine_structure/(const.hbar*const.c))* \\\n        simps(np.sin(theta_interp)**2.*t_eff_interp*const.h, x=f_interp)\n    print(n_photons, \"photons\")\n    return n_photons" ]
[ [ "numpy.min", "numpy.cos", "numpy.sin", "numpy.max", "numpy.interp" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
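When `compute` above receives no refractive index, it inverts the Cherenkov relation n = 1/(beta*cos(theta)). A toy single-angle evaluation of that inversion and of the effective transmission, reusing the same r_s/r_p expressions as the code (the beta and n values are hypothetical):

import numpy as np

beta, n = 0.999, 1.47                # made-up speed ratio and refractive index
theta = np.arccos(1.0 / (beta * n))  # inverts n = 1/(beta*cos(theta))

# Same reflectance expressions as compute() above, evaluated at one angle
root = np.sqrt(1 - (n * np.sin(theta) ** 2.))
r_s = np.absolute((n * np.cos(theta) - root) / (n * np.cos(theta) + root))
r_p = np.absolute((n * root - np.cos(theta)) / (n * root + np.cos(theta)))
print("Transmission coeff:", 1 - (r_s + r_p) / 2.)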
andrewschreiber/numpy-saliency
[ "2e788a1150f6e160f2271cbb4f20747559f243c0" ]
[ "model/network.py" ]
[ "import numpy as np\nimport pickle\nfrom model.loss import cross_entropy\nfrom model.layers import Conv2D, Maxpool2D, Dense, Flatten, ReLu, Softmax\n\n\nclass LeNet5:\n \"\"\"Implementation of LeNet 5 for MNIST\n http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf\n \"\"\"\n\n def __init__(self, weights_path=None):\n lr = 0.01\n layers = []\n layers.append(Conv2D(n_filter=6, n_channel=1,\n kernel_size=5, padding=2, stride=1,\n learning_rate=lr, name='conv1'))\n layers.append(ReLu())\n layers.append(Maxpool2D(\n pool_size=2, stride=2, name='maxpool2'))\n layers.append(Conv2D(n_filter=16, n_channel=6,\n kernel_size=5, padding=0, stride=1,\n learning_rate=lr, name='conv3'))\n layers.append(ReLu())\n layers.append(Maxpool2D(\n pool_size=2, stride=2, name='maxpool4'))\n layers.append(Conv2D(n_filter=120, n_channel=16,\n kernel_size=5, padding=0, stride=1,\n learning_rate=lr, name='conv5'))\n layers.append(ReLu())\n layers.append(Flatten())\n layers.append(Dense(\n num_inputs=120, num_outputs=84, learning_rate=lr, name='dense6'))\n layers.append(ReLu())\n layers.append(Dense(\n num_inputs=84, num_outputs=10, learning_rate=lr, name='dense7'))\n layers.append(Softmax())\n self.layers = layers\n if weights_path is not None:\n self._load(weights_path)\n\n def _load(self, weights_path):\n with open(weights_path, 'rb') as handle:\n b = pickle.load(handle)\n self.layers[0].load(b[0]['conv1.weights'], b[0]['conv1.bias'])\n self.layers[3].load(b[3]['conv3.weights'], b[3]['conv3.bias'])\n self.layers[6].load(b[6]['conv5.weights'], b[6]['conv5.bias'])\n self.layers[9].load(b[9]['dense6.weights'], b[9]['dense6.bias'])\n self.layers[11].load(b[11]['dense7.weights'], b[11]['dense7.bias'])\n\n def train(self, training_data, training_labels, batch_size, epochs,\n weights_path):\n print(\"Training LeNet...\")\n total_acc = 0\n for epoch in range(epochs):\n # batch training data\n for batch_index in range(0, training_data.shape[0], batch_size):\n loss = 0\n acc = 0\n\n data = training_data[batch_index:batch_index+batch_size]\n labels = training_labels[batch_index:batch_index+batch_size]\n\n # iterate over batch\n for b in range(len(data)):\n x = data[b]\n y = labels[b]\n\n # forward pass\n output = self.forward(x)\n if np.argmax(output) == np.argmax(y):\n acc += 1\n total_acc += 1\n loss += cross_entropy(output, y)\n\n # backward pass\n # update network on each datapoint for simplicity\n dy = y\n for l in range(len(self.layers)-1, -1, -1):\n dout = self.layers[l].backward(dy)\n dy = dout\n\n # print performance\n loss /= len(data)\n batch_acc = float(acc)/float(len(data))\n train_acc = float(total_acc) / \\\n float((batch_index+len(data)+epoch*len(training_data)))\n\n print(('| Epoch: {0:d}/{1:d} | Iter:{2:d} | Loss: {3:.2f} | ' +\n 'BatchAcc: {4:.2f} | TrainAcc: {5:.2f} |')\n .format(epoch+1, epochs, batch_index+len(data),\n loss, batch_acc, train_acc))\n\n # save parameters after each epoch\n print(\"Saving model to\", weights_path)\n layers = [layer.parameters() for layer in self.layers]\n with open(weights_path, 'wb') as handle:\n pickle.dump(layers, handle, protocol=pickle.HIGHEST_PROTOCOL)\n\n def forward(self, x):\n for l in range(len(self.layers)):\n output = self.layers[l].forward(x)\n x = output\n return output\n\n def predict(self, x):\n output = self.forward(x)\n digit = np.argmax(output)\n probability = output[0, digit]\n return digit, probability\n\n def test(self, data, labels):\n print(\"Testing LeNet...\")\n total_acc = 0\n test_size = len(data)\n for i in range(test_size):\n x = data[i]\n y = 
labels[i]\n if np.argmax(self.forward(x)) == np.argmax(y):\n total_acc += 1\n\n print(\"== Correct: {}/{}. Accuracy: {} ==\"\n .format(total_acc, test_size, total_acc/test_size))\n" ]
[ [ "numpy.argmax" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
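`LeNet5.forward` and the training loop above rely only on each layer exposing `forward` and `backward`: activations are threaded forward through `layers`, and gradients are walked back in reverse index order. The pattern in isolation (the `Scale` layer here is a hypothetical stand-in, not part of the repo):

import numpy as np

class Scale:
    """Toy layer with the same forward/backward interface as the layers above."""
    def __init__(self, factor):
        self.factor = factor
    def forward(self, x):
        return x * self.factor
    def backward(self, dy):
        return dy * self.factor  # chain rule for y = factor * x

layers = [Scale(2.0), Scale(3.0)]

x = np.ones((2, 2))
for layer in layers:          # forward pass, as in LeNet5.forward
    x = layer.forward(x)

dy = np.ones((2, 2))
for l in range(len(layers) - 1, -1, -1):  # backward pass, as in LeNet5.train
    dy = layers[l].backward(dy)

print(x[0, 0], dy[0, 0])  # 6.0 6.0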
Seb-Good/deepecg
[ "c99fbe80718ee9969936154ae2c1a04d81c9b246" ]
[ "deepecg/training/model/disc/model.py" ]
[ "\"\"\"\nmodel.py\n--------\nThis module provides a class and methods for building and managing a model with tensorflow.\nBy: Sebastian D. Goodfellow, Ph.D., 2018\n\"\"\"\n\n# Compatibility imports\nfrom __future__ import absolute_import, division, print_function\n\n# 3rd party imports\nimport os\nimport sys\nimport json\nimport pickle\nimport tensorflow as tf\n\n# Local imports\nfrom deepecg.training.model.disc.graph import Graph\nfrom deepecg.training.networks.deep_ecg_v1 import DeepECGV1\nfrom deepecg.training.networks.deep_ecg_v2 import DeepECGV2\nfrom deepecg.training.networks.deep_ecg_v3 import DeepECGV3\nfrom deepecg.training.networks.deep_ecg_v4 import DeepECGV4\nfrom deepecg.training.networks.deep_ecg_v5 import DeepECGV5\nfrom deepecg.training.networks.deep_ecg_v6 import DeepECGV6\nfrom deepecg.training.networks.deep_ecg_v7 import DeepECGV7\n\n\nclass Model(object):\n\n \"\"\"A class for managing a model through training.\"\"\"\n\n def __init__(self, model_name, network_name, network_parameters, save_path, data_path, max_to_keep):\n\n # Set input parameters\n self.model_name = model_name\n self.network_name = network_name\n self.network_parameters = network_parameters\n self.save_path = os.path.join(save_path, self.model_name)\n self.data_path = data_path\n self.max_to_keep = max_to_keep\n\n # Set attributes\n self.sess = None\n self.graph = None\n self.network = None\n\n # Create project file structure\n self._create_folder_structure()\n\n # Save parameters\n self._save_parameters()\n\n # Initialize graph\n self.initialize_graph()\n\n def initialize_graph(self):\n\n # Get neural network\n self.network = self._get_neural_network()\n\n # Save network object\n self._pickle_network()\n\n # Build computational graph\n self.graph = Graph(network=self.network, save_path=self.save_path, data_path=self.data_path,\n max_to_keep=self.max_to_keep)\n\n # Start session\n self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))\n\n # Initialize global variables\n self.sess.run(self.graph.init_global)\n\n @classmethod\n def build_training_graph(cls, save_path):\n \"\"\"Build training graph.\"\"\"\n # Import model parameters\n model_parameters = cls._import_model_parameters(save_path=save_path)\n\n # Import network parameters\n network_parameters = cls._import_network_parameters(save_path=save_path)\n\n # Initialize Model\n return cls(model_name=model_parameters['model_name'], network_name=model_parameters['network_name'],\n network_parameters=network_parameters, save_path=os.path.dirname(save_path),\n data_path=model_parameters['data_path'], max_to_keep=model_parameters['max_to_keep'])\n\n def restore(self, global_step):\n \"\"\"Restore model from checkpoint.\"\"\"\n # Initialize graph\n if self.sess._closed:\n self.initialize_graph()\n\n # Restore checkpoint\n self.graph.saver.restore(sess=self.sess, save_path=os.path.join(self.save_path, 'checkpoints', global_step))\n\n def close_session(self):\n \"\"\"Close any active sessions.\"\"\"\n try:\n self.sess.close()\n except AttributeError:\n print('No active Tensorflow session.')\n\n def _save_parameters(self):\n \"\"\"Save model and network parameters to JSON.\"\"\"\n # Save model parameters\n self._save_model_parameters()\n\n # Save network parameters\n self._save_network_parameters()\n\n def _save_model_parameters(self):\n \"\"\"Save model parameters to JSON.\"\"\"\n # Get model parameters\n model_parameters = dict(model_name=self.model_name, network_name=self.network_name, save_path=self.save_path,\n data_path=self.data_path, 
max_to_keep=self.max_to_keep)\n\n # Save model parameters to JSON\n if not os.path.exists(os.path.join(self.save_path, 'parameters', 'model_parameters.json')):\n with open(os.path.join(self.save_path, 'parameters', 'model_parameters.json'), 'w') as file:\n json.dump(model_parameters, file)\n\n def _save_network_parameters(self):\n \"\"\"Save network parameters to JSON.\"\"\"\n if not os.path.exists(os.path.join(self.save_path, 'parameters', 'network_parameters.json')):\n with open(os.path.join(self.save_path, 'parameters', 'network_parameters.json'), 'w') as file:\n json.dump(self.network_parameters, file)\n\n def _get_neural_network(self):\n \"\"\"Instantiate neural network.\"\"\"\n # Convert string to class\n network = getattr(sys.modules[__name__], self.network_name)\n\n # Instantiate network class with network parameters\n network = network(**self.network_parameters)\n\n return network\n\n def _create_folder_structure(self):\n\n # Set list of folders\n folders = ['train', 'val', 'checkpoints', 'network', 'graph', 'logs', 'parameters']\n\n # Main project directory\n if not os.path.exists(self.save_path):\n os.makedirs(self.save_path)\n\n # Loop through and create project folders\n for folder in folders:\n self._create_folder(folder=folder)\n\n def _create_folder(self, folder):\n \"\"\"Create folder.\"\"\"\n if not os.path.exists(os.path.join(self.save_path, folder)):\n os.makedirs(os.path.join(self.save_path, folder))\n\n def _pickle_network(self):\n \"\"\"Pickle graph.\"\"\"\n with open(os.path.join(self.save_path, 'network', 'network.obj'), 'wb') as file:\n pickle.dump(obj=self.network, file=file)\n\n @staticmethod\n def _import_model_parameters(save_path):\n \"\"\"Import model parameters.\"\"\"\n with open(os.path.join(save_path, 'parameters', 'model_parameters.json')) as file:\n return json.load(file)\n\n @staticmethod\n def _import_network_parameters(save_path):\n \"\"\"Import network parameters.\"\"\"\n with open(os.path.join(save_path, 'parameters', 'network_parameters.json')) as file:\n return json.load(file)\n" ]
[ [ "tensorflow.ConfigProto" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
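`Model._get_neural_network` above resolves the stored `network_name` string back into a class with `getattr(sys.modules[__name__], ...)`, which is what lets `build_training_graph` rebuild a model from its JSON parameter files. A self-contained sketch of that lookup (the stand-in class and parameters are hypothetical):

import sys

class DeepECGV1:
    """Stand-in for one of the network classes imported above."""
    def __init__(self, num_outputs):
        self.num_outputs = num_outputs

def get_neural_network(network_name, network_parameters):
    # Convert the class-name string into the class object defined in this module
    network_class = getattr(sys.modules[__name__], network_name)
    return network_class(**network_parameters)

net = get_neural_network('DeepECGV1', {'num_outputs': 4})
print(type(net).__name__, net.num_outputs)  # DeepECGV1 4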
garibaldu/boundary-seekers
[ "441fea01e93de882bf22e0deb411f0b10602fa37" ]
[ "Testing/ND-Testing.py" ]
[ "import numpy as np\nimport tensorflow as tf\n\ndef __perms(n):\n    if not n:\n        return\n\n    p = []\n\n    for i in range(0, 2**n):\n        s = bin(i)[2:]\n        s = \"0\" * (n-len(s)) + s\n\n        s_prime = np.array(list(map(lambda x: int(x), list(s))))\n        p.append(s_prime)\n\n    return p\n\ndef care(normal, bias, example):\n    z = np.dot(normal, example) + bias\n    return 1.0/(1.0 + np.exp(-z))\n\ndef deci(normal, bias, example):\n    z = np.dot(normal, example) + bias\n    return 1.0/(1.0 + np.exp(-z))\n\ndef sigmoid(phi):\n    return 1.0/(1.0 + tf.exp(-phi))\n\ndef compute_penalty(weights, size):\n    mask = np.concatenate((np.array([0], dtype=np.float32), np.ones(size, dtype=np.float32)))\n    return tf.reduce_sum(tf.abs(tf.multiply(mask, weights)))\n\ndef train_boundary_hunter(points, out, iterations):\n    in_size = len(points[0])\n    out_size = 1\n\n    inputs = tf.placeholder('float32', [in_size])\n    targets = tf.placeholder('float32', [out_size])\n\n    hidden_weights = tf.Variable(np.random.uniform(low=-0.5, high=0.5, size=(1, in_size+1)), dtype='float32')\n    gate_weights = tf.Variable(np.random.uniform(low=-0.5, high=0.5, size=(1, in_size+1)), dtype='float32')\n    byas = tf.Variable(np.random.uniform(low=-0.5, high=0.5, size=(1)), dtype='float32')\n    #output_weights = tf.Variable(np.random.uniform(low=-0.5, high=0.5, size=(out_size, num_centroids + 1)), dtype='float32')\n\n    inputs_prime = tf.concat([[1.0], inputs], axis=0)\n\n    # Perform computation\n    prob = tf.reduce_sum(tf.multiply(inputs_prime, hidden_weights), 1)\n\n    g = sigmoid(tf.reduce_sum(tf.multiply(inputs_prime, gate_weights), 1))\n    #hidden_out = tf.add(byas, tf.multiply(g, tf.subtract(prob, byas)))\n    hidden_out = sigmoid(tf.add(g * prob, (1-g) * byas))\n\n    reward = tf.log(compute_penalty(hidden_weights, in_size) + compute_penalty(gate_weights, in_size))\n\n    targets_prime = tf.expand_dims(targets, 1)\n    output = hidden_out\n    errors = -(targets_prime * tf.log(output) + (1 -targets_prime) * tf.log(1 - output))#tf.pow(tf.subtract(tf.expand_dims(targets, 1), output), 2.0)\n    error = tf.reduce_sum(errors)\n    minimize = error - 0.02 * reward\n\n    train_op = tf.train.GradientDescentOptimizer(0.01).minimize(minimize)\n    #clip_byas = tf.assign(byas, tf.clip_by_value(byas, 0, 1))\n\n    model = tf.global_variables_initializer()\n\n    with tf.Session() as session:\n        session.run(model)\n        \n        for e in range(iterations):\n            for d in range(len(points)):\n                session.run(train_op, feed_dict={inputs: points[d], targets: [out[d]]})\n                #session.run(clip_byas)\n            \n\n            if e % 10 == 0:\n                print(session.run(byas))\n                err = 0\n                for d in range(len(points)):\n                    err += session.run(error, feed_dict={inputs: points[d], targets: [out[d]]})\n                print(err)\n                print(session.run(reward))\n                print()\n\n\n        gates = session.run(gate_weights)[0]\n        byas = session.run(byas)[0]\n        boundarys = session.run(hidden_weights)[0]\n\n        return (boundarys, gates, byas)\n\ndef get_final_class(predictions):\n    tally_0 = 0\n    tally_1 = 0\n\n    for p in predictions:\n        if (not p == None) and p >= 0.5:\n            tally_1 += 1\n        elif (not p == None) and p < 0.5:\n            tally_0 += 1\n\n    if tally_0 == 0 and tally_1 == 0:\n        return None\n    \n    return 0 if tally_0 > tally_1 else 1\n\ndef run_boundary_hunters(boundarys, gates, points, out):\n    in_size = len(points[0])\n    out_size = 1\n    \n    inputs = tf.placeholder('float32', [in_size])\n    targets = tf.placeholder('float32', [out_size])\n    hidden_weights = tf.placeholder('float32', [None])\n    gate_weights = tf.placeholder('float32', [None])\n\n    inputs_prime = tf.concat([[1.0], inputs], axis=0)\n\n    g = 
sigmoid(tf.reduce_sum(tf.multiply(inputs_prime, gate_weights)))\n    prob = sigmoid(tf.reduce_sum(tf.multiply(inputs_prime, hidden_weights)))\n\n    model = tf.global_variables_initializer()\n\n    unsure = 0\n    guessed = 0\n    correct = 0\n    with tf.Session() as session:\n        session.run(model)\n\n        for d in range(len(points)):\n            predictions = []\n            for b in range(len(boundarys)):\n                prediction = None\n                care = session.run(g, feed_dict={inputs: points[d], hidden_weights: boundarys[b], gate_weights: gates[b]})\n\n                if care > 0.5:\n                    prediction = session.run(prob, feed_dict={inputs: points[d], hidden_weights: boundarys[b], gate_weights: gates[b]})\n                predictions.append(prediction)\n\n            p = get_final_class(predictions)\n            #print(predictions, \": \", p)\n            if not p == None:\n                guessed += 1\n                \n                if p == out[d]:\n                    correct += 1\n            elif p == None:\n                unsure += 1\n\n    return float(correct)/float(guessed), float(unsure)/float(len(points))\n\nN = 7\n# Generate All Points On Hypercube\nexamples = __perms(N)\ntargets = []\n\n# Generate Boundary Hunter\nbias = np.random.uniform(0, 1, 1)\ndecision = np.random.uniform(-1, 1, N)\ndecision_b = np.random.uniform(-1, 1, 1)\ncaring = np.random.uniform(-1, 1, N)\ncaring_b = np.random.uniform(-1, 1, 1)\n\nuncertain = 0\nclass1 = 0\nclass0 = 0\n\nfor example in examples:\n    clas = None\n    c = care(caring, caring_b, example)\n\n    if c < 0.5:\n        uncertain += 1\n        r = np.random.rand(1)\n        if r > bias:\n            clas = 1\n        else:\n            clas = 0\n    else:\n        d = deci(decision, decision_b, example)\n        if d >= 0.5:\n            clas = 1\n            class1 += 1\n        else:\n            clas = 0\n            class0 += 1\n    targets.append(clas)\n\nif class0 == 0 or class1 == 0:\n    print(\"Class 0: \", class0)\n    print(\"Class 1: \", class1)\n    print(\"Err\")\n    raise RuntimeError(\"GSFE\")\n\n\nbh = train_boundary_hunter(examples, targets, 20000)\n\nprint(\"Uncertain: \", uncertain)\nprint(\"Class 0: \", class0)\nprint(\"Class 1: \", class1)\n\nprint(\"Bias: \", bias)\nprint(\"{}, {}\".format(decision_b, decision))\nprint(\"{}, {}\".format(caring_b, caring))\nprint(run_boundary_hunters([np.concatenate((decision_b, decision))], [np.concatenate((caring_b, caring))], examples, targets))\n\nprint()\nprint(bh)\nprint(run_boundary_hunters([bh[0]], [bh[1]], examples, targets))\n\n" ]
[ [ "numpy.dot", "tensorflow.multiply", "tensorflow.concat", "tensorflow.reduce_sum", "tensorflow.expand_dims", "tensorflow.placeholder", "tensorflow.exp", "numpy.ones", "tensorflow.global_variables_initializer", "numpy.concatenate", "tensorflow.train.GradientDescentOptimizer", "tensorflow.add", "numpy.random.rand", "tensorflow.Session", "tensorflow.log", "numpy.random.uniform", "numpy.array", "numpy.exp" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] } ]
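The gated unit trained above computes sigmoid(g*prob + (1-g)*byas): a closed gate (g near 0) collapses the output to the learned fallback value, while an open gate follows the decision boundary. A plain-numpy evaluation with made-up numbers:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

prob = 2.0   # hypothetical pre-sigmoid decision response for one example
byas = 0.3   # hypothetical learned fallback (the `byas` variable above)
for g in (0.0, 0.5, 1.0):  # gate value: how much this boundary "cares"
    print(g, sigmoid(g * prob + (1 - g) * byas))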
okkhoy/rlpy
[ "af25d2011fff1d61cb7c5cc8992549808f0c6103", "af25d2011fff1d61cb7c5cc8992549808f0c6103", "af25d2011fff1d61cb7c5cc8992549808f0c6103", "af25d2011fff1d61cb7c5cc8992549808f0c6103", "af25d2011fff1d61cb7c5cc8992549808f0c6103" ]
[ "examples/pacman/independent.py", "rlpy/Representations/BEBF.py", "rlpy/Representations/Representation.py", "rlpy/Domains/HIVTreatment.py", "rlpy/Domains/FlipBoard.py" ]
[ "\"\"\"\nPacman with independent discretization\n\"\"\"\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom rlpy.Domains import Pacman\nfrom rlpy.Agents import Q_Learning\nfrom rlpy.Representations import *\nfrom rlpy.Policies import eGreedy\nfrom rlpy.Experiments import Experiment\nimport numpy as np\nfrom hyperopt import hp\n\nparam_space = {'discretization': hp.quniform(\"discretization\", 3, 50, 1),\n               'lambda_': hp.uniform(\"lambda_\", 0., 1.),\n               'boyan_N0': hp.loguniform(\"boyan_N0\", np.log(1e1), np.log(1e5)),\n               'initial_learn_rate': hp.loguniform(\"initial_learn_rate\", np.log(5e-2), np.log(1))}\n\n\ndef make_experiment(\n        exp_id=1, path=\"./Results/Temp/{domain}/{agent}/{representation}/\",\n        lambda_=0.9,\n        boyan_N0=22.36,\n        initial_learn_rate=.068,\n        discretization=9):\n    opt = {}\n    opt[\"path\"] = path\n    opt[\"exp_id\"] = exp_id\n    opt[\"max_steps\"] = 150000\n    opt[\"num_policy_checks\"] = 30\n    opt[\"checks_per_policy\"] = 1\n\n    domain = Pacman()\n    opt[\"domain\"] = domain\n    representation = IncrementalTabular(\n        domain,\n        discretization=discretization)\n    policy = eGreedy(representation, epsilon=0.1)\n    opt[\"agent\"] = Q_Learning(\n        policy, representation, discount_factor=domain.discount_factor,\n        lambda_=lambda_, initial_learn_rate=initial_learn_rate,\n        learn_rate_decay_mode=\"boyan\", boyan_N0=boyan_N0)\n    experiment = Experiment(**opt)\n    return experiment\n\nif __name__ == '__main__':\n    #from Tools.run import run_profiled\n    # run_profiled(make_experiment)\n    experiment = make_experiment(1)\n    experiment.run(visualize_steps=True)\n    experiment.plot()\n    # experiment.save()\n", "\"\"\"Bellman-Error Basis Function Representation.\"\"\"\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\n\n#from rlpy.Tools import\nfrom builtins import super\nfrom builtins import int\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import range\nimport numpy as np\nfrom .Representation import Representation\nfrom rlpy.Tools import svm\n\n__copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\"\n__credits__ = [\"Alborz Geramifard\", \"Robert H. Klein\", \"Christoph Dann\",\n               \"William Dabney\", \"Jonathan P. How\"]\n__license__ = \"BSD 3-Clause\"\n__author__ = \"Robert H. Klein\"\n\n\nclass BEBF(Representation):\n\n    \"\"\"Bellman-Error Basis Function Representation.\n\n    .. warning:: \n    \n        REQUIRES the implementation of locally-weighted\n        projection regression (LWPR), available at:\n        http://wcms.inf.ed.ac.uk/ipab/slmc/research/software-lwpr\n\n    Parameters set according to: Parr et al., \n    \"Analyzing Feature Generation for Function Approximation\" (2007).\n    http://machinelearning.wustl.edu/mlpapers/paper_files/icml2007_ParrPLL07.pdf\n\n    Bellman-Error Basis Function Representation. \\n\n    1. Initial basis function based on immediate reward. \\n\n    2. Evaluate r + Q(s', \\pi{s'}) - Q(s,a) for all samples. \\n\n    3. Train function approximator on bellman error of present solution above\\n\n    4. Add the above as a new basis function. \\n\n    5. Repeat the process using the new basis until the most\n    recently added basis function has norm <= batchThreshold, which\n    Parr et al. 
used as 10^-5.\\n\n \n Note that the *USER* can select the class of feature functions to be used;\n the BEBF function approximator itself consists of many feature functions \n which themselves are often approximations to their particular functions.\n Default here is to train a support vector machine (SVM) to be used for \n each feature function.\n \n \"\"\"\n # Number of features to be expanded in the batch setting; here 1 since\n # each BEBF will be identical on a given iteration\n maxBatchDiscovery = 1\n # from sklearn: \"epsilon in the epsilon-SVR model. It specifies the\n # epsilon-tube within which no penalty is associated in the training loss\n # function with points predicted within a distance epsilon from the actual\n # value.\"\n svm_epsilon = None\n # Array of pointers to feature functions, indexed by order created\n features = []\n batchThreshold = None\n # Initial number of features, initialized in __init__\n initial_features_num = 0\n\n def __init__(self, domain, discretization=20,\n batchThreshold=10 ** -3, svm_epsilon=.1):\n \"\"\"\n :param domain: the problem :py:class:`~rlpy.Domains.Domain.Domain` to learn\n :param discretization: Number of bins used for each continuous dimension.\n For discrete dimensions, this parameter is ignored.\n :param batchThreshold: Threshold below which no more features are added\n for a given data batch.\n :param svm_epsilon: (From sklearn, scikit-learn): \\\"epsilon in the \n epsilon-SVR model. It specifies the epsilon-tube within which no \n penalty is associated in the training loss function with points \n predicted within a distance epsilon from the actual value.\\\"\n \n \"\"\"\n \n self.setBinsPerDimension(domain, discretization)\n # Effectively initialize with IndependentDiscretization\n self.initial_features_num = int(sum(self.bins_per_dim))\n # Starting number of features equals the above, changes during\n # execution\n self.features_num = self.initial_features_num\n # self.features_num = 0\n self.svm_epsilon = svm_epsilon\n self.batchThreshold = batchThreshold\n self.addInitialFeatures()\n super(BEBF, self).__init__(domain, discretization)\n self.isDynamic = True\n # @return: a function object corresponding to the\n\n def getFunctionApproximation(self, X, y):\n \"\"\"\n :param X: Training dataset inputs\n :param y: Outputs associated with training set.\n \n Accepts dataset (X,y) and trains a feature function on it\n (default uses Support Vector Machine).\n Returns a handle to the trained feature function.\n \n \"\"\"\n \n # bebfApprox = svm.SVR(kernel='rbf', degree=3, C=1.0, epsilon = 0.0005) # support vector regression\n # C = penalty parameter of\n # error term, default 1\n bebfApprox = svm.SVR(\n kernel='rbf',\n degree=3,\n C=1.0,\n epsilon=self.svm_epsilon)\n bebfApprox.fit(X, y)\n return bebfApprox\n\n def addInitialFeatures(self):\n pass\n\n def phi_nonTerminal(self, s):\n F_s = np.zeros(self.features_num)\n # From IndependentDiscretization\n F_s[self.activeInitialFeatures(s)] = 1\n bebf_features_num = self.features_num - self.initial_features_num\n for features_ind, F_s_ind in enumerate(np.arange(bebf_features_num) + self.initial_features_num):\n F_s[F_s_ind] = self.features[features_ind].predict(s)\n# print 's,F_s',s,F_s\n# shout('F_s:',F_s)\n return F_s\n\n # Adds new features based on the Bellman Error in batch setting.\n # @param td_errors: p-by-1 (How much error observed for each sample)\n # @param all_phi_s: n-by-p features corresponding to all samples (each column corresponds to one sample)\n # @param s: List of states 
corresponding to each td_error in td_errors (note that the same state may appear multiple times because of different actions taken while there)\n # self.batchThreshold is threshold below which no more BEBFs are added.\n def batchDiscover(self, td_errors, all_phi_s, s):\n # need states here instead?\n addedFeature = False\n # PLACEHOLDER for norm of function\n norm = max(abs(td_errors)) # Norm of function\n for j in range(self.maxBatchDiscovery):\n self.features.append(self.getFunctionApproximation(s, td_errors))\n if norm > self.batchThreshold:\n self.addNewWeight()\n addedFeature = True\n self.features_num += 1\n self.logger.debug(\n 'Added feature. \\t %d total feats' %\n self.features_num)\n else:\n break\n return addedFeature\n\n def featureType(self):\n return float\n", "\"\"\"Representation base class.\"\"\"\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nfrom builtins import int\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import range\nfrom builtins import object\nfrom past.utils import old_div\nimport logging\nfrom copy import deepcopy\nfrom rlpy.Tools import className, addNewElementForAllActions\nfrom rlpy.Tools import vec2id, bin2state, findElemArray1D\nfrom rlpy.Tools import hasFunction, id2vec, closestDiscretization\nimport scipy.sparse as sp\nimport numpy as np\n\n__copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\"\n__credits__ = [\"Alborz Geramifard\", \"Robert H. Klein\", \"Christoph Dann\",\n \"William Dabney\", \"Jonathan P. How\"]\n__license__ = \"BSD 3-Clause\"\n__author__ = \"Alborz Geramifard\"\n\n\nclass Representation(object):\n\n \"\"\"\n The Representation is the :py:class:`~rlpy.Agents.Agent.Agent`'s model of the\n value function associated with a :py:class:`~rlpy.Domains.Domain.Domain`.\n\n As the Agent interacts with the Domain, it receives updates in the form of\n state, action, reward, next state, next action. \\n\n The Agent passes these quantities to its Representation, which is\n responsible for maintaining the value function usually in some\n lower-dimensional feature space.\n Agents can later query the Representation for the value of being in a state\n *V(s)* or the value of taking an action in a particular state\n ( known as the Q-function, *Q(s,a)* ).\n\n .. note::\n\n Throughout the framework, ``phi`` refers to the vector of features;\n ``phi`` or ``phi_s`` is thus the vector of feature functions evaluated\n at the state *s*. phi_s_a appends \\|A\\|-1 copies of phi_s, such that\n \\|phi_s_a\\| = \\|A\\| * \\|phi\\|, where \\|A\\| is the size of the action\n space and \\|phi\\| is the number of features. Each of these blocks\n corresponds to a state-action pair; all blocks except for the selected\n action ``a`` are set to 0.\n\n The Representation class is a base class that provides the basic framework\n for all representations. It provides the methods and attributes\n that allow child classes to interact with the Agent and Domain classes\n within the RLPy library. \\n\n All new representation implementations should inherit from this class.\n\n .. 
note::\n At present, it is assumed that the Linear Function approximator\n family of representations is being used.\n\n \"\"\"\n #: A numpy array of the Linear Weights, one for each feature (theta)\n weight_vec = None\n #: The Domain that this Representation is modeling\n domain = None\n #: Number of features in the representation\n features_num = 0\n #: Number of actions in the representation\n actions_num = 0\n # Number of bins used for discretization of each continuous dimension\n discretization = 20\n #: Number of possible states per dimension [1-by-dim]\n bins_per_dim = 0\n #: Width of bins in each dimension\n binWidth_per_dim = 0\n #: Number of aggregated states based on the discretization.\n #: If the represenation is adaptive, set to the best resolution possible\n agg_states_num = 0\n # A simple object that records the prints in a file\n logger = None\n # A seeded numpy random number generator\n random_state = None\n\n #: True if the number of features may change during execution.\n isDynamic = False\n #: A dictionary used to cache expected results of step(). Used for planning algorithms\n expectedStepCached = None\n\n def __init__(self, domain, discretization=20, seed=1):\n \"\"\"\n :param domain: the problem :py:class:`~rlpy.Domains.Domain.Domain` to learn\n :param discretization: Number of bins used for each continuous dimension.\n For discrete dimensions, this parameter is ignored.\n \"\"\"\n\n for v in ['features_num']:\n if getattr(self, v) is None:\n raise Exception('Missed domain initialization of ' + v)\n self.expectedStepCached = {}\n self.setBinsPerDimension(domain, discretization)\n self.domain = domain\n self.state_space_dims = domain.state_space_dims\n self.actions_num = domain.actions_num\n self.discretization = discretization\n try:\n self.weight_vec = np.zeros(self.features_num * self.actions_num)\n except MemoryError as m:\n print(\n \"Unable to allocate weights of size: %d\\n\" %\n self.features_num *\n self.actions_num)\n raise m\n\n self._phi_sa_cache = np.empty(\n (self.actions_num, self.features_num))\n self._arange_cache = np.arange(self.features_num)\n self.agg_states_num = np.prod(self.bins_per_dim.astype('uint64'))\n self.logger = logging.getLogger(\"rlpy.Representations.\" + self.__class__.__name__)\n \n # a new stream of random numbers for each representation\n self.random_state = np.random.RandomState(seed=seed)\n \n def init_randomization(self):\n \"\"\"\n Any stochastic behavior in __init__() is broken out into this function\n so that if the random seed is later changed (eg, by the Experiment),\n other member variables and functions are updated accordingly.\n \n \"\"\"\n pass\n \n def V(self, s, terminal, p_actions, phi_s=None):\n \"\"\" Returns the value of state s under possible actions p_actions.\n\n :param s: The queried state\n :param terminal: Whether or not *s* is a terminal state\n :param p_actions: the set of possible actions\n :param phi_s: (optional) The feature vector evaluated at state s.\n If the feature vector phi(s) has already been cached,\n pass it here as input so that it need not be computed again.\n\n See :py:meth:`~rlpy.Representations.Representation.Representation.Qs`.\n \"\"\"\n\n if phi_s is None:\n phi_s = self.phi(s, terminal)\n AllQs = self.Qs(s, terminal, phi_s)\n if len(p_actions):\n return max(AllQs[p_actions])\n else:\n return 0 # Return 0 value when no action is possible\n\n def Qs(self, s, terminal, phi_s=None):\n \"\"\"\n Returns an array of actions available at a state and their\n associated values.\n\n :param s: 
The queried state\n :param terminal: Whether or not *s* is a terminal state\n :param phi_s: (optional) The feature vector evaluated at state s.\n If the feature vector phi(s) has already been cached,\n pass it here as input so that it need not be computed again.\n\n :return: The tuple (Q,A) where:\n - Q: an array of Q(s,a), the values of each action at *s*. \\n\n - A: the corresponding array of actionIDs (integers)\n\n .. note::\n This function is distinct\n from :py:meth:`~rlpy.Representations.Representation.Representation.Q`,\n which computes the Q function for an (s,a) pair. \\n\n Instead, this function ``Qs()`` computes all Q function values\n (for all possible actions) at a given state *s*.\n\n \"\"\"\n\n if phi_s is None:\n phi_s = self.phi(s, terminal)\n if len(phi_s) == 0:\n return np.zeros((self.actions_num))\n weight_vec_prime = self.weight_vec.reshape(-1, self.features_num)\n if self._phi_sa_cache.shape != (self.actions_num, self.features_num):\n self._phi_sa_cache = np.empty(\n (self.actions_num, self.features_num))\n Q = np.multiply(weight_vec_prime, phi_s,\n out=self._phi_sa_cache).sum(axis=1)\n # stacks phi_s in cache\n return Q\n\n def Q(self, s, terminal, a, phi_s=None):\n \"\"\" Returns the learned value of a state-action pair, *Q(s,a)*.\n\n :param s: The queried state in the state-action pair.\n :param terminal: Whether or not *s* is a terminal state\n :param a: The queried action in the state-action pair.\n :param phi_s: (optional) The feature vector evaluated at state s.\n If the feature vector phi(s) has already been cached,\n pass it here as input so that it need not be computed again.\n\n :return: (float) the value of the state-action pair (s,a), Q(s,a).\n\n \"\"\"\n if len(self.weight_vec) > 0:\n phi_sa, i, j = self.phi_sa(s, terminal, a, phi_s, snippet=True)\n return np.dot(phi_sa, self.weight_vec[i:j])\n else:\n return 0.0\n\n def phi(self, s, terminal):\n \"\"\"\n Returns :py:meth:`~rlpy.Representations.Representation.Representation.phi_nonTerminal`\n for a given representation, or a zero feature vector in a terminal state.\n\n :param s: The state for which to compute the feature vector\n\n :return: numpy array, the feature vector evaluted at state *s*.\n\n .. 
note::\n If state *s* is terminal the feature vector is returned as zeros!\n This prevents the learning algorithm from wrongfully associating\n the end of one episode with the start of the next (e.g., thinking\n that reaching the terminal state causes it to teleport back to the\n start state s0).\n\n\n \"\"\"\n if terminal or self.features_num == 0:\n return np.zeros(self.features_num, 'bool')\n else:\n return self.phi_nonTerminal(s)\n\n def phi_sa(self, s, terminal, a, phi_s=None, snippet=False):\n \"\"\"\n Returns the feature vector corresponding to a state-action pair.\n We use the copy paste technique (Lagoudakis & Parr 2003).\n Essentially, we append the phi(s) vector to itself *|A|* times, where\n *|A|* is the size of the action space.\n We zero the feature values of all of these blocks except the one\n corresponding to the actionID *a*.\n\n When ``snippet == False`` we construct and return the full, sparse phi_sa.\n When ``snippet == True``, we return the tuple (phi_s, index1, index2)\n where index1 and index2 are the indices defining the ends of the phi_s\n block which WOULD be nonzero if we were to construct the full phi_sa.\n\n :param s: The queried state in the state-action pair.\n :param terminal: Whether or not *s* is a terminal state\n :param a: The queried action in the state-action pair.\n :param phi_s: (optional) The feature vector evaluated at state s.\n If the feature vector phi(s) has already been cached,\n pass it here as input so that it need not be computed again.\n :param snippet: if ``True``, do not return a single phi_sa vector,\n but instead a tuple of the components needed to create it.\n See return value below.\n\n :return: If ``snippet==False``, return the enormous phi_sa vector\n constructed by the copy-paste method.\n If ``snippet==True``, do not construct phi_sa, only return\n a tuple (phi_s, index1, index2) as described above.\n\n \"\"\"\n if phi_s is None:\n phi_s = self.phi(s, terminal)\n if snippet is True:\n return phi_s, a * self.features_num, (a + 1) * self.features_num\n\n phi_sa = np.zeros(\n (self.features_num * self.actions_num),\n dtype=phi_s.dtype)\n if self.features_num == 0:\n return phi_sa\n if len(self._arange_cache) != self.features_num:\n self._arange_cache = np.arange(\n a * self.features_num,\n (a + 1) * self.features_num)\n else:\n self._arange_cache += a * self.features_num - self._arange_cache[0]\n phi_sa[self._arange_cache] = phi_s\n # Slower alternatives\n # Alternative 1: Set only non_zeros (Very close on running time with the current solution. In fact it is sometimes better)\n #nnz_ind = phi_s.nonzero()\n #phi_sa[nnz_ind+a*self.features_num] = phi_s[nnz_ind]\n # Alternative 2: Use of Kron\n #A = zeros(self.actions_num)\n #A[a] = 1\n #F_sa = kron(A,F_s)\n return phi_sa\n\n def addNewWeight(self):\n \"\"\"\n Add a new zero weight, corresponding to a newly added feature,\n to all actions.\n \"\"\"\n self.weight_vec = addNewElementForAllActions(\n self.weight_vec,\n self.actions_num)\n\n def hashState(self, s,):\n \"\"\"\n Returns a unique id for a given state.\n Essentially, enumerate all possible states and return the ID associated\n with *s*.\n\n Under the hood: first, discretize continuous dimensions into bins\n as necessary. 
Then map the binstate to an integer.\n \"\"\"\n ds = self.binState(s)\n return vec2id(ds, self.bins_per_dim)\n\n def setBinsPerDimension(self, domain, discretization):\n \"\"\"\n Set the number of bins for each dimension of the domain.\n Continuous spaces will be slices using the ``discretization`` parameter.\n :param domain: the problem :py:class:`~rlpy.Domains.Domain.Domain` to learn\n :param discretization: The number of bins a continuous domain should be sliced into.\n\n \"\"\"\n self.bins_per_dim = np.zeros(domain.state_space_dims, np.uint16)\n self.binWidth_per_dim = np.zeros(domain.state_space_dims)\n for d in range(domain.state_space_dims):\n if d in domain.continuous_dims:\n self.bins_per_dim[d] = discretization\n else:\n self.bins_per_dim[d] = domain.statespace_limits[d, 1] - \\\n domain.statespace_limits[d, 0]\n self.binWidth_per_dim[d] = old_div((domain.statespace_limits[d,1] - domain.statespace_limits[d, 0]), (self.bins_per_dim[d] * 1.))\n\n def binState(self, s):\n \"\"\"\n Returns a vector where each element is the zero-indexed bin number\n corresponding with the given state.\n (See :py:meth:`~rlpy.Representations.Representation.Representation.hashState`)\n Note that this vector will have the same dimensionality as *s*.\n\n (Note: This method is binary compact; the negative case of binary features is\n excluded from feature activation.\n For example, if the domain has a light and the light is off, no feature\n will be added. This is because the very *absence* of the feature\n itself corresponds to the light being off.\n \"\"\"\n s = np.atleast_1d(s)\n limits = self.domain.statespace_limits\n assert (np.all(s >= limits[:, 0]))\n assert (np.all(s <= limits[:, 1]))\n width = limits[:, 1] - limits[:, 0]\n diff = s - limits[:, 0]\n bs = (diff * self.bins_per_dim / width).astype(\"uint32\")\n m = bs == self.bins_per_dim\n bs[m] = self.bins_per_dim[m] - 1\n return bs\n\n def bestActions(self, s, terminal, p_actions, phi_s=None):\n \"\"\"\n Returns a list of the best actions at a given state.\n If *phi_s* [the feature vector at state *s*] is given, it is used to\n speed up code by preventing re-computation within this function.\n\n See :py:meth:`~rlpy.Representations.Representation.Representation.bestAction`\n\n :param s: The given state\n :param terminal: Whether or not the state *s* is a terminal one.\n :param phi_s: (optional) the feature vector at state (s).\n :return: A list of the best actions at the given state.\n\n \"\"\"\n Qs = self.Qs(s, terminal, phi_s)\n Qs = Qs[p_actions]\n # Find the index of best actions\n ind = findElemArray1D(Qs, Qs.max())\n return np.array(p_actions)[ind]\n\n def pre_discover(self, s, terminal, a, sn, terminaln):\n \"\"\"\n Identifies and adds (\"discovers\") new features for this adaptive\n representation BEFORE having obtained the TD-Error.\n For example, see :py:class:`~rlpy.Representations.IncrementalTabular.IncrementalTabular`.\n In that class, a new feature is added anytime a novel state is observed.\n\n .. 
note::\n For adaptive representations that require access to TD-Error to\n determine which features to add next,\n use :py:meth:`~rlpy.Representations.Representation.Representation.post_discover`\n instead.\n\n :param s: The state\n :param terminal: boolean, whether or not *s* is a terminal state.\n :param a: The action\n :param sn: The next state\n :param terminaln: boolean, whether or not *sn* is a terminal state.\n\n :return: The number of new features added to the representation\n \"\"\"\n\n return 0\n\n def post_discover(self, s, terminal, a, td_error, phi_s):\n \"\"\"\n Identifies and adds (\"discovers\") new features for this adaptive\n representation AFTER having obtained the TD-Error.\n For example, see :py:class:`~rlpy.Representations.iFDD.iFDD`.\n In that class, a new feature is added based on regions of high TD-Error.\n\n .. note::\n For adaptive representations that do not require access to TD-Error\n to determine which features to add next, you may\n use :py:meth:`~rlpy.Representations.Representation.Representation.pre_discover`\n instead.\n\n :param s: The state\n :param terminal: boolean, whether or not *s* is a terminal state.\n :param a: The action\n :param td_error: The temporal difference error at this transition.\n :param phi_s: The feature vector evaluated at state *s*.\n\n :return: The number of new features added to the representation\n \"\"\"\n return 0\n\n def bestAction(self, s, terminal, p_actions, phi_s=None):\n \"\"\"\n Returns the best action at a given state.\n If there are multiple best actions, this method selects one of them\n uniformly randomly.\n If *phi_s* [the feature vector at state *s*] is given, it is used to\n speed up code by preventing re-computation within this function.\n\n See :py:meth:`~rlpy.Representations.Representation.Representation.bestActions`\n\n :param s: The given state\n :param terminal: Whether or not the state *s* is a terminal one.\n :param phi_s: (optional) the feature vector at state (s).\n :return: The best action at the given state.\n \"\"\"\n bestA = self.bestActions(s, terminal, p_actions, phi_s)\n if isinstance(bestA, int):\n return bestA\n elif len(bestA) > 1:\n return self.random_state.choice(bestA)\n # return bestA[0]\n else:\n return bestA[0]\n\n def phi_nonTerminal(self, s):\n \"\"\" *Abstract Method* \\n\n Returns the feature vector evaluated at state *s* for non-terminal\n states; see\n function :py:meth:`~rlpy.Representations.Representation.Representation.phi`\n for the general case.\n\n :param s: The given state\n\n :return: The feature vector evaluated at state *s*.\n \"\"\"\n raise NotImplementedError\n\n def activeInitialFeatures(self, s):\n \"\"\"\n Returns the index of active initial features based on bins in each\n dimension.\n :param s: The state\n\n :return: The active initial features of this representation\n (before expansion)\n \"\"\"\n bs = self.binState(s)\n shifts = np.hstack((0, np.cumsum(self.bins_per_dim)[:-1]))\n index = bs + shifts\n return index.astype('uint32')\n\n def batchPhi_s_a(self, all_phi_s, all_actions,\n all_phi_s_a=None, use_sparse=False):\n \"\"\"\n Builds the feature vector for a series of state-action pairs (s,a)\n using the copy-paste method.\n\n .. 
note::\n See :py:meth:`~rlpy.Representations.Representation.Representation.phi_sa`\n for more information.\n\n :param all_phi_s: The feature vectors evaluated at a series of states.\n Has dimension *p* x *n*, where *p* is the number of states\n (indexed by row), and *n* is the number of features.\n :param all_actions: The set of actions corresponding to each feature.\n Dimension *p* x *1*, where *p* is the number of states included\n in this batch.\n :param all_phi_s_a: (Optional) Feature vector for a series of\n state-action pairs (s,a) using the copy-paste method.\n If the feature vector phi(s) has already been cached,\n pass it here as input so that it need not be computed again.\n :param use_sparse: Determines whether or not to use sparse matrix\n libraries provided with numpy.\n\n\n :return: all_phi_s_a (of dimension p x (s_a) )\n \"\"\"\n p, n = all_phi_s.shape\n a_num = self.actions_num\n if use_sparse:\n phi_s_a = sp.lil_matrix(\n (p, n * a_num), dtype=all_phi_s.dtype)\n else:\n phi_s_a = np.zeros((p, n * a_num), dtype=all_phi_s.dtype)\n\n for i in range(a_num):\n rows = np.where(all_actions == i)[0]\n if len(rows):\n phi_s_a[rows, i * n:(i + 1) * n] = all_phi_s[rows,:]\n return phi_s_a\n\n def batchBestAction(self, all_s, all_phi_s,\n action_mask=None, useSparse=True):\n \"\"\"\n Accepts a batch of states, returns the best action associated with each.\n\n .. note::\n See :py:meth:`~rlpy.Representations.Representation.Representation.bestAction`\n\n :param all_s: An array of all the states to consider.\n :param all_phi_s: The feature vectors evaluated at a series of states.\n Has dimension *p* x *n*, where *p* is the number of states\n (indexed by row), and *n* is the number of features.\n :param action_mask: (optional) a *p* x *|A|* mask on the possible\n actions to consider, where *|A|* is the size of the action space.\n The mask is a binary 2-d array, where 1 indicates an active mask\n (action is unavailable) while 0 indicates a possible action.\n :param useSparse: Determines whether or not to use sparse matrix\n libraries provided with numpy.\n\n :return: An array of the best action associated with each state.\n\n \"\"\"\n p, n = all_phi_s.shape\n a_num = self.actions_num\n\n if action_mask is None:\n action_mask = np.ones((p, a_num))\n for i, s in enumerate(all_s):\n action_mask[i, self.domain.possibleActions(s)] = 0\n\n a_num = self.actions_num\n if useSparse:\n # all_phi_s_a will be ap-by-an\n all_phi_s_a = sp.kron(np.eye(a_num, a_num), all_phi_s)\n all_q_s_a = all_phi_s_a * self.weight_vec.reshape(-1, 1) # ap-by-1\n else:\n # all_phi_s_a will be ap-by-an\n all_phi_s_a = np.kron(np.eye(a_num, a_num), all_phi_s)\n all_q_s_a = np.dot(all_phi_s_a, self.weight_vec.T) # ap-by-1\n all_q_s_a = all_q_s_a.reshape((a_num, -1)).T # a-by-p\n all_q_s_a = np.ma.masked_array(all_q_s_a, mask=action_mask)\n best_action = np.argmax(all_q_s_a, axis=1)\n\n # Calculate the corresponding phi_s_a\n phi_s_a = self.batchPhi_s_a(\n all_phi_s,\n best_action,\n all_phi_s_a,\n useSparse)\n return best_action, phi_s_a, action_mask\n\n def featureType(self):\n \"\"\" *Abstract Method* \\n\n Return the data type for the underlying features (eg 'float').\n \"\"\"\n raise NotImplementedError\n\n def Q_oneStepLookAhead(self, s, a, ns_samples, policy=None):\n \"\"\"\n Returns the state action value, Q(s,a), by performing one step\n look-ahead on the domain.\n\n .. 
note::\n For an example of how this function works, see\n `Line 8 of Figure 4.3 <http://webdocs.cs.ualberta.ca/~sutton/book/ebook/node43.html>`_\n in Sutton and Barto 1998.\n\n If the domain does not define ``expectedStep()``, this function uses\n ``ns_samples`` samples to estimate the one_step look-ahead.\n If a policy is passed (used in the policy evaluation), it is used to\n generate the action for the next state.\n Otherwise the best action is selected.\n\n .. note::\n This function should not be called in any RL algorithms unless\n the underlying domain is an approximation of the true model.\n\n :param s: The given state\n :param a: The given action\n :param ns_samples: The number of samples used to estimate the one_step look-ahead.\n :param policy: (optional) Used to select the action in the next state\n (*after* taking action a) when estimating the one_step look-aghead.\n If ``policy == None``, the best action will be selected.\n\n :return: The one-step lookahead state-action value, Q(s,a).\n \"\"\"\n # Hash new state for the incremental tabular case\n self.continuous_state_starting_samples = 10\n if hasFunction(self, 'addState'):\n self.addState(s)\n\n discount_factor = self.domain.discount_factor\n if hasFunction(self.domain, 'expectedStep'):\n p, r, ns, t, p_actions = self.domain.expectedStep(s, a)\n Q = 0\n for j in range(len(p)):\n if policy is None:\n Q += p[j, 0] * (r[j, 0] + discount_factor * self.V(ns[j,:], t[j,:], p_actions[j]))\n else:\n # For some domains such as blocks world, you may want to apply bellman backup to impossible states which may not have any possible actions.\n # This if statement makes sure that there exist at least\n # one action in the next state so the bellman backup with\n # the fixed policy is valid\n if len(self.domain.possibleActions(ns[j,:])):\n na = policy.pi(ns[j,:], t[j,:], self.domain.possibleActions(ns[j,:]))\n Q += p[j, 0] * (r[j, 0] + discount_factor * self.Q(ns[j,:], t[j,:], na))\n else:\n # See if they are in cache:\n key = tuple(np.hstack((s, [a])))\n cacheHit = self.expectedStepCached.get(key)\n if cacheHit is None:\n # Not found in cache => Calculate and store in cache\n # If continuous domain, sample <continuous_state_starting_samples> points within each discritized grid and sample <ns_samples>/<continuous_state_starting_samples> for each starting state.\n # Otherwise take <ns_samples> for the state.\n\n # First put s in the middle of the grid:\n # shout(self,s)\n s = self.stateInTheMiddleOfGrid(s)\n # print \"After:\", shout(self,s)\n if len(self.domain.continuous_dims):\n next_states = np.empty(\n (ns_samples, self.domain.state_space_dims))\n rewards = np.empty(ns_samples)\n # next states per samples initial state\n ns_samples_ = old_div(ns_samples, \\\n self.continuous_state_starting_samples)\n for i in range(self.continuous_state_starting_samples):\n # sample a random state within the grid corresponding\n # to input s\n new_s = s.copy()\n for d in range(self.domain.state_space_dims):\n w = self.binWidth_per_dim[d]\n # Sample each dimension of the new_s within the\n # cell\n new_s[d] = (self.random_state.rand() - .5) * w + s[d]\n # If the dimension is discrete make make the\n # sampled value to be int\n if not d in self.domain.continuous_dims:\n new_s[d] = int(new_s[d])\n # print new_s\n ns, r = self.domain.sampleStep(new_s, a, ns_samples_)\n next_states[i * ns_samples_:(i + 1) * ns_samples_,:] = ns\n rewards[i * ns_samples_:(i + 1) * ns_samples_] = r\n else:\n next_states, rewards = self.domain.sampleStep(\n s, a, ns_samples)\n 
self.expectedStepCached[key] = [next_states, rewards]\n else:\n # print \"USED CACHED\"\n next_states, rewards = cacheHit\n if policy is None:\n Q = np.mean([rewards[i] + discount_factor * self.V(next_states[i,:]) for i in range(ns_samples)])\n else:\n Q = np.mean([rewards[i] + discount_factor * self.Q(next_states[i,:], policy.pi(next_states[i,:])) for i in range(ns_samples)])\n return Q\n\n def Qs_oneStepLookAhead(self, s, ns_samples, policy=None):\n \"\"\"\n Returns an array of actions and their associated values Q(s,a),\n by performing one step look-ahead on the domain for each of them.\n\n .. note::\n For an example of how this function works, see\n `Line 8 of Figure 4.3 <http://webdocs.cs.ualberta.ca/~sutton/book/ebook/node43.html>`_\n in Sutton and Barto 1998.\n\n If the domain does not define ``expectedStep()``, this function uses\n ``ns_samples`` samples to estimate the one_step look-ahead.\n If a policy is passed (used in the policy evaluation), it is used to\n generate the action for the next state.\n Otherwise the best action is selected.\n\n .. note::\n This function should not be called in any RL algorithms unless\n the underlying domain is an approximation of the true model.\n\n :param s: The given state\n :param ns_samples: The number of samples used to estimate the one_step look-ahead.\n :param policy: (optional) Used to select the action in the next state\n (*after* taking action a) when estimating the one_step look-aghead.\n If ``policy == None``, the best action will be selected.\n\n :return: an array of length `|A|` containing the *Q(s,a)* for each\n possible *a*, where `|A|` is the number of possible actions from state *s*\n \"\"\"\n actions = self.domain.possibleActions(s)\n Qs = np.array([self.Q_oneStepLookAhead(s, a, ns_samples, policy)\n for a in actions])\n return Qs, actions\n\n def V_oneStepLookAhead(self, s, ns_samples):\n \"\"\"\n Returns the value of being in state *s*, V(s),\n by performing one step look-ahead on the domain.\n\n .. note::\n For an example of how this function works, see\n `Line 6 of Figure 4.5 <http://webdocs.cs.ualberta.ca/~sutton/book/ebook/node43.html>`_\n in Sutton and Barto 1998.\n\n If the domain does not define ``expectedStep()``, this function uses\n ``ns_samples`` samples to estimate the one_step look-ahead.\n\n .. 
note::\n This function should not be called in any RL algorithms unless\n the underlying domain is an approximation of the true model.\n\n :param s: The given state\n :param ns_samples: The number of samples used to estimate the one_step look-ahead.\n\n :return: The value of being in state *s*, *V(s)*.\n \"\"\"\n # The estimated value = max_a Q(s,a) together with the corresponding\n # action that maximizes the Q function\n Qs, actions = self.Qs_oneStepLookAhead(s, ns_samples)\n a_ind = np.argmax(Qs)\n return Qs[a_ind], actions[a_ind]\n\n def stateID2state(self, s_id):\n \"\"\"\n Returns the state vector correponding to a state_id.\n If dimensions are continuous it returns the state representing the\n middle of the bin (each dimension is discretized according to\n ``representation.discretization``.\n\n :param s_id: The id of the state, often calculated using the\n ``state2bin`` function\n\n :return: The state *s* corresponding to the integer *s_id*.\n \"\"\"\n\n # Find the bin number on each dimension\n s = np.array(id2vec(s_id, self.bins_per_dim))\n\n # Find the value corresponding to each bin number\n for d in range(self.domain.state_space_dims):\n s[d] = bin2state(s[d], self.bins_per_dim[d], self.domain.statespace_limits[d,:])\n\n if len(self.domain.continuous_dims) == 0:\n s = s.astype(int)\n return s\n\n def stateInTheMiddleOfGrid(self, s):\n \"\"\"\n Accepts a continuous state *s*, bins it into the discretized domain,\n and returns the state of the nearest gridpoint.\n Essentially, we snap *s* to the nearest gridpoint and return that\n gridpoint state.\n For continuous MDPs this plays a major rule in improving the speed\n through caching of next samples.\n\n :param s: The given state\n\n :return: The nearest state *s* which is captured by the discretization.\n \"\"\"\n s_normalized = s.copy()\n for d in range(self.domain.state_space_dims):\n s_normalized[d] = closestDiscretization(s[d], self.bins_per_dim[d], self.domain.statespace_limits[d,:])\n return s_normalized\n\n\n def episodeTerminated(self):\n pass\n \n def featureLearningRate(self):\n \"\"\"\n :return: An array or scalar used to adapt the learning rate of each\n feature individually.\n \"\"\"\n return 1.0\n\n def __deepcopy__(self, memo):\n cls = self.__class__\n result = cls.__new__(cls)\n memo[id(self)] = result\n for k, v in list(self.__dict__.items()):\n if k is \"logger\":\n continue\n setattr(result, k, deepcopy(v, memo))\n return result\n", "\"\"\"HIV Treatment domain\"\"\"\nfrom __future__ import print_function\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import unicode_literals\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import range\nfrom .Domain import Domain\nimport numpy as np\nfrom scipy.integrate import odeint\nfrom rlpy.Tools import plt\n\n__copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\"\n__credits__ = [\"Alborz Geramifard\", \"Robert H. Klein\", \"Christoph Dann\",\n \"William Dabney\", \"Jonathan P. How\"]\n__license__ = \"BSD 3-Clause\"\n__author__ = \"Christoph Dann\"\n\n\nclass HIVTreatment(Domain):\n\n \"\"\"\n Simulation of HIV Treatment. 
The aim is to find an optimal drug schedule.\n\n **STATE:** The state contains concentrations of 6 different cells:\n\n * T1: non-infected CD4+ T-lymphocytes [cells / ml]\n * T1*: infected CD4+ T-lymphocytes [cells / ml]\n * T2: non-infected macrophages [cells / ml]\n * T2*: infected macrophages [cells / ml]\n * V: number of free HI viruses [copies / ml]\n * E: number of cytotoxic T-lymphocytes [cells / ml]\n\n **ACTIONS:** The therapy consists of 2 drugs\n (reverse transcriptase inhibitor [RTI] and protease inhibitor [PI]) which\n are activated or not. The action space contains therefore of 4 actions:\n\n * *0*: none active\n * *1*: RTI active\n * *2*: PI active\n * *3*: RTI and PI active\n\n **REFERENCE:**\n\n .. seealso::\n Ernst, D., Stan, G., Gonc, J. & Wehenkel, L.\n Clinical data based optimal STI strategies for HIV:\n A reinforcement learning approach\n In Proceedings of the 45th IEEE Conference on Decision and Control (2006).\n\n\n \"\"\"\n state_names = (\"T1\", \"T1*\", \"T2\", \"T2*\", \"V\", \"E\")\n discount_factor = 0.98\n continuous_dims = np.arange(6)\n actions = np.array([[0., 0.], [.7, 0.], [0., .3], [.7, .3]])\n actions_num = 4\n episodeCap = 200 #: total of 1000 days with a measurement every 5 days\n dt = 5 #: measurement every 5 days\n logspace = True #: whether observed states are in log10 space or not\n #: only update the graphs in showDomain every x steps\n show_domain_every = 20\n # store samples of current episode for drawing\n episode_data = np.zeros((7, episodeCap + 1))\n\n if logspace:\n statespace_limits = np.array([[-5, 8]] * 6)\n else:\n statespace_limits = np.array([[0., 1e8]] * 6)\n\n def step(self, a):\n self.t += 1\n # if self.logspace:\n # s = np.power(10, s)\n\n eps1, eps2 = self.actions[a]\n ns = odeint(dsdt, self.state, [0, self.dt],\n args=(eps1, eps2), mxstep=1000)[-1]\n T1, T2, T1s, T2s, V, E = ns\n # the reward function penalizes treatment because of side-effects\n reward = - 0.1 * V - 2e4 * eps1 ** 2 - 2e3 * eps2 ** 2 + 1e3 * E\n self.state = ns.copy()\n if self.logspace:\n ns = np.log10(ns)\n\n self.episode_data[:-1, self.t] = self.state\n self.episode_data[-1, self.t - 1] = a\n return reward, ns, False, self.possibleActions()\n\n def possibleActions(self):\n return np.arange(4)\n\n def s0(self):\n self.t = 0\n self.episode_data[:] = np.nan\n # non-healthy stable state of the system\n s = np.array([163573., 5., 11945., 46., 63919., 24.])\n self.state = s.copy()\n if self.logspace:\n return np.log10(s), self.isTerminal(), self.possibleActions()\n self.episode_data[:-1, 0] = s\n return s, self.isTerminal(), self.possibleActions()\n\n def showDomain(self, a=0, s=None):\n \"\"\"\n shows a live graph of each concentration\n \"\"\"\n # only update the graph every couple of steps, otherwise it is\n # extremely slow\n if self.t % self.show_domain_every != 0 and not self.t >= self.episodeCap:\n return\n\n n = self.state_space_dims + 1\n names = list(self.state_names) + [\"Action\"]\n colors = [\"b\", \"b\", \"b\", \"b\", \"r\", \"g\", \"k\"]\n handles = getattr(self, \"_state_graph_handles\", None)\n plt.figure(\"Domain\", figsize=(12, 10))\n if handles is None:\n handles = []\n f, axes = plt.subplots(\n n, sharex=True, num=\"Domain\", figsize=(12, 10))\n f.subplots_adjust(hspace=0.1)\n for i in range(n):\n ax = axes[i]\n d = np.arange(self.episodeCap + 1) * 5\n ax.set_ylabel(names[i])\n ax.locator_params(tight=True, nbins=4)\n handles.append(\n ax.plot(d,\n self.episode_data[i],\n color=colors[i])[0])\n self._state_graph_handles = handles\n 
ax.set_xlabel(\"Days\")\n for i in range(n):\n handles[i].set_ydata(self.episode_data[i])\n ax = handles[i].axes\n ax.relim()\n ax.autoscale_view()\n plt.figure(\"Domain\").canvas.draw()\n plt.figure(\"Domain\").canvas.flush_events()\n\n\ndef dsdt(s, t, eps1, eps2):\n \"\"\"\n system derivate per time. The unit of time are days.\n \"\"\"\n # model parameter constants\n lambda1 = 1e4\n lambda2 = 31.98\n d1 = 0.01\n d2 = 0.01\n f = .34\n k1 = 8e-7\n k2 = 1e-4\n delta = .7\n m1 = 1e-5\n m2 = 1e-5\n NT = 100.\n c = 13.\n rho1 = 1.\n rho2 = 1.\n lambdaE = 1\n bE = 0.3\n Kb = 100\n d_E = 0.25\n Kd = 500\n deltaE = 0.1\n\n # decompose state\n T1, T2, T1s, T2s, V, E = s\n\n # compute derivatives\n tmp1 = (1. - eps1) * k1 * V * T1\n tmp2 = (1. - f * eps1) * k2 * V * T2\n dT1 = lambda1 - d1 * T1 - tmp1\n dT2 = lambda2 - d2 * T2 - tmp2\n dT1s = tmp1 - delta * T1s - m1 * E * T1s\n dT2s = tmp2 - delta * T2s - m2 * E * T2s\n dV = (1. - eps2) * NT * delta * (T1s + T2s) - c * V \\\n - ((1. - eps1) * rho1 * k1 * T1 +\n (1. - f * eps1) * rho2 * k2 * T2) * V\n dE = lambdaE + bE * (T1s + T2s) / (T1s + T2s + Kb) * E \\\n - d_E * (T1s + T2s) / (T1s + T2s + Kd) * E - deltaE * E\n\n return np.array([dT1, dT2, dT1s, dT2s, dV, dE])\n\ntry:\n from .HIVTreatment_dynamics import dsdt\nexcept Exception as e:\n print(e)\n print(\"Cython extension for HIVTreatment dynamics not available, expect slow runtime\")\n", "\"\"\"Flipboard domain.\"\"\"\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom builtins import super\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom rlpy.Tools import plt, id2vec\nfrom .Domain import Domain\nimport numpy as np\n\n__copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\"\n__credits__ = [\"Alborz Geramifard\", \"Robert H. Klein\", \"Christoph Dann\",\n \"William Dabney\", \"Jonathan P. How\"]\n__license__ = \"BSD 3-Clause\"\n__author__ = \"Alborz Geramifard\"\n\n\nclass FlipBoard(Domain):\n\n \"\"\"\n A domain based on the last puzzle of Doors and Rooms Game stage 5-3.\n\n The goal of the game is to get all elements of a 4x4 board\n to have value 1.\n\n The initial state is the following::\n\n 1 0 0 0\n 0 0 0 0\n 0 1 0 0\n 0 0 1 0\n\n **STATE:** a 4x4 array of binary values. \\n\n **ACTION:** Invert the value of a given [Row, Col] (from 0->1 or 1->0).\\n\n **TRANSITION:** Determinisically flip all elements of the board on the same\n row OR col of the action. \\n\n **REWARD:** -1 per step. 0 when the board is solved [all ones]\n **REFERENCE:**\n\n .. seealso::\n `gameday inc. 
Doors and Rooms game <http://bit.ly/SYqdZI>`_\n\n \"\"\"\n\n discount_factor = 1\n BOARD_SIZE = 4\n STEP_REWARD = -1\n episodeCap = 100 # Set by the domain = min(100,rows*cols)\n actions_num = BOARD_SIZE ** 2\n statespace_limits = np.tile([0, 1], (BOARD_SIZE ** 2, 1))\n\n # Visual Stuff\n domain_fig = None\n move_fig = None\n\n def __init__(self):\n super(FlipBoard, self).__init__()\n\n def showDomain(self, a=0):\n s = self.state\n # Draw the environment\n if self.domain_fig is None:\n self.move_fig = plt.subplot(111)\n s = s.reshape((self.BOARD_SIZE, self.BOARD_SIZE))\n self.domain_fig = plt.imshow(\n s,\n cmap='FlipBoard',\n interpolation='nearest',\n vmin=0,\n vmax=1)\n plt.xticks(np.arange(self.BOARD_SIZE), fontsize=FONTSIZE)\n plt.yticks(np.arange(self.BOARD_SIZE), fontsize=FONTSIZE)\n # pl.tight_layout()\n a_row, a_col = id2vec(a, [self.BOARD_SIZE, self.BOARD_SIZE])\n self.move_fig = self.move_fig.plot(\n a_col,\n a_row,\n 'kx',\n markersize=30.0)\n plt.show()\n a_row, a_col = id2vec(a, [self.BOARD_SIZE, self.BOARD_SIZE])\n self.move_fig.pop(0).remove()\n # print a_row,a_col\n # Instead of '>' you can use 'D', 'o'\n self.move_fig = plt.plot(a_col, a_row, 'kx', markersize=30.0)\n s = s.reshape((self.BOARD_SIZE, self.BOARD_SIZE))\n self.domain_fig.set_data(s)\n plt.draw()\n # raw_input()\n\n def step(self, a):\n ns = self.state.copy()\n ns = np.reshape(ns, (self.BOARD_SIZE, -1))\n a_row, a_col = id2vec(a, [self.BOARD_SIZE, self.BOARD_SIZE])\n # print a_row, a_col\n # print ns\n ns[a_row, :] = np.logical_not(ns[a_row,:])\n ns[:, a_col] = np.logical_not(ns[:, a_col])\n ns[a_row, a_col] = not ns[a_row, a_col]\n if self.isTerminal():\n terminal = True\n r = 0\n else:\n terminal = False\n r = self.STEP_REWARD\n # sleep(1)\n ns = ns.flatten()\n self.state = ns.copy()\n return r, ns, terminal, self.possibleActions()\n\n def s0(self):\n self.state = np.array([[1, 0, 0, 0],\n [0, 0, 0, 0],\n [0, 1, 0, 0],\n [0, 0, 1, 0]], dtype='bool').flatten()\n return self.state, self.isTerminal(), self.possibleActions()\n\n def isTerminal(self):\n return np.count_nonzero(self.state) == self.BOARD_SIZE ** 2\n" ]
[ [ "numpy.log" ], [ "numpy.arange", "numpy.zeros" ], [ "numpy.dot", "numpy.hstack", "numpy.multiply", "numpy.arange", "numpy.eye", "numpy.cumsum", "numpy.ones", "numpy.atleast_1d", "numpy.all", "numpy.argmax", "numpy.ma.masked_array", "numpy.array", "numpy.random.RandomState", "numpy.zeros", "numpy.where", "numpy.empty", "scipy.sparse.lil_matrix" ], [ "numpy.arange", "scipy.integrate.odeint", "numpy.log10", "numpy.array", "numpy.zeros" ], [ "numpy.logical_not", "numpy.reshape", "numpy.arange", "numpy.tile", "numpy.count_nonzero", "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "1.7", "1.0", "0.10", "1.2", "0.14", "0.19", "1.5", "0.12", "0.17", "0.13", "1.6", "1.4", "1.9", "1.3", "1.10", "0.15", "0.18", "0.16", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
rsampaths16/ReRes
[ "51089c806c57087eb94d9a659036ebed88e96f13" ]
[ "processing/gray-scale-processing.py" ]
[ "import numpy\nimport scipy\nimport glob\nfrom matplotlib import pyplot\nfrom scipy import misc\nfrom numpy import random\n\nrandom.seed(0)\nSIZE = 128\nORIGINAL = '../data/offline-data/black-and-white-images/original'\nHIGH = '../data/offline-data/black-and-white-images/train/high'\nLOW = '../data/offline-data/black-and-white-images/train/low'\n\ndef sample_patch(image):\n x = random.randint(0, image.shape[0] - SIZE, dtype=numpy.int)\n y = random.randint(0, image.shape[1] - SIZE, dtype=numpy.int)\n high = numpy.copy(image[x:x+SIZE, y:y+SIZE])\n low = numpy.copy(high)\n low = misc.imresize(low, (SIZE // 4, SIZE // 4))\n low = misc.imresize(low, (SIZE, SIZE))\n return low, high\n\nunique_id = 1\nfor image_path in glob.glob(ORIGINAL + '/*.jpg'):\n print(image_path)\n sample = 1\n image = misc.imread(image_path)\n while sample > 0:\n low, high = sample_patch(image)\n misc.imsave(HIGH + '/' + str(unique_id) + '.jpg', high)\n misc.imsave(LOW + '/' + str(unique_id) + '.jpg', low)\n sample -= 1\n unique_id += 1\n" ]
[ [ "scipy.misc.imresize", "numpy.random.seed", "numpy.copy", "scipy.misc.imread", "numpy.random.randint" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "0.14", "0.15", "0.10", "0.16", "0.19", "0.18", "0.12", "1.0", "0.17", "1.2" ], "tensorflow": [] } ]
franyancr/lenstronomy
[ "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1", "3a7b33512a474bf1796d23276d9028b580580cf1" ]
[ "lenstronomy/PointSource/point_source_types.py", "lenstronomy/LensModel/Profiles/curved_arc.py", "test/test_LensModel/test_Solver/test_solver4.py", "lenstronomy/Util/util.py", "test/test_ImSim/test_Numerics/test_grid.py", "lenstronomy/GalKin/galkin.py", "lenstronomy/LightModel/Profiles/nie.py", "test/test_Util/test_mask.py" ]
[ "import numpy as np\nfrom lenstronomy.LensModel.Solver.lens_equation_solver import LensEquationSolver\n\n\nclass Unlensed(object):\n \"\"\"\n class of a single point source in the image plane, aka star\n parameters: ra_image, dec_image, point_amp\n\n \"\"\"\n def __init__(self):\n pass\n\n def image_position(self, kwargs_ps, kwargs_lens=None, **kwargs): # kwargs_lens=None, min_distance=0.01, search_window=5, precision_limit=10**(-10), num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n \"\"\"\n\n :param ra_image:\n :param dec_image:\n :param point_amp:\n :return:\n \"\"\"\n ra_image = kwargs_ps['ra_image']\n dec_image = kwargs_ps['dec_image']\n return np.array(ra_image), np.array(dec_image)\n\n def source_position(self, kwargs_ps, kwargs_lens=None):\n ra_image = kwargs_ps['ra_image']\n dec_image = kwargs_ps['dec_image']\n return np.array(ra_image), np.array(dec_image)\n\n def image_amplitude(self, kwargs_ps, kwargs_lens=None, **kwargs): # , x_pos=None, y_pos=None, min_distance=0.01, search_window=5, precision_limit=10**(-10), num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n point_amp = kwargs_ps['point_amp']\n return np.array(point_amp)\n\n def source_amplitude(self, kwargs_ps, kwargs_lens=None):\n point_amp = kwargs_ps['point_amp']\n return np.array(point_amp)\n\n def update_lens_model(self, lens_model_class):\n pass\n\n\nclass LensedPositions(object):\n \"\"\"\n class of a single point source in the image plane, aka star\n parameters: ra_image, dec_image, point_amp\n\n \"\"\"\n def __init__(self, lensModel, fixed_magnification=False, additional_image=False):\n self._lensModel = lensModel\n self._solver = LensEquationSolver(lensModel)\n self._fixed_magnification = fixed_magnification\n self._additional_image = additional_image\n if fixed_magnification is True and additional_image is True:\n Warning('The combination of fixed_magnification=True and additional_image=True is not optimal for the current computation.'\n 'If you see this warning, please approach the developers.')\n\n def image_position(self, kwargs_ps, kwargs_lens, min_distance=0.01, search_window=5, precision_limit=10**(-10),\n num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n \"\"\"\n\n :param ra_image:\n :param dec_image:\n :param point_amp:\n :return:\n \"\"\"\n if self._additional_image is True:\n ra_source, dec_source = self.source_position(kwargs_ps, kwargs_lens)\n ra_image, dec_image = self._solver.image_position_from_source(ra_source, dec_source, kwargs_lens,\n min_distance=min_distance,\n search_window=search_window,\n precision_limit=precision_limit,\n num_iter_max=num_iter_max, x_center=x_center,\n y_center=y_center, magnification_limit=magnification_limit)\n else:\n ra_image = kwargs_ps['ra_image']\n dec_image = kwargs_ps['dec_image']\n return np.array(ra_image), np.array(dec_image)\n\n def source_position(self, kwargs_ps, kwargs_lens):\n ra_image = kwargs_ps['ra_image']\n dec_image = kwargs_ps['dec_image']\n x_source, y_source = self._lensModel.ray_shooting(ra_image, dec_image, kwargs_lens)\n x_source = np.mean(x_source)\n y_source = np.mean(y_source)\n return np.array(x_source), np.array(y_source)\n\n def image_amplitude(self, kwargs_ps, kwargs_lens=None, x_pos=None, y_pos=None, **kwargs): # min_distance=0.01, search_window=5, precision_limit=10**(-10),num_iter_max=100, x_center=0, y_center=0):\n if self._fixed_magnification:\n if x_pos is not None and y_pos is not None:\n ra_image, dec_image = x_pos, y_pos\n else:\n ra_image, dec_image = 
self.image_position(kwargs_ps, kwargs_lens)\n mag = self._lensModel.magnification(ra_image, dec_image, kwargs_lens)\n point_amp = kwargs_ps['source_amp'] * np.abs(mag)\n else:\n point_amp = kwargs_ps['point_amp']\n if x_pos is not None:\n point_amp = _expand_to_array(point_amp, len(x_pos))\n #if np.atleast_1d(point_amp):\n # pass\n return np.array(point_amp)\n\n def source_amplitude(self, kwargs_ps, kwargs_lens=None):\n if self._fixed_magnification:\n source_amp = kwargs_ps['source_amp']\n else:\n ra_image, dec_image = kwargs_ps['ra_image'], kwargs_ps['dec_image']\n mag = self._lensModel.magnification(ra_image, dec_image, kwargs_lens)\n point_amp = kwargs_ps['point_amp']\n source_amp = np.mean(np.array(point_amp) / np.array(np.abs(mag)))\n return np.array(source_amp)\n\n def update_lens_model(self, lens_model_class):\n self._lensModel = lens_model_class\n self._solver = LensEquationSolver(lens_model_class)\n\n\nclass SourcePositions(object):\n \"\"\"\n class of a single point source in the image plane, aka star\n parameters: ra_image, dec_image, point_amp\n\n \"\"\"\n def __init__(self, lensModel, fixed_magnification=True):\n self._lensModel = lensModel\n self._solver = LensEquationSolver(lensModel)\n self._fixed_magnification = fixed_magnification\n\n def image_position(self, kwargs_ps, kwargs_lens, min_distance=0.01, search_window=5, precision_limit=10**(-10),\n num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n \"\"\"\n\n :param ra_image:\n :param dec_image:\n :param point_amp:\n :return:\n \"\"\"\n ra_source, dec_source = self.source_position(kwargs_ps, kwargs_lens)\n ra_image, dec_image = self._solver.image_position_from_source(ra_source, dec_source, kwargs_lens,\n min_distance=min_distance,\n search_window=search_window,\n precision_limit=precision_limit,\n num_iter_max=num_iter_max, x_center=x_center,\n y_center=y_center, magnification_limit=magnification_limit)\n return ra_image, dec_image\n\n def source_position(self, kwargs_ps, kwargs_lens=None):\n ra_source = kwargs_ps['ra_source']\n dec_source = kwargs_ps['dec_source']\n return np.array(ra_source), np.array(dec_source)\n\n def image_amplitude(self, kwargs_ps, kwargs_lens=None, x_pos=None, y_pos=None, min_distance=0.01, search_window=5,\n precision_limit=10**(-10), num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n if self._fixed_magnification:\n if x_pos is not None and y_pos is not None:\n ra_image, dec_image = x_pos, y_pos\n else:\n ra_image, dec_image = self.image_position(kwargs_ps, kwargs_lens, min_distance=min_distance,\n search_window=search_window,\n precision_limit=precision_limit,\n num_iter_max=num_iter_max, x_center=x_center,\n y_center=y_center, magnification_limit=magnification_limit)\n mag = self._lensModel.magnification(ra_image, dec_image, kwargs_lens)\n point_amp = kwargs_ps['source_amp'] * np.abs(mag)\n else:\n point_amp = kwargs_ps['point_amp']\n if x_pos is not None:\n point_amp = _expand_to_array(point_amp, len(x_pos))\n return np.array(point_amp)\n\n def source_amplitude(self, kwargs_ps, kwargs_lens=None):\n if self._fixed_magnification:\n source_amp = kwargs_ps['source_amp']\n else:\n ra_image, dec_image = self.image_position(kwargs_ps, kwargs_lens)\n mag = self._lensModel.magnification(ra_image, dec_image, kwargs_lens)\n point_amp = kwargs_ps['point_amp']\n source_amp = np.mean(np.array(point_amp) / np.array(mag))\n return np.array(source_amp)\n\n def update_lens_model(self, lens_model_class):\n self._lensModel = lens_model_class\n self._solver = 
LensEquationSolver(lens_model_class)\n\n\nclass PointSourceCached(object):\n \"\"\"\n\n \"\"\"\n def __init__(self, point_source_model, save_cache=False):\n self._model = point_source_model\n self._save_cache = save_cache\n\n def delete_lens_model_cache(self):\n if hasattr(self, '_x_image'):\n del self._x_image\n if hasattr(self, '_y_image'):\n del self._y_image\n if hasattr(self, '_x_source'):\n del self._x_source\n if hasattr(self, '_y_source'):\n del self._y_source\n\n def set_save_cache(self, bool):\n self._save_cache = bool\n\n def update_lens_model(self, lens_model_class):\n self._model.update_lens_model(lens_model_class)\n\n def image_position(self, kwargs_ps, kwargs_lens=None, min_distance=0.05, search_window=10,\n precision_limit=10**(-10), num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n \"\"\"\n\n :param ra_image:\n :param dec_image:\n :param point_amp:\n :return:\n \"\"\"\n\n if not self._save_cache or not hasattr(self, '_x_image') or not hasattr(self, '_y_image'):\n self._x_image, self._y_image = self._model.image_position(kwargs_ps, kwargs_lens, min_distance=min_distance,\n search_window=search_window,\n precision_limit=precision_limit,\n num_iter_max=num_iter_max, x_center=x_center,\n y_center=y_center, magnification_limit=magnification_limit)\n return self._x_image, self._y_image\n\n def source_position(self, kwargs_ps, kwargs_lens=None):\n if not self._save_cache or not hasattr(self, '_x_source') or not hasattr(self, '_y_source'):\n self._x_source, self._y_source = self._model.source_position(kwargs_ps, kwargs_lens)\n return self._x_source, self._y_source\n\n def image_amplitude(self, kwargs_ps, kwargs_lens=None, min_distance=0.01, search_window=5, precision_limit=10**(-10),\n num_iter_max=100, x_center=0, y_center=0, magnification_limit=None):\n x_pos, y_pos = self.image_position(kwargs_ps, kwargs_lens, min_distance=min_distance,\n search_window=search_window,\n precision_limit=precision_limit,\n num_iter_max=num_iter_max, x_center=x_center,\n y_center=y_center, magnification_limit=magnification_limit)\n return self._model.image_amplitude(kwargs_ps, kwargs_lens, x_pos=x_pos, y_pos=y_pos)\n\n def source_amplitude(self, kwargs_ps, kwargs_lens=None):\n return self._model.source_amplitude(kwargs_ps, kwargs_lens)\n\n\ndef _expand_to_array(array, num):\n \"\"\"\n\n :param array: float/int or numpy array\n :param num: number of array entries expected in array\n :return: array of size num\n \"\"\"\n if np.isscalar(array):\n return np.ones(num) * array\n elif len(array) < num:\n out = np.zeros(num)\n out[0:len(array)] = array\n return out\n else:\n return array", "import numpy as np\nfrom lenstronomy.LensModel.Profiles.spp import SPP\nfrom lenstronomy.LensModel.Profiles.base_profile import LensProfileBase\n\n\nclass CurvedArc(LensProfileBase):\n \"\"\"\n lens model that describes a section of a highly magnified deflector region.\n The parameterization is chosen to describe local observables efficient.\n\n Observables are:\n - curvature radius (basically bending relative to the center of the profile)\n - radial stretch (plus sign) thickness of arc with parity (more generalized than the power-law slope)\n - tangential stretch (plus sign). 
Infinity means at critical curve\n - direction of curvature\n - position of arc\n\n Requirements:\n - Should work with other perturbative models without breaking its meaning (say when adding additional shear terms)\n - Must best reflect the observables in lensing\n - minimal covariances between the parameters, intuitive parameterization.\n\n \"\"\"\n param_names = ['tangential_stretch', 'radial_stretch', 'r_curvature', 'direction', 'center_x', 'center_y']\n lower_limit_default = {'tangential_stretch': -100, 'radial_stretch': -5, 'r_curvature': 0.001, 'direction': -np.pi, 'center_x': -100, 'center_y': -100}\n upper_limit_default = {'tangential_stretch': 100, 'radial_stretch': 5, 'r_curvature': 100, 'direction': np.pi, 'center_x': 100, 'center_y': 100}\n\n def __init__(self):\n self._spp = SPP()\n super(CurvedArc, self).__init__()\n\n @staticmethod\n def stretch2spp(tangential_stretch, radial_stretch, r_curvature, direction, center_x, center_y):\n \"\"\"\n\n :param tangential_stretch: float, stretch of intrinsic source in tangential direction\n :param radial_stretch: float, stretch of intrinsic source in radial direction\n :param r_curvature: curvature radius\n :param direction: float, angle in radian\n :param center_x: center of source in image plane\n :param center_y: center of source in image plane\n :return: parameters in terms of a spherical power-law profile resulting in the same observables\n \"\"\"\n center_x_spp = center_x - r_curvature * np.cos(direction)\n center_y_spp = center_y - r_curvature * np.sin(direction)\n\n gamma = (1./radial_stretch - 1) / (1 - 1./tangential_stretch) + 2\n theta_E = abs(1 - 1./tangential_stretch)**(1./(gamma - 1)) * r_curvature\n return theta_E, gamma, center_x_spp, center_y_spp\n\n @staticmethod\n def spp2stretch(theta_E, gamma, center_x_spp, center_y_spp, center_x, center_y):\n \"\"\"\n turn Singular power-law lens model into stretch parameterization at position (center_x, center_y)\n This is the inverse function of stretch2spp()\n\n :param theta_E:\n :param gamma:\n :param center_x_spp:\n :param center_y_spp:\n :param center_x:\n :param center_y:\n :return:\n \"\"\"\n r_curvature = np.sqrt((center_x_spp - center_x)**2 + (center_y_spp - center_y)**2)\n direction = np.arctan2(center_y - center_y_spp, center_x - center_x_spp)\n tangential_stretch = 1 / (1 - (theta_E/r_curvature) ** (gamma - 1))\n radial_stretch = 1 / (1 + (gamma - 2) * (theta_E/r_curvature) ** (gamma - 1))\n return tangential_stretch, radial_stretch, r_curvature, direction\n\n def function(self, x, y, tangential_stretch, radial_stretch, r_curvature, direction, center_x, center_y):\n \"\"\"\n ATTENTION: there may not be a global lensing potential!\n\n :param x:\n :param y:\n :param tangential_stretch:\n :param radial_stretch:\n :param r_curvature:\n :param direction:\n :param center_x:\n :param center_y:\n :return:\n \"\"\"\n theta_E, gamma, center_x_spp, center_y_spp = self.stretch2spp(tangential_stretch, radial_stretch, r_curvature, direction, center_x, center_y)\n return self._spp.function(x, y, theta_E, gamma, center_x_spp, center_y_spp) - self._spp.function(center_x, center_y, theta_E, gamma, center_x_spp, center_y_spp)\n\n def derivatives(self, x, y, tangential_stretch, radial_stretch, r_curvature, direction, center_x, center_y):\n \"\"\"\n\n :param x:\n :param y:\n :param tangential_stretch:\n :param radial_stretch:\n :param r_curvature:\n :param direction:\n :param center_x:\n :param center_y:\n :return:\n \"\"\"\n theta_E, gamma, center_x_spp, center_y_spp = 
self.stretch2spp(tangential_stretch,\n radial_stretch, r_curvature,\n direction, center_x, center_y)\n f_x, f_y = self._spp.derivatives(x, y, theta_E, gamma, center_x_spp, center_y_spp)\n f_x0, f_y0 = self._spp.derivatives(center_x, center_y, theta_E, gamma, center_x_spp, center_y_spp)\n return f_x - f_x0, f_y - f_y0\n\n def hessian(self, x, y, tangential_stretch, radial_stretch, r_curvature, direction, center_x, center_y):\n \"\"\"\n\n :param x:\n :param y:\n :param tangential_stretch:\n :param radial_stretch:\n :param r_curvature:\n :param direction:\n :param center_x:\n :param center_y:\n :return:\n \"\"\"\n theta_E, gamma, center_x_spp, center_y_spp = self.stretch2spp(tangential_stretch,\n radial_stretch, r_curvature,\n direction, center_x, center_y)\n return self._spp.hessian(x, y, theta_E, gamma, center_x_spp, center_y_spp)\n", "__author__ = 'sibirrer'\n\nimport numpy as np\nimport numpy.testing as npt\nimport pytest\nfrom lenstronomy.LensModel.Solver.lens_equation_solver import LensEquationSolver\nfrom lenstronomy.LensModel.Solver.solver4point import Solver4Point\nfrom lenstronomy.LensModel.lens_model import LensModel\nimport lenstronomy.Util.param_util as param_util\n\n\nclass TestSolver4Point(object):\n\n def setup(self):\n \"\"\"\n\n :return:\n \"\"\"\n pass\n\n def test_decoupling(self):\n lens_model_list = ['SPEP', 'SIS']\n lensModel = LensModel(lens_model_list)\n solver = Solver4Point(lensModel)\n solver_decoupled = Solver4Point(lensModel)\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.1\n sourcePos_y = -0.1\n deltapix = 0.05\n numPix = 150\n gamma = 1.9\n phi_G, q = 0.5, 0.8\n e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)\n kwargs_lens = [{'theta_E': 1., 'gamma': gamma, 'e1': e1, 'e2': e2, 'center_x': 0.1, 'center_y': -0.1},\n {'theta_E': 0.1, 'center_x': 0.5, 'center_y': 0}]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4, min_distance=deltapix, search_window=numPix*deltapix)\n phi_G, q = 1.5, 0.9\n e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': e1, 'e2': e2, 'center_x': 0., 'center_y': 0}, {'theta_E': 0.1, 'center_x': 0.5, 'center_y': 0}]\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n kwargs_lens_new_2, accuracy = solver_decoupled.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n print(kwargs_lens_new_2)\n print(kwargs_lens_new)\n npt.assert_almost_equal(kwargs_lens_new[0]['theta_E'], kwargs_lens[0]['theta_E'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e1'], kwargs_lens[0]['e1'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e2'], kwargs_lens[0]['e2'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_x'], kwargs_lens[0]['center_x'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_y'], kwargs_lens[0]['center_y'], decimal=3)\n\n npt.assert_almost_equal(kwargs_lens_new[0]['theta_E'], kwargs_lens_new_2[0]['theta_E'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e1'], kwargs_lens_new_2[0]['e1'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e2'], kwargs_lens_new_2[0]['e2'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_x'], kwargs_lens_new_2[0]['center_x'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_y'], kwargs_lens_new_2[0]['center_y'], decimal=3)\n\n npt.assert_almost_equal(kwargs_lens_new[0]['theta_E'], 1., decimal=3)\n lensModel = 
LensModel(lens_model_list=lens_model_list)\n x_source_new, y_source_new = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n dist = np.sqrt((x_source_new - x_source_new[0]) ** 2 + (y_source_new - y_source_new[0]) ** 2)\n print(dist)\n assert np.max(dist) < 0.000001\n\n def test_solver_spep(self):\n lens_model_list = ['SPEP']\n lensModel = LensModel(lens_model_list)\n solver = Solver4Point(lensModel)\n lensEquationSolver = LensEquationSolver(lensModel)\n\n sourcePos_x = 0.1\n sourcePos_y = -0.1\n deltapix = 0.05\n numPix = 150\n gamma = 1.9\n phi_G, q = 0.5, 0.8\n e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)\n kwargs_lens = [{'theta_E': 1., 'gamma': gamma, 'e1': e1, 'e2': e2, 'center_x': 0.1, 'center_y': -0.1}]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4, min_distance=deltapix, search_window=numPix*deltapix)\n phi_G, q = 1.5, 0.9\n e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': e1, 'e2': e2, 'center_x': 0., 'center_y': 0}]\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n npt.assert_almost_equal(kwargs_lens_new[0]['theta_E'], kwargs_lens[0]['theta_E'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e1'], kwargs_lens[0]['e1'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e2'], kwargs_lens[0]['e2'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_x'], kwargs_lens[0]['center_x'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_y'], kwargs_lens[0]['center_y'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['theta_E'], 1., decimal=3)\n lensModel = LensModel(lens_model_list=lens_model_list)\n x_source_new, y_source_new = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n dist = np.sqrt((x_source_new - x_source_new[0]) ** 2 + (y_source_new - y_source_new[0]) ** 2)\n print(dist)\n assert np.max(dist) < 0.000001\n\n def test_solver_nfw(self):\n lens_model_list = ['NFW_ELLIPSE', 'SIS']\n lensModel = LensModel(lens_model_list)\n solver = Solver4Point(lensModel)\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.1\n sourcePos_y = -0.1\n deltapix = 0.05\n numPix = 150\n Rs = 4.\n phi_G, q = 0.5, 0.8\n e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)\n kwargs_lens = [{'alpha_Rs': 1., 'Rs': Rs, 'e1': e1, 'e2': e2, 'center_x': 0.1, 'center_y': -0.1},\n {'theta_E': 1, 'center_x': 0, 'center_y': 0}]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4, min_distance=deltapix, search_window=numPix*deltapix)\n phi_G, q = 1.5, 0.9\n e1, e2 = param_util.phi_q2_ellipticity(phi_G, q)\n kwargs_lens_init = [{'alpha_Rs': 0.5, 'Rs': Rs, 'e1': e1, 'e2': e2, 'center_x': 0., 'center_y': 0}, kwargs_lens[1]]\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n npt.assert_almost_equal(kwargs_lens_new[0]['alpha_Rs'], kwargs_lens[0]['alpha_Rs'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e1'], kwargs_lens[0]['e1'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['e2'], kwargs_lens[0]['e2'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_x'], kwargs_lens[0]['center_x'], decimal=3)\n npt.assert_almost_equal(kwargs_lens_new[0]['center_y'], kwargs_lens[0]['center_y'], decimal=3)\n\n def test_solver_shapelets(self):\n lens_model_list = ['SHAPELETS_CART', 'SPEP']\n lensModel = LensModel(lens_model_list)\n solver = Solver4Point(lensModel)\n 
lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.1\n sourcePos_y = -0.\n deltapix = 0.05\n numPix = 150\n coeffs = np.array([0, 0.1, 0.1, 0, 0, -0.1])\n kwargs_lens = [{'beta': 1., 'coeffs': coeffs, 'center_x': 0., 'center_y': 0.},\n {'theta_E': 1., 'gamma': 2, 'e1': 0.1, 'e2': 0, 'center_x': 0, 'center_y': 0}]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4, min_distance=deltapix, search_window=numPix*deltapix)\n print(x_pos, y_pos)\n kwargs_lens_init = [{'beta': 1, 'coeffs': np.zeros_like(coeffs), 'center_x': 0., 'center_y': 0}, kwargs_lens[1]]\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n npt.assert_almost_equal(kwargs_lens_new[0]['beta'], kwargs_lens[0]['beta'], decimal=3)\n coeffs_new = kwargs_lens_new[0]['coeffs']\n for i in range(len(coeffs)):\n npt.assert_almost_equal(coeffs_new[i], coeffs[i], decimal=3)\n\n def test_solver_simplified(self):\n lens_model_list = ['SPEP', 'SHEAR_GAMMA_PSI']\n lensModel = LensModel(lens_model_list)\n\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.1\n sourcePos_y = -0.1\n deltapix = 0.05\n numPix = 150\n gamma = 1.9\n gamma_ext = 0.05\n psi_ext = 0.4\n #e1, e2 = param_util.phi_gamma_ellipticity(phi=psi_ext, gamma=gamma_ext)\n kwargs_lens = [{'theta_E': 1., 'gamma': gamma, 'e1': 0.1, 'e2': -0.1, 'center_x': 0.1, 'center_y': -0.1},\n {'gamma_ext': gamma_ext, 'psi_ext': psi_ext}]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n e1_new, e2_new = param_util.phi_gamma_ellipticity(phi=0., gamma=gamma_ext+0.1)\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': 0., 'e2': 0., 'center_x': 0., 'center_y': 0},\n {'gamma_ext': gamma_ext + 0.1, 'psi_ext': 0}]\n solver = Solver4Point(lensModel, solver_type='PROFILE_SHEAR')\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n assert accuracy < 10**(-10)\n x_source, y_source = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n x_source, y_source = np.mean(x_source), np.mean(y_source)\n x_pos_new, y_pos_new = lensEquationSolver.findBrightImage(x_source, y_source, kwargs_lens_new, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, x_pos_new)\n x_pos = np.sort(x_pos)\n x_pos_new = np.sort(x_pos_new)\n y_pos = np.sort(y_pos)\n y_pos_new = np.sort(y_pos_new)\n for i in range(len(x_pos)):\n npt.assert_almost_equal(x_pos[i], x_pos_new[i], decimal=6)\n npt.assert_almost_equal(y_pos[i], y_pos_new[i], decimal=6)\n\n def test_solver_simplified_2(self):\n lens_model_list = ['SPEP', 'SHEAR_GAMMA_PSI']\n lensModel = LensModel(lens_model_list)\n\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.1\n sourcePos_y = -0.1\n deltapix = 0.05\n numPix = 150\n gamma = 1.96\n e1, e2 = -0.01, -0.01\n psi_ext, gamma_ext = param_util.ellipticity2phi_gamma(e1, e2)\n kwargs_shear = {'gamma_ext': gamma_ext, 'psi_ext': psi_ext} # gamma_ext: shear strength, psi_ext: shear angel (in radian)\n kwargs_spemd = {'theta_E': 1., 'gamma': gamma, 'center_x': 0, 'center_y': 0, 'e1': -0.2, 'e2': -0.03}\n kwargs_lens = [kwargs_spemd, kwargs_shear]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': 0, 'e2': 0, 'center_x': 0., 
'center_y': 0},\n {'gamma_ext': gamma_ext, 'psi_ext': psi_ext}]\n solver = Solver4Point(lensModel, solver_type='PROFILE_SHEAR')\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n assert accuracy < 10**(-10)\n x_source, y_source = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n x_source, y_source = np.mean(x_source), np.mean(y_source)\n x_pos_new, y_pos_new = lensEquationSolver.findBrightImage(x_source, y_source, kwargs_lens_new, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, x_pos_new)\n x_pos = np.sort(x_pos)\n x_pos_new = np.sort(x_pos_new)\n y_pos = np.sort(y_pos)\n y_pos_new = np.sort(y_pos_new)\n for i in range(len(x_pos)):\n npt.assert_almost_equal(x_pos[i], x_pos_new[i], decimal=6)\n npt.assert_almost_equal(y_pos[i], y_pos_new[i], decimal=6)\n npt.assert_almost_equal(kwargs_lens_new[1]['psi_ext'], kwargs_lens[1]['psi_ext'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[1]['gamma_ext'], kwargs_lens[1]['gamma_ext'], decimal=8)\n\n def test_solver_profile_shear(self):\n lens_model_list = ['SPEP', 'SHEAR_GAMMA_PSI']\n lensModel = LensModel(lens_model_list)\n\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.\n sourcePos_y = 0.1\n deltapix = 0.05\n numPix = 150\n gamma = 1.98\n e1, e2 = -0.04, -0.01\n gamma_ext = np.sqrt(e1**2 + e2**2)\n kwargs_shear = {'gamma_ext': gamma_ext, 'psi_ext': 0.3} # shear values to the source plane\n kwargs_spemd = {'theta_E': 1.66, 'gamma': gamma, 'center_x': 0.0, 'center_y': 0.0, 'e1': 0.1,\n 'e2': 0.05} # parameters of the deflector lens model\n\n kwargs_lens = [kwargs_spemd, kwargs_shear]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, y_pos, 'test positions')\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': 0, 'e2': 0, 'center_x': 0., 'center_y': 0},\n {'gamma_ext': gamma_ext, 'psi_ext': -0.3}]\n solver = Solver4Point(lensModel, solver_type='PROFILE_SHEAR')\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n assert accuracy < 10**(-10)\n x_source, y_source = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n x_source, y_source = np.mean(x_source), np.mean(y_source)\n x_pos_new, y_pos_new = lensEquationSolver.findBrightImage(x_source, y_source, kwargs_lens_new, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, x_pos_new)\n x_pos = np.sort(x_pos)\n x_pos_new = np.sort(x_pos_new)\n y_pos = np.sort(y_pos)\n y_pos_new = np.sort(y_pos_new)\n for i in range(len(x_pos)):\n npt.assert_almost_equal(x_pos[i], x_pos_new[i], decimal=6)\n npt.assert_almost_equal(y_pos[i], y_pos_new[i], decimal=6)\n npt.assert_almost_equal(kwargs_lens_new[1]['gamma_ext'], kwargs_lens[1]['gamma_ext'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[1]['psi_ext'], kwargs_lens[1]['psi_ext'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[0]['e1'], kwargs_lens[0]['e1'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[0]['e2'], kwargs_lens[0]['e2'], decimal=8)\n\n def test_solver_profile_shear_2(self):\n lens_model_list = ['SPEP', 'SHEAR']\n lensModel = LensModel(lens_model_list)\n\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.\n sourcePos_y = 0.1\n deltapix = 0.05\n numPix = 150\n gamma = 1.98\n e1, e2 = -0.04, -0.01\n\n kwargs_shear = {'e1': e1, 'e2': e2} # shear values to the source plane\n kwargs_spemd = 
{'theta_E': 1.66, 'gamma': gamma, 'center_x': 0.0, 'center_y': 0.0, 'e1': 0.1,\n 'e2': 0.05} # parameters of the deflector lens model\n\n kwargs_lens = [kwargs_spemd, kwargs_shear]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, y_pos, 'test positions')\n\n gamma_ext = np.sqrt(e1 ** 2 + e2 ** 2)\n e1_init, e2_init = param_util.phi_gamma_ellipticity(gamma=gamma_ext, phi=-1.3)\n\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': 0, 'e2': 0, 'center_x': 0., 'center_y': 0},\n {'e1': e1_init, 'e2': e2_init}]\n solver = Solver4Point(lensModel, solver_type='PROFILE_SHEAR')\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n assert accuracy < 10**(-10)\n x_source, y_source = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n x_source, y_source = np.mean(x_source), np.mean(y_source)\n x_pos_new, y_pos_new = lensEquationSolver.findBrightImage(x_source, y_source, kwargs_lens_new, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, x_pos_new)\n x_pos = np.sort(x_pos)\n x_pos_new = np.sort(x_pos_new)\n y_pos = np.sort(y_pos)\n y_pos_new = np.sort(y_pos_new)\n for i in range(len(x_pos)):\n npt.assert_almost_equal(x_pos[i], x_pos_new[i], decimal=6)\n npt.assert_almost_equal(y_pos[i], y_pos_new[i], decimal=6)\n npt.assert_almost_equal(kwargs_lens_new[1]['e1'], kwargs_lens[1]['e1'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[1]['e2'], kwargs_lens[1]['e2'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[0]['e1'], kwargs_lens[0]['e1'], decimal=8)\n npt.assert_almost_equal(kwargs_lens_new[0]['e2'], kwargs_lens[0]['e2'], decimal=8)\n\n def test_solver_multiplane(self):\n lens_model_list = ['SPEP', 'SHEAR', 'SIS']\n lensModel = LensModel(lens_model_list, z_source=1, lens_redshift_list=[0.5, 0.5, 0.3], multi_plane=True)\n\n lensEquationSolver = LensEquationSolver(lensModel)\n sourcePos_x = 0.01\n sourcePos_y = -0.01\n deltapix = 0.05\n numPix = 150\n gamma = 1.96\n e1, e2 = 0.01, 0.01\n kwargs_shear = {'e1': e1, 'e2': e2} # gamma_ext: shear strength, psi_ext: shear angel (in radian)\n kwargs_spemd = {'theta_E': 1., 'gamma': gamma, 'center_x': 0, 'center_y': 0, 'e1': 0.2, 'e2': 0.03}\n kwargs_sis = {'theta_E': .1, 'center_x': 1, 'center_y': 0}\n kwargs_lens = [kwargs_spemd, kwargs_shear, kwargs_sis]\n x_pos, y_pos = lensEquationSolver.findBrightImage(sourcePos_x, sourcePos_y, kwargs_lens, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, y_pos, 'test positions')\n kwargs_lens_init = [{'theta_E': 1.3, 'gamma': gamma, 'e1': 0.1, 'e2': 0, 'center_x': 0., 'center_y': 0},\n {'e1': e1, 'e2': e2}, {'theta_E': .1, 'center_x': 1, 'center_y': 0}]\n solver = Solver4Point(lensModel, solver_type='PROFILE')\n kwargs_lens_new, accuracy = solver.constraint_lensmodel(x_pos, y_pos, kwargs_lens_init)\n print(kwargs_lens_new, 'kwargs_lens_new')\n assert accuracy < 10**(-10)\n x_source, y_source = lensModel.ray_shooting(x_pos, y_pos, kwargs_lens_new)\n x_source, y_source = np.mean(x_source), np.mean(y_source)\n x_pos_new, y_pos_new = lensEquationSolver.findBrightImage(x_source, y_source, kwargs_lens_new, numImages=4,\n min_distance=deltapix, search_window=numPix * deltapix)\n print(x_pos, x_pos_new)\n x_pos = np.sort(x_pos)\n x_pos_new = np.sort(x_pos_new)\n y_pos = np.sort(y_pos)\n y_pos_new = np.sort(y_pos_new)\n for i in range(len(x_pos)):\n 
npt.assert_almost_equal(x_pos[i], x_pos_new[i], decimal=6)\n npt.assert_almost_equal(y_pos[i], y_pos_new[i], decimal=6)\n npt.assert_almost_equal(kwargs_lens_new[1]['e1'], kwargs_lens[1]['e1'], decimal=8)\n\n\nif __name__ == '__main__':\n pytest.main()\n", "__author__ = 'Simon Birrer'\n\n\"\"\"\nthis file contains standard routines\n\"\"\"\n\nimport numpy as np\nimport mpmath\nimport itertools\n\n\ndef merge_dicts(*dict_args):\n \"\"\"\n Given any number of dicts, shallow copy and merge into a new dict,\n precedence goes to key value pairs in latter dicts.\n \"\"\"\n result = {}\n for dictionary in dict_args:\n result.update(dictionary)\n return result\n\n\ndef approx_theta_E(ximg,yimg):\n\n dis = []\n xinds,yinds = [0,0,0,1,1,2],[1,2,3,2,3,3]\n\n for (i,j) in zip(xinds,yinds):\n\n dx,dy = ximg[i] - ximg[j], yimg[i] - yimg[j]\n dr = (dx**2+dy**2)**0.5\n dis.append(dr)\n dis = np.array(dis)\n\n greatest = np.argmax(dis)\n dr_greatest = dis[greatest]\n dis[greatest] = 0\n\n second_greatest = np.argmax(dis)\n dr_second = dis[second_greatest]\n\n return 0.5*(dr_greatest*dr_second)**0.5\n\n\ndef sort_image_index(ximg,yimg,xref,yref):\n\n \"\"\"\n\n :param ximg: x coordinates to sort\n :param yimg: y coordinates to sort\n :param xref: reference x coordinate\n :param yref: reference y coordinate\n :return: indexes such that ximg[indexes],yimg[indexes] matches xref,yref\n \"\"\"\n\n assert len(xref) == len(ximg)\n ximg,yimg = np.array(ximg),np.array(yimg)\n x_self = np.array(list(itertools.permutations(ximg)))\n y_self = np.array(list(itertools.permutations(yimg)))\n\n indexes = [0, 1, 2, 3]\n index_iterations = list(itertools.permutations(indexes))\n delta_r = []\n\n for i in range(0, int(len(x_self))):\n dr = 0\n for j in range(0, int(len(x_self[0]))):\n dr += (x_self[i][j] - xref[j]) ** 2 + (y_self[i][j] - yref[j]) ** 2\n\n delta_r.append(dr ** .5)\n\n min_indexes = np.array(index_iterations[np.argmin(delta_r)])\n\n return min_indexes\n\n\ndef rotate(xcoords, ycoords, angle):\n \"\"\"\n\n :param xcoords: x points\n :param ycoords: y points\n :param angle: angle in radians\n :return: x points and y points rotated ccw by angle theta\n \"\"\"\n return xcoords*np.cos(angle)+ycoords*np.sin(angle), -xcoords*np.sin(angle)+ycoords*np.cos(angle)\n\n\ndef map_coord2pix(ra, dec, x_0, y_0, M):\n \"\"\"\n this routines performs a linear transformation between two coordinate systems. 
Mainly used to transform angular\n into pixel coordinates in an image\n :param ra: ra coordinates\n :param dec: dec coordinates\n :param x_0: pixel value in x-axis of ra,dec = 0,0\n :param y_0: pixel value in y-axis of ra,dec = 0,0\n :param M: 2x2 matrix to transform angular to pixel coordinates\n :return: transformed coordinate systems of input ra and dec\n \"\"\"\n x, y = M.dot(np.array([ra, dec]))\n return x + x_0, y + y_0\n\n
\ndef array2image(array, nx=0, ny=0):\n \"\"\"\n returns the information contained in a 1d array into an n*n 2d array (only works when length of array is n**2)\n\n :param array: image values\n :type array: array of size n**2\n :returns: 2d array\n :raises: AttributeError, KeyError\n \"\"\"\n if nx == 0 or ny == 0:\n n = int(np.sqrt(len(array)))\n if n**2 != len(array):\n raise ValueError(\"length of input array given as %s is not square of integer number!\" %(len(array)))\n nx, ny = n, n\n image = array.reshape(int(nx), int(ny))\n return image\n\n
\ndef image2array(image):\n \"\"\"\n returns the information contained in a 2d array into an n*n 1d array\n\n :param array: image values\n :type array: array of size (n,n)\n :returns: 1d array\n :raises: AttributeError, KeyError\n \"\"\"\n nx, ny = image.shape # find the size of the array\n imgh = np.reshape(image, nx*ny) # change the shape to be 1d\n return imgh\n\n
\ndef make_grid(numPix, deltapix, subgrid_res=1, left_lower=False):\n \"\"\"\n\n :param numPix: number of pixels per axis\n :param deltapix: pixel size\n :param subgrid_res: sub-pixel resolution (default=1)\n :return: x, y position information in two 1d arrays\n \"\"\"\n\n numPix_eff = numPix*subgrid_res\n deltapix_eff = deltapix/float(subgrid_res)\n a = np.arange(numPix_eff)\n matrix = np.dstack(np.meshgrid(a, a)).reshape(-1, 2)\n x_grid = matrix[:, 0] * deltapix_eff\n y_grid = matrix[:, 1] * deltapix_eff\n if left_lower is True:\n shift = -1. / 2 + 1. / (2 * subgrid_res)\n else:\n shift = np.sum(x_grid) / numPix_eff**2\n return x_grid - shift, y_grid - shift\n\n
\ndef make_grid_transformed(numPix, Mpix2Angle):\n \"\"\"\n returns grid with linear transformation (deltaPix and rotation)\n :param numPix: number of Pixels\n :param Mpix2Angle: 2-by-2 matrix to map a pixel to a coordinate\n :return: coordinate grid\n \"\"\"\n x_grid, y_grid = make_grid(numPix, deltapix=1)\n ra_grid, dec_grid = map_coord2pix(x_grid, y_grid, 0, 0, Mpix2Angle)\n return ra_grid, dec_grid\n\n
\ndef make_grid_with_coordtransform(numPix, deltapix, subgrid_res=1, center_ra=0, center_dec=0, left_lower=False, inverse=True):\n \"\"\"\n same as make_grid routine, but returns the transformation matrix and shift between coordinates and pixel\n\n :param numPix: number of pixels per axis\n :param deltapix: pixel scale per axis\n :param subgrid_res: supersampling resolution relative to the stated pixel size\n :param center_ra: center of the grid\n :param center_dec: center of the grid\n :param left_lower: sets the zero point at the lower left corner of the pixels\n :param inverse: bool, if true sets East as left, otherwise East is right\n :return:\n \"\"\"\n numPix_eff = numPix*subgrid_res\n deltapix_eff = deltapix/float(subgrid_res)\n a = np.arange(numPix_eff)\n matrix = np.dstack(np.meshgrid(a, a)).reshape(-1, 2)\n if inverse is True:\n delta_x = -deltapix_eff\n else:\n delta_x = deltapix_eff\n if left_lower is True:\n ra_grid = matrix[:, 0]*deltapix\n dec_grid = matrix[:, 1]*deltapix\n else:\n ra_grid = (matrix[:, 0] - (numPix_eff-1)/2.)*delta_x\n dec_grid = (matrix[:, 1] - (numPix_eff-1)/2.)*deltapix_eff\n shift = (subgrid_res-1)/(2.*subgrid_res)*deltapix\n ra_grid -= shift + center_ra\n dec_grid -= shift + center_dec\n ra_at_xy_0 = ra_grid[0]\n dec_at_xy_0 = dec_grid[0]\n\n Mpix2coord = np.array([[delta_x, 0], [0, deltapix_eff]])\n Mcoord2pix = np.linalg.inv(Mpix2coord)\n #TODO incorporate center shift in grid\n x_at_radec_0, y_at_radec_0 = map_coord2pix(-ra_at_xy_0, -dec_at_xy_0, x_0=0, y_0=0, M=Mcoord2pix)\n return ra_grid, dec_grid, ra_at_xy_0, dec_at_xy_0, x_at_radec_0, y_at_radec_0, Mpix2coord, Mcoord2pix\n\n
\ndef grid_from_coordinate_transform(nx, ny, Mpix2coord, ra_at_xy_0, dec_at_xy_0):\n \"\"\"\n return a grid in x and y coordinates that satisfy the coordinate system\n\n\n :param nx: number of pixels in x-axis\n :param ny: number of pixels in y-axis\n :param Mpix2coord: transformation matrix (2x2) of pixels into coordinate displacements\n :param ra_at_xy_0: RA coordinate at (x,y) = (0,0)\n :param dec_at_xy_0: DEC coordinate at (x,y) = (0,0)\n :return: RA coordinate grid, DEC coordinate grid\n \"\"\"\n a = np.arange(nx)\n b = np.arange(ny)\n matrix = np.dstack(np.meshgrid(a, b)).reshape(-1, 2)\n x_grid = matrix[:, 0]\n y_grid = matrix[:, 1]\n ra_grid = x_grid * Mpix2coord[0, 0] + y_grid * Mpix2coord[0, 1] + ra_at_xy_0\n dec_grid = x_grid * Mpix2coord[1, 0] + y_grid * Mpix2coord[1, 1] + dec_at_xy_0\n return ra_grid, dec_grid\n\n
\ndef get_axes(x, y):\n \"\"\"\n computes the axis x and y of a given 2d grid\n :param x:\n :param y:\n :return:\n \"\"\"\n n=int(np.sqrt(len(x)))\n if n**2 != len(x):\n raise ValueError(\"length of input array given as %s is not square of integer number!\" % (len(x)))\n x_image = x.reshape(n,n)\n y_image = y.reshape(n,n)\n x_axes = x_image[0,:]\n y_axes = y_image[:,0]\n return x_axes, y_axes\n\n
\ndef averaging(grid, numGrid, numPix):\n \"\"\"\n resize 2d pixel grid with numGrid to numPix and averages over the pixels\n :param grid: higher
resolution pixel grid\n :param numGrid: number of pixels per axis in the high resolution input image\n :param numPix: lower number of pixels per axis in the output image (numGrid/numPix is integer number)\n :return:\n \"\"\"\n\n Nbig = numGrid\n Nsmall = numPix\n small = grid.reshape([int(Nsmall), int(Nbig/Nsmall), int(Nsmall), int(Nbig/Nsmall)]).mean(3).mean(1)\n return small\n\n\ndef displaceAbs(x, y, sourcePos_x, sourcePos_y):\n \"\"\"\n calculates a grid of distances to the observer in angel\n\n :param mapped_cartcoord: mapped cartesian coordinates\n :type mapped_cartcoord: numpy array (n,2)\n :param sourcePos: source position\n :type sourcePos: numpy vector [x0,y0]\n :returns: array of displacement\n :raises: AttributeError, KeyError\n \"\"\"\n x_mapped = x - sourcePos_x\n y_mapped = y - sourcePos_y\n absmapped = np.sqrt(x_mapped**2+y_mapped**2)\n return absmapped\n\n\ndef get_distance(x_mins, y_mins, x_true, y_true):\n \"\"\"\n\n :param x_mins:\n :param y_mins:\n :param x_true:\n :param y_true:\n :return:\n \"\"\"\n if len(x_mins) != len(x_true):\n return 10**10\n dist = 0\n x_true_list = np.array(x_true)\n y_true_list = np.array(y_true)\n\n for i in range(0,len(x_mins)):\n dist_list = (x_mins[i] - x_true_list)**2 + (y_mins[i] - y_true_list)**2\n dist += min(dist_list)\n k = np.where(dist_list == min(dist_list))\n if type(k) != int:\n k = k[0]\n x_true_list = np.delete(x_true_list, k)\n y_true_list = np.delete(y_true_list, k)\n return dist\n\n\ndef compare_distance(x_mapped, y_mapped):\n \"\"\"\n\n :param x_mapped: array of x-positions of remapped catalogue image\n :param y_mapped: array of y-positions of remapped catalogue image\n :return: sum of distance square of positions\n \"\"\"\n X2 = 0\n for i in range(0, len(x_mapped)-1):\n for j in range(i+1, len(x_mapped)):\n dx = x_mapped[i]-x_mapped[j]\n dy = y_mapped[i]-y_mapped[j]\n X2 += dx**2+dy**2\n return X2\n\n\ndef min_square_dist(x_1, y_1, x_2, y_2):\n \"\"\"\n return minimum of quadratic distance of pairs (x1, y1) to pairs (x2, y2)\n :param x_1:\n :param y_1:\n :param x_2:\n :param y_2:\n :return:\n \"\"\"\n dist = np.zeros_like(x_1)\n for i in range(len(x_1)):\n dist[i] = np.min((x_1[i] - x_2)**2 + (y_1[i] - y_2)**2)\n return dist\n\n\ndef selectBest(array, criteria, numSelect, highest=True):\n \"\"\"\n\n :param array: numpy array to be selected from\n :param criteria: criteria of selection\n :param highest: bool, if false the lowest will be selected\n :param numSelect: number of elements to be selected\n :return:\n \"\"\"\n n = len(array)\n m = len(criteria)\n if n != m:\n raise ValueError('Elements in array (%s) not equal to elements in criteria (%s)' % (n, m))\n if n < numSelect:\n return array\n array_sorted = array[criteria.argsort()]\n if highest:\n result = array_sorted[n-numSelect:]\n else:\n result = array_sorted[0:numSelect]\n return result[::-1]\n\n\ndef select_best(array, criteria, num_select, highest=True):\n \"\"\"\n\n :param array: numpy array to be selected from\n :param criteria: criteria of selection\n :param highest: bool, if false the lowest will be selected\n :param num_select: number of elements to be selected\n :return:\n \"\"\"\n n = len(array)\n m = len(criteria)\n if n != m:\n raise ValueError('Elements in array (%s) not equal to elements in criteria (%s)' % (n, m))\n if n < num_select:\n return array\n array = np.array(array)\n if highest is True:\n indexes = criteria.argsort()[::-1][:num_select]\n else:\n indexes = criteria.argsort()[::-1][n-num_select:]\n return array[indexes]\n\n\ndef 
points_on_circle(radius, points):\n \"\"\"\n returns a set of uniform points around a circle\n :param radius: radius of the circle\n :param points: number of points on the circle\n :return:\n \"\"\"\n angle = np.linspace(0, 2*np.pi, points)\n x_coord = np.cos(angle)*radius\n y_coord = np.sin(angle)*radius\n return x_coord, y_coord\n\n\ndef neighborSelect(a, x, y):\n \"\"\"\n finds (local) minima in a 2d grid\n\n :param a: 1d array of displacements from the source positions\n :type a: numpy array with length numPix**2 in float\n :returns: array of indices of local minima, values of those minima\n :raises: AttributeError, KeyError\n \"\"\"\n dim = int(np.sqrt(len(a)))\n values = []\n x_mins = []\n y_mins = []\n for i in range(dim+1,len(a)-dim-1):\n if (a[i] < a[i-1]\n and a[i] < a[i+1]\n and a[i] < a[i-dim]\n and a[i] < a[i+dim]\n and a[i] < a[i-(dim-1)]\n and a[i] < a[i-(dim+1)]\n and a[i] < a[i+(dim-1)]\n and a[i] < a[i+(dim+1)]):\n if(a[i] < a[(i-2*dim-1)%dim**2]\n and a[i] < a[(i-2*dim+1)%dim**2]\n and a[i] < a[(i-dim-2)%dim**2]\n and a[i] < a[(i-dim+2)%dim**2]\n and a[i] < a[(i+dim-2)%dim**2]\n and a[i] < a[(i+dim+2)%dim**2]\n and a[i] < a[(i+2*dim-1)%dim**2]\n and a[i] < a[(i+2*dim+1)%dim**2]):\n if(a[i] < a[(i-3*dim-1)%dim**2]\n and a[i] < a[(i-3*dim+1)%dim**2]\n and a[i] < a[(i-dim-3)%dim**2]\n and a[i] < a[(i-dim+3)%dim**2]\n and a[i] < a[(i+dim-3)%dim**2]\n and a[i] < a[(i+dim+3)%dim**2]\n and a[i] < a[(i+3*dim-1)%dim**2]\n and a[i] < a[(i+3*dim+1)%dim**2]):\n x_mins.append(x[i])\n y_mins.append(y[i])\n values.append(a[i])\n return np.array(x_mins), np.array(y_mins), np.array(values)\n\n\ndef fwhm2sigma(fwhm):\n \"\"\"\n\n :param fwhm: full-widt-half-max value\n :return: gaussian sigma (sqrt(var))\n \"\"\"\n sigma = fwhm / (2 * np.sqrt(2 * np.log(2)))\n return sigma\n\n\ndef sigma2fwhm(sigma):\n \"\"\"\n\n :param sigma:\n :return:\n \"\"\"\n fwhm = sigma * (2 * np.sqrt(2 * np.log(2)))\n return fwhm\n\n\ndef hyper2F2_array(a, b, c, d, x):\n \"\"\"\n\n :param a:\n :param b:\n :param c:\n :param d:\n :param x:\n :return:\n \"\"\"\n if isinstance(x, int) or isinstance(x, float):\n out = mpmath.hyp2f2(a, b, c, d, x)\n else:\n n = len(x)\n out = np.zeros(n)\n for i in range(n):\n out[i] = mpmath.hyp2f2(a, b, c, d, x[i])\n return out\n\n\ndef make_subgrid(ra_coord, dec_coord, subgrid_res=2):\n \"\"\"\n return a grid with subgrid resolution\n :param ra_coord:\n :param dec_coord:\n :param subgrid_res:\n :return:\n \"\"\"\n ra_array = array2image(ra_coord)\n dec_array = array2image(dec_coord)\n n = len(ra_array)\n d_ra_x = ra_array[0][1] - ra_array[0][0]\n d_ra_y = ra_array[1][0] - ra_array[0][0]\n d_dec_x = dec_array[0][1] - dec_array[0][0]\n d_dec_y = dec_array[1][0] - dec_array[0][0]\n\n ra_array_new = np.zeros((n*subgrid_res, n*subgrid_res))\n dec_array_new = np.zeros((n*subgrid_res, n*subgrid_res))\n for i in range(0, subgrid_res):\n for j in range(0, subgrid_res):\n ra_array_new[i::subgrid_res, j::subgrid_res] = ra_array + d_ra_x * (-1/2. + 1/(2.*subgrid_res) + j/float(subgrid_res)) + d_ra_y * (-1/2. + 1/(2.*subgrid_res) + i/float(subgrid_res))\n dec_array_new[i::subgrid_res, j::subgrid_res] = dec_array + d_dec_x * (-1/2. + 1/(2.*subgrid_res) + j/float(subgrid_res)) + d_dec_y * (-1/2. 
+ 1/(2.*subgrid_res) + i/float(subgrid_res))\n\n ra_coords_sub = image2array(ra_array_new)\n dec_coords_sub = image2array(dec_array_new)\n return ra_coords_sub, dec_coords_sub\n\n", "__author__ = 'sibirrer'\n\nimport numpy as np\nimport numpy.testing as npt\nfrom lenstronomy.Util import util\nfrom lenstronomy.ImSim.Numerics.grid import AdaptiveGrid\nfrom lenstronomy.LightModel.light_model import LightModel\n\nimport pytest\n\n\nclass TestAdaptiveGrid(object):\n\n def setup(self):\n deltaPix = 1.\n transform_pix2angle = np.array([[1, 0], [0, 1]]) * deltaPix\n ra_at_xy_0, dec_at_xy_0 = -5, -5\n nx, ny = 11, 11\n self._supersampling_factor = 4\n supersampling_indexes = np.zeros((nx, ny))\n supersampling_indexes = np.array(supersampling_indexes, dtype=bool)\n supersampling_indexes[5, 5] = True\n self._supersampling_indexes = supersampling_indexes\n self.nx, self.ny = nx, ny\n self._adaptive_grid = AdaptiveGrid(nx, ny, transform_pix2angle, ra_at_xy_0, dec_at_xy_0, supersampling_indexes, self._supersampling_factor)\n\n def test_coordinates_evaluate(self):\n x_grid, y_grid = self._adaptive_grid.coordinates_evaluate\n print(np.shape(x_grid), 'test shape')\n assert len(x_grid) == self._supersampling_factor**2 + self.nx * self.ny - 1\n\n def test_subpixel_coordinates(self):\n subpixel_x, subpixel_y = self._adaptive_grid._high_res_coordinates\n assert len(subpixel_x) == 4**2\n assert subpixel_x[0] == -0.375\n assert subpixel_y[0] == -0.375\n assert subpixel_y[3] == -0.375\n assert subpixel_x[3] == 0.375\n\n def test_average_subgrid(self):\n subpixel_x, subpixel_y = self._adaptive_grid._high_res_coordinates\n model = LightModel(light_model_list=['GAUSSIAN'])\n kwargs_light = [{'center_x': 0, 'center_y': 0, 'sigma_x': 1, 'sigma_y': 1, 'amp': 1}]\n subgrid_values = model.surface_brightness(subpixel_x, subpixel_y, kwargs_light)\n supersampled_values = self._adaptive_grid._average_subgrid(subgrid_values)\n assert len(supersampled_values) == 1\n\n def test_merge_low_high_res(self):\n subpixel_x, subpixel_y = self._adaptive_grid._high_res_coordinates\n x, y = self._adaptive_grid._x_low_res, self._adaptive_grid._x_low_res\n model = LightModel(light_model_list=['GAUSSIAN'])\n kwargs_light = [{'center_x': 0, 'center_y': 0, 'sigma_x': 1, 'sigma_y': 1, 'amp': 1}]\n subgrid_values = model.surface_brightness(subpixel_x, subpixel_y, kwargs_light)\n image1d = model.surface_brightness(x, y, kwargs_light)\n\n image_added = self._adaptive_grid._merge_low_high_res(image1d, subgrid_values)\n added_array = util.image2array(image_added)\n supersampled_values = self._adaptive_grid._average_subgrid(subgrid_values)\n assert added_array[util.image2array(self._supersampling_indexes)] == supersampled_values\n\n image_high_res = self._adaptive_grid._high_res_image(subgrid_values)\n assert len(image_high_res) == self.nx * self._supersampling_factor\n\n def test_flux_array2image_low_high(self):\n x, y = self._adaptive_grid.coordinates_evaluate\n model = LightModel(light_model_list=['GAUSSIAN'])\n kwargs_light = [{'center_x': 0, 'center_y': 0, 'sigma_x': 1, 'sigma_y': 1, 'amp': 1}]\n flux_values = model.surface_brightness(x, y, kwargs_light)\n image_low_res, image_high_res = self._adaptive_grid.flux_array2image_low_high(flux_values)\n assert len(image_high_res) == self.nx * self._supersampling_factor\n\n\nclass TestRegularGrid(object):\n\n def setup(self):\n pass\n\n\nif __name__ == '__main__':\n pytest.main()\n", "from lenstronomy.GalKin.light_profile import LightProfile\nfrom lenstronomy.GalKin.mass_profile import 
MassProfile\nfrom lenstronomy.GalKin.aperture import Aperture\nfrom lenstronomy.GalKin.anisotropy import MamonLokasAnisotropy\nfrom lenstronomy.GalKin.psf import PSF\nfrom lenstronomy.GalKin.cosmo import Cosmo\nimport lenstronomy.GalKin.velocity_util as util\nimport lenstronomy.Util.constants as const\n\nimport numpy as np\n\n
\nclass Galkin(object):\n \"\"\"\n Major class to compute velocity dispersion measurements given light and mass models\n\n The class supports any mass and light distribution (and superposition thereof) that has a 3d correspondence in their\n 2d lens model distribution. For models that do not have this correspondence, you may want to apply a\n Multi-Gaussian Expansion (MGE) on their models and use the MGE to be de-projected to 3d.\n\n The computation follows Mamon&Lokas 2005 and performs the spectral rendering of the seeing convolved aperture with\n the method introduced by Birrer et al. 2016.\n\n The class supports various types of anisotropy models (see Anisotropy class) and aperture types (see Aperture class).\n Solving the Jeans Equation requires a numerical integral over the 3d light and mass profile (see Mamon&Lokas 2005).\n This class (as well as the dedicated LightModel and MassModel classes) performs those integrals numerically with an\n interpolated grid.\n\n The seeing convolved integral over the aperture is computed by rendering spectra (light weighted LOS kinematics)\n from the light distribution.\n\n The cosmology assumed to compute the physical mass and distances is set via the kwargs_cosmo keyword arguments.\n D_d: Angular diameter distance to the deflector (in Mpc)\n D_s: Angular diameter distance to the source (in Mpc)\n D_ds: Angular diameter distance from the deflector to the source (in Mpc)\n\n The numerical options can be chosen through the kwargs_numerics keywords\n sampling_number: number of spectral renderings to compute the light weighted integrated LOS dispersion within\n the aperture. This keyword should be chosen high enough to result in converged results within the tolerance.\n\n interpol_grid_num: number of interpolation points in the light and mass profile (radially). This number should\n be chosen high enough to accurately describe the true light profile underneath.\n log_integration: bool, if True, performs the interpolation and numerical integration in log-scale.\n\n max_integrate: maximum 3d radius to where the numerical integration of the Jeans Equation solver is made.\n This value should be large enough to contain most of the light and to lead to a converged result.\n min_integrate: minimal integration value. This value should be very close to zero but some mass and light\n profiles are diverging and a numerically stable value should be chosen.\n\n These numerical options should be chosen to allow for a converged result (within your tolerance) but not so\n conservative that they impact the computational cost too much. Reasonable values might depend on the specific problem.\n\n \"\"\"\n def __init__(self, mass_profile_list, light_profile_list, aperture_type='slit', anisotropy_model='isotropic',\n psf_type='GAUSSIAN', fwhm=0.7, moffat_beta=2.6, kwargs_cosmo={'D_d': 1000, 'D_s': 2000, 'D_ds': 500},\n sampling_number=1000, interpol_grid_num=500, log_integration=False, max_integrate=10, min_integrate=0.001):\n \"\"\"\n\n :param mass_profile_list: list of lens (mass) model profiles\n :param light_profile_list: list of light model profiles of the lensing galaxy\n :param aperture_type: type of slit/shell aperture where the light is coming from. See details in Aperture() class.\n :param anisotropy_model: type of stellar anisotropy model. See details in MamonLokasAnisotropy() class.\n :param psf_type: string, point spread function type, current support for 'GAUSSIAN' and 'MOFFAT'\n :param fwhm: full width at half maximum seeing condition\n :param moffat_beta: float, beta parameter of Moffat profile\n :param kwargs_cosmo: keyword arguments that define the cosmology in terms of the angular diameter distances involved\n \"\"\"\n self.massProfile = MassProfile(mass_profile_list, kwargs_cosmo, interpol_grid_num=interpol_grid_num,\n max_interpolate=max_integrate, min_interpolate=min_integrate)\n self.lightProfile = LightProfile(light_profile_list, interpol_grid_num=interpol_grid_num,\n max_interpolate=max_integrate, min_interpolate=min_integrate)\n self.aperture = Aperture(aperture_type)\n self.anisotropy = MamonLokasAnisotropy(anisotropy_model)\n\n self.cosmo = Cosmo(**kwargs_cosmo)\n self._num_sampling = sampling_number\n self._interp_grid_num = interpol_grid_num\n self._log_int = log_integration\n self._max_integrate = max_integrate # maximal integration (and interpolation) in units of arcsecs\n self._min_integrate = min_integrate # min integration (and interpolation) in units of arcsecs\n self._psf = PSF(psf_type=psf_type, fwhm=fwhm, moffat_beta=moffat_beta)\n\n def vel_disp(self, kwargs_mass, kwargs_light, kwargs_anisotropy, kwargs_aperture):\n \"\"\"\n computes the averaged LOS velocity dispersion in the slit (convolved)\n\n :param kwargs_mass: mass model parameters (following lenstronomy lens model conventions)\n :param kwargs_light: deflector light parameters (following lenstronomy light model conventions)\n :param kwargs_anisotropy: anisotropy parameters, may vary according to anisotropy type chosen.\n We refer to the Anisotropy() class for details on the parameters.\n :param kwargs_aperture: Aperture parameters, may vary depending on aperture type chosen.\n We refer to the Aperture() class for details on the parameters.\n :return: integrated LOS velocity dispersion in units [km/s]\n \"\"\"\n sigma2_R_sum = 0\n for i in range(0, self._num_sampling):\n sigma2_R = self.draw_one_sigma2(kwargs_mass, kwargs_light, kwargs_anisotropy, kwargs_aperture)\n sigma2_R_sum += sigma2_R\n sigma_s2_average = sigma2_R_sum / self._num_sampling\n # apply unit conversion from arc seconds and deflections to physical velocity dispersion in (km/s)\n sigma_s2_average *= 2 * const.G # correcting for integral prefactor\n return np.sqrt(sigma_s2_average/(const.arcsec**2 * self.cosmo.D_d**2 * const.Mpc))/1000. # in units of km/s\n\n def draw_one_sigma2(self, kwargs_mass, kwargs_light, kwargs_anisotropy, kwargs_aperture):\n \"\"\"\n\n :param kwargs_mass: mass model parameters (following lenstronomy lens model conventions)\n :param kwargs_light: deflector light parameters (following lenstronomy light model conventions)\n :param kwargs_anisotropy: anisotropy parameters, may vary according to anisotropy type chosen.\n We refer to the Anisotropy() class for details on the parameters.\n :param kwargs_aperture: Aperture parameters, may vary depending on aperture type chosen.\n We refer to the Aperture() class for details on the parameters.\n :return: integrated LOS velocity dispersion in angular units for a single draw of the light distribution that\n falls in the aperture after displacing with the seeing\n \"\"\"\n while True:\n R = self.lightProfile.draw_light_2d(kwargs_light) # draw projected radius R from the light profile\n x, y = util.draw_xy(R) # draw an x, y position at projected radius R\n x_, y_ = self._psf.displace_psf(x, y)\n in_aperture = self.aperture.aperture_select(x_, y_, kwargs_aperture)\n if in_aperture is True:\n break\n sigma2_R = self.sigma2_R(R, kwargs_mass, kwargs_light, kwargs_anisotropy)\n return sigma2_R\n\n def sigma2_R(self, R, kwargs_mass, kwargs_light, kwargs_anisotropy):\n \"\"\"\n returns unweighted LOS velocity dispersion for a specified projected radius\n\n :param R: 2d projected radius (in angular units)\n :param kwargs_mass: mass model parameters (following lenstronomy lens model conventions)\n :param kwargs_light: deflector light parameters (following lenstronomy light model conventions)\n :param kwargs_anisotropy: anisotropy parameters, may vary according to anisotropy type chosen.\n We refer to the Anisotropy() class for details on the parameters.\n :return:\n \"\"\"\n I_R_sigma2 = self.I_R_sigma2(R, kwargs_mass, kwargs_light, kwargs_anisotropy)\n I_R = self.lightProfile.light_2d(R, kwargs_light)\n return I_R_sigma2 / I_R\n\n def I_R_sigma2(self, R, kwargs_mass, kwargs_light, kwargs_anisotropy):\n \"\"\"\n equation A15 in Mamon&Lokas 2005 as a logarithmic numerical integral (if option is chosen)\n modulo pre-factor 2*G\n\n :param R: 2d projected radius (in angular units)\n :param kwargs_mass: mass model parameters (following lenstronomy lens model conventions)\n :param kwargs_light: deflector light parameters (following lenstronomy light model conventions)\n :param kwargs_anisotropy: anisotropy parameters, may vary according to anisotropy type chosen.\n We refer to the Anisotropy() class for details on the parameters.\n :return: integral of A15 in Mamon&Lokas 2005\n \"\"\"\n R = max(R, self._min_integrate)\n if self._log_int is True:\n min_log = np.log10(R+0.001)\n max_log = np.log10(self._max_integrate)\n r_array = np.logspace(min_log, max_log, self._interp_grid_num)\n dlog_r = (np.log10(r_array[2]) - np.log10(r_array[1])) * np.log(10)\n IR_sigma2_dr = self._integrand_A15(r_array, R, kwargs_mass, kwargs_light, kwargs_anisotropy) * dlog_r * r_array\n else:\n r_array = np.linspace(R+0.001, self._max_integrate, self._interp_grid_num)\n dr = r_array[2] - r_array[1]\n IR_sigma2_dr = self._integrand_A15(r_array, R, kwargs_mass, kwargs_light, kwargs_anisotropy) * dr\n IR_sigma2 = np.sum(IR_sigma2_dr) * const.arcsec * self.cosmo.D_d # integral from angle to physical scales\n return IR_sigma2\n\n def _integrand_A15(self, r, R, kwargs_mass, kwargs_light, kwargs_anisotropy):\n \"\"\"\n integrand of A15 (in log space) in Mamon&Lokas 2005\n\n :param r: 3d radius\n :param R: 2d projected radius\n :param kwargs_mass: mass model parameters (following
lenstronomy lens model conventions)\n :param kwargs_light: deflector light parameters (following lenstronomy light model conventions)\n :param kwargs_anisotropy: anisotropy parameters, may vary according to anisotropy type chosen.\n We refer to the Anisotropy() class for details on the parameters.\n :return:\n \"\"\"\n k_r = self.anisotropy.K(r, R, kwargs_anisotropy)\n l_r = self.lightProfile.light_3d_interp(r, kwargs_light)\n m_r = self.massProfile.mass_3d_interp(r, kwargs_mass)\n out = k_r * l_r * m_r / r\n return out\n", "import numpy as np\nimport lenstronomy.Util.util as util\nimport lenstronomy.Util.param_util as param_util\n\n\nclass NIE(object):\n \"\"\"\n non-divergent isothermal ellipse (projected)\n \"\"\"\n param_names = ['amp', 'e1', 'e2', 's_scale', 'center_x', 'center_y']\n lower_limit_default = {'amp': 0, 'e1': -0.5, 'e2': -0.5, 's_scale': 0, 'center_x': -100, 'center_y': -100}\n upper_limit_default = {'amp': 100, 'e1': 0.5, 'e2': 0.5, 's_scale': 100, 'center_x': 100, 'center_y': 100}\n\n def function(self, x, y, amp, e1, e2, s_scale, center_x=0, center_y=0):\n \"\"\"\n\n :param x:\n :param y:\n :param theta_E:\n :param e1:\n :param e2:\n :param s_scale:\n :param center_x:\n :param center_y:\n :return:\n \"\"\"\n phi_G, q = param_util.ellipticity2phi_q(e1, e2)\n # shift\n x_ = x - center_x\n y_ = y - center_y\n # rotate\n x__, y__ = util.rotate(x_, y_, phi_G)\n # evaluate\n f_ = self._nie_simple_function(x__, y__, amp, s_scale, q)\n # rotate back\n return f_\n\n def _nie_simple_function(self, x, y, amp, s, q):\n \"\"\"\n\n :param x:\n :param y:\n :param amp:\n :param s_cale:\n :param q:\n :return:\n \"\"\"\n return amp / np.sqrt(q**2*(s**2 + x**2) + y**2)", "import numpy as np\nimport pytest\nimport lenstronomy.Util.mask as mask_util\nimport lenstronomy.Util.util as util\n\n\ndef test_get_mask():\n x = np.linspace(0, 10, 100)\n y = np.linspace(0, 10, 100)\n center_x = 5\n center_y = 5\n r = 1\n mask = mask_util.mask_center_2d(center_x, center_y, r, x, y)\n assert mask[0][0] == 1\n assert mask[5][5] == 0\n\n\ndef test_mask_half_moon():\n x, y = util.make_grid(numPix=100, deltapix=1)\n mask = mask_util.mask_half_moon(x, y, center_x=0, center_y=0, r_in=5, r_out=10, phi0=0, delta_phi=np.pi)\n assert mask[0] == 0\n\n mask = mask_util.mask_half_moon(x, y, center_x=0, center_y=0, r_in=5, r_out=10, phi0=0, delta_phi=-np.pi)\n assert mask[0] == 0\n\n\ndef test_mask_ellipse():\n x, y = util.make_grid(numPix=100, deltapix=1)\n mask = mask_util.mask_ellipse(x, y, center_x=0, center_y=0, a=10, b=20, angle=0)\n assert mask[0] == 0\n\n\nif __name__ == '__main__':\n pytest.main()\n" ]
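The I_R_sigma2 routine in the Galkin class above switches between a linear and a logarithmic integration grid; the extra factor of r_array in the log branch follows from dr = r d(ln r). A minimal standalone sketch of that substitution (plain numpy; the test integrand r**-2 and the bounds 0.1 to 10 are arbitrary illustration choices, not values from the entry above):

import numpy as np

f = lambda r: r ** -2  # steep test integrand; exact integral on [0.1, 10] is 9.9
n = 500

# quadrature on a linear grid: sum f(r_i) * dr
r_lin = np.linspace(0.1, 10, n)
dr = r_lin[2] - r_lin[1]
approx_lin = np.sum(f(r_lin) * dr)

# quadrature on a log grid: dr = r * dlog_r, hence the extra factor of r
r_log = np.logspace(np.log10(0.1), np.log10(10), n)
dlog_r = (np.log10(r_log[2]) - np.log10(r_log[1])) * np.log(10)
approx_log = np.sum(f(r_log) * r_log * dlog_r)

print(approx_lin, approx_log)  # the log grid samples the steep inner region far more densely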
[ [ "numpy.abs", "numpy.ones", "numpy.mean", "numpy.isscalar", "numpy.array", "numpy.zeros" ], [ "numpy.arctan2", "numpy.cos", "numpy.sqrt", "numpy.sin" ], [ "numpy.sqrt", "numpy.sort", "numpy.testing.assert_almost_equal", "numpy.max", "numpy.mean", "numpy.zeros_like", "numpy.array" ], [ "numpy.log", "numpy.sqrt", "numpy.linspace", "numpy.min", "numpy.reshape", "numpy.arange", "numpy.linalg.inv", "numpy.meshgrid", "numpy.cos", "numpy.sin", "numpy.delete", "numpy.argmax", "numpy.zeros_like", "numpy.argmin", "numpy.array", "numpy.zeros", "numpy.sum" ], [ "numpy.shape", "numpy.array", "numpy.zeros" ], [ "numpy.log", "numpy.sqrt", "numpy.linspace", "numpy.logspace", "numpy.log10", "numpy.sum" ], [ "numpy.sqrt" ], [ "numpy.linspace" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
dirty-cat/categorical-encoding
[ "fb0a1c4216533034e7516efc0698c7e4477b0243" ]
[ "benchmarks/supervectorizer_tuning.py" ]
[ "\"\"\"\nPerforms a GridSearch to find the best parameters for the SuperVectorizer\namong a selection.\n\"\"\"\n\nimport logging\nimport pandas as pd\n\nfrom sklearn.ensemble import RandomForestRegressor, RandomForestClassifier\nfrom sklearn.model_selection import GridSearchCV\nfrom sklearn.pipeline import Pipeline\n\nfrom dirty_cat import SuperVectorizer\nfrom dirty_cat.datasets import fetch_open_payments, fetch_drug_directory, \\\n fetch_road_safety, fetch_midwest_survey, fetch_medical_charge, \\\n fetch_employee_salaries, fetch_traffic_violations\n\nfrom pathlib import Path\nfrom functools import wraps\nfrom datetime import datetime\nfrom typing import List, Tuple\n\n\ndef get_classification_datasets() -> List[Tuple[dict, str]]:\n return [\n (fetch_open_payments(), 'open_payments'),\n # (fetch_drug_directory(), 'drug_directory),\n (fetch_road_safety(), 'road_safety'),\n (fetch_midwest_survey(), 'midwest_survey'),\n (fetch_traffic_violations(), 'traffic_violations'),\n ]\n\n\ndef get_regression_datasets() -> List[Tuple[dict, str]]:\n return [\n (fetch_medical_charge(), 'medical_charge'),\n (fetch_employee_salaries(), 'employee_salaries'),\n ]\n\n\ndef get_dataset(info) -> Tuple[pd.DataFrame, pd.Series]:\n df = pd.read_csv(info['path'], **info['read_csv_kwargs'])\n y = df[info['y']]\n X = df.drop(info['y'], axis=1).astype(str)\n return X, y\n\n\ndef set_logging(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n logging_level = logging.DEBUG\n\n logger = logging.getLogger()\n logger.setLevel(logging_level)\n\n formatter = logging.Formatter('%(asctime)s - [%(levelname)s] %(message)s')\n formatter.datefmt = '%m/%d/%Y %H:%M:%S'\n\n path = Path(__file__).parent / f'tuning_{str(datetime.now())[:10]}.log'\n\n fh = logging.FileHandler(filename=path, mode='w')\n fh.setLevel(logging_level)\n fh.setFormatter(formatter)\n\n # sh = logging.StreamHandler(sys.stdout)\n # sh.setLevel(logging_level)\n # sh.setFormatter(formatter)\n\n logger.addHandler(fh)\n # logger.addHandler(sh)\n\n return func(*args, **kwargs)\n return wrapper\n\n\n@set_logging\ndef main():\n logging.info('Launching !')\n\n card_possibilities = [20, 30, 40, 50]\n n_comp_possibilities = [10, 30, 50]\n\n logging.debug('Creating pipelines')\n regression_pipeline = Pipeline([\n ('sv', SuperVectorizer()),\n ('estimator', RandomForestRegressor()),\n ])\n classification_pipeline = Pipeline([\n ('sv', SuperVectorizer()),\n ('estimator', RandomForestClassifier()),\n ])\n\n logging.debug(f'With cardinality possibilities: {card_possibilities} '\n f'and n_components possibilities: {n_comp_possibilities}')\n for pipeline, datasets in zip(\n [\n regression_pipeline,\n classification_pipeline,\n ],\n [\n get_regression_datasets(),\n get_classification_datasets(),\n ]\n ):\n for info, name in datasets:\n X, y = get_dataset(info)\n if name != 'traffic_violations':\n continue\n\n csv_path = Path('.').resolve() / f'{name}_results.csv'\n if csv_path.exists():\n # If the results already exist, we'll skip to the next\n logging.debug(f'Skipping {name} as {csv_path!s} was found')\n continue\n\n logging.debug(f'Running search on {name}')\n grid = GridSearchCV(\n estimator=pipeline,\n param_grid={\n 'sv__cardinality_threshold': card_possibilities,\n 'sv__high_card_str_transformer__n_components': n_comp_possibilities,\n },\n n_jobs=30,\n )\n grid.fit(X, y)\n\n df = pd.DataFrame(grid.cv_results_)\n df.to_csv(csv_path)\n logging.info(f'Saved search results in {csv_path!s}')\n\n\nif __name__ == '__main__':\n main()\n" ]
[ [ "sklearn.ensemble.RandomForestRegressor", "sklearn.model_selection.GridSearchCV", "pandas.read_csv", "sklearn.ensemble.RandomForestClassifier", "pandas.DataFrame" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [], "tensorflow": [] } ]
viettriit2110/face_recognition
[ "0e1821af6538c573ed4a87acc361c44900f849eb" ]
[ "examples/face_recognition_svm.py" ]
[ "# Train multiple images per person\r\n# Find and recognize faces in an image using a SVC with scikit-learn\r\n\r\n\"\"\"\r\nStructure:\r\n <test_image>.jpg\r\n <train_dir>/\r\n <person_1>/\r\n <person_1_face-1>.jpg\r\n <person_1_face-2>.jpg\r\n .\r\n .\r\n <person_1_face-n>.jpg\r\n <person_2>/\r\n <person_2_face-1>.jpg\r\n <person_2_face-2>.jpg\r\n .\r\n .\r\n <person_2_face-n>.jpg\r\n .\r\n .\r\n <person_n>/\r\n <person_n_face-1>.jpg\r\n <person_n_face-2>.jpg\r\n .\r\n .\r\n <person_n_face-n>.jpg\r\n\"\"\"\r\n\r\nimport face_recognition\r\nfrom sklearn import svm\r\nimport os\r\n\r\n# Training the SVC classifier\r\n\r\n# The training data would be all the face encodings from all the known images and the labels are their names\r\nencodings = []\r\nnames = []\r\n\r\n# Training directory\r\ntrain_dir = os.listdir('/train_dir/')\r\n\r\n# Loop through each person in the training directory\r\nfor person in train_dir:\r\n pix = os.listdir(\"/train_dir/\" + person)\r\n\r\n # Loop through each training image for the current person\r\n for person_img in pix:\r\n # Get the face encodings for the face in each image file\r\n face = face_recognition.load_image_file(\"/train_dir/\" + person + \"/\" + person_img)\r\n face_bounding_boxes = face_recognition.face_locations(face)\r\n\r\n #If training image contains none or more than faces, print an error message and exit\r\n if len(face_bounding_boxes) != 1:\r\n print(person + \"/\" + person_img + \" contains none or more than one faces and can't be used for training.\")\r\n exit()\r\n else:\r\n face_enc = face_recognition.face_encodings(face)[0]\r\n # Add face encoding for current image with corresponding label (name) to the training data\r\n encodings.append(face_enc)\r\n names.append(person)\r\n\r\n# Create and train the SVC classifier\r\nclf = svm.SVC(gamma='scale')\r\nclf.fit(encodings,names)\r\n\r\n# Load the test image with unknown faces into a numpy array\r\ntest_image = face_recognition.load_image_file('test_image.jpg')\r\n\r\n# Find all the faces in the test image using the default HOG-based model\r\nface_locations = face_recognition.face_locations(test_image)\r\nno = len(face_locations)\r\nprint(\"Number of faces detected: \", no)\r\n\r\n# Predict all the faces in the test image using the trained classifier\r\nprint(\"Found:\")\r\nfor i in range(no):\r\n test_image_enc = face_recognition.face_encodings(test_image)[i]\r\n name = clf.predict([test_image_enc])\r\n print(*name)\r\n" ]
[ [ "sklearn.svm.SVC" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
oeg-upm/ttla
[ "ab1cc5a2777b3d4fb905f4452379f469153c904b", "ab1cc5a2777b3d4fb905f4452379f469153c904b" ]
[ "commons/__init__.py", "label/classification.py" ]
[ "import os\nimport pandas as pd\nfrom easysparql import *\n\nENDPOINT = \"https://dbpedia.org/sparql\"\nMIN_NUM_OF_ENT_PER_PROP = 30 # the minimum number of entities per property (get_properties)\nQUERY_LIMIT = \"\" # At the moment, we do not put any limit on the number of results\nMIN_NUM_NUMS = 30 # The minimum number of values that will be annotated, this is to ignore small size\n\nproj_path = (os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))\n\ndata_dir = os.path.join(proj_path, 'data')\nmeta_dir = os.path.join(proj_path, 'meta')\nmodels_dir = os.path.join(proj_path, 'local_models')\nlog_dir = os.path.join(proj_path, 'local_logs')\n\n\n# kinds\nNOMINAL = \"nominal\"\nORDINAL = \"ordinal\"\nRATIO_INTERVAL = \"ratio-interval\"\n\n# sub kinds\nCATEGORICAL = \"categorical\"\nSEQUENTIAL = \"sequential\"\nHIERARCHICAL = \"hierarchical\"\nRANDOM = \"random\"\nCOUNTS = \"count\"\nOTHER = \"other\"\n\nYEAR = \"year\"\n\n\n# I am not sure of the below is useful\n# kinds and subkinds\nKINDS = {\n ORDINAL: [],\n NOMINAL: [CATEGORICAL, SEQUENTIAL, HIERARCHICAL, RANDOM],\n RATIO_INTERVAL: [COUNTS, OTHER],\n YEAR: []\n}\n\n\ndef get_column_from_meta(fname, column_id):\n \"\"\"\n :param fname:\n :param column_id:\n :return:\n \"\"\"\n fdir = os.path.join(data_dir, 'T2Dv2', fname+\".csv\")\n df = pd.read_csv(fdir)\n col_name = df.columns.values[column_id]\n return list(df[col_name])\n\n\ndef t2dv2_columns_of_kind(num_kind, sub_kind=None):\n \"\"\"\n :param num_kind: nominal, ordinal, ratio-interval\n :return: a dataframe of the specified kind\n \"\"\"\n meta_file_dir = os.path.join(meta_dir, 'T2Dv2_typology.csv')\n df = pd.read_csv(meta_file_dir)\n if sub_kind is None:\n dfkind = df[df.kind == num_kind]\n else:\n dfkind = df[df.kind == num_kind and df.sub_kind == sub_kind]\n print(dfkind)\n return dfkind\n\n\ndef get_numerics_from_list(nums_str_list):\n \"\"\"\n :param nums_str_list: list of string or numbers or a mix\n :return: list of numbers or None if less than 50% are numbers\n \"\"\"\n nums = []\n for c in nums_str_list:\n n = get_num(c)\n if n is not None:\n nums.append(n)\n if len(nums) < len(nums_str_list)/2:\n return None\n return nums\n\n\ndef get_num(num_or_str):\n \"\"\"\n :param num_or_str:\n :return: number or None if it is not a number\n \"\"\"\n if pd.isna(num_or_str):\n return None\n elif isinstance(num_or_str, (int, float)):\n return num_or_str\n elif isinstance(num_or_str, basestring):\n if '.' 
in num_or_str or ',' in num_or_str or num_or_str.isdigit():\n try:\n return float(num_or_str.replace(',', ''))\n except Exception as e:\n return None\n return None\n\n\ndef class_uri_to_fname(class_uri):\n \"\"\"\n :param class_uri:\n :return:\n \"\"\"\n if class_uri[:7] == \"http://\":\n class_dname = class_uri[7:]\n elif class_uri[:8] == \"https://\":\n class_dname = class_uri[8:]\n class_fname = class_dname.replace('/', '__').replace(',', '').replace('#', '_')#.replace('-', '_')\n return class_fname\n", "from loader import *\nimport os\nfrom operator import itemgetter\nfrom fuzzycmeans import FCM\nfrom detect.Detection import Detection\nimport model_construction\nimport pandas as pd\nimport numpy as np\nimport commons\nimport logging\nimport features\nfrom commons.logger import set_config\nlogger = set_config(logging.getLogger(__name__))\nnp.set_printoptions(suppress=True)\n\nTEST = False\nTOP_K = 10 # return the top k properties in the classify function\n\n\ndef classify(kind, class_uri, columns):\n \"\"\"\n :param kind:\n :param class_uri:\n :param columns: expecting a list of list\n :return: list of predictions\n each prediction is a pair of (membership_val, property_uri)\n \"\"\"\n predictions = []\n model_fdir = model_construction.build_model(class_uri=class_uri)\n fcm, centroid_names, max_num_of_features = load_model(kind, model_fdir)\n if fcm is None:\n logger.debug(\"empty fcm model\")\n return []\n for col in columns:\n logger.debug(\"detect column kind\")\n d = Detection(col)\n nums = d.cleanValues\n # Not sure if it is needed here\n # detected_kind = d.getType()\n logger.debug(\"compute features\")\n feats = features.compute_features(kind=kind, nums=nums)\n if feats is None:\n predictions.append([])\n continue\n # print(\"feats: \"+str(feats))\n if kind == commons.CATEGORICAL:\n logger.debug(\"add trailing zeros to categorical features\")\n if max_num_of_features > len(feats):\n feats += [0 for i in range((max_num_of_features)-len(feats))]\n data_array = np.array([feats])\n #data_array = data_array.transpose()\n # print(data_array)\n logger.debug(\"predict the cluster for the given column\")\n pred = fcm.predict(data_array)\n pred_with_names = zip(pred[0], centroid_names)\n pred_with_names.sort(key=itemgetter(0), reverse=True)\n #logger.debug(str(pred_with_names))\n predictions.append(pred_with_names[:TOP_K])\n return predictions\n\n\ndef load_model(kind, model_fdir):\n \"\"\"\n Get FCM model from the model file\n :param kind:\n :param model_fdir:\n :return:\n \"\"\"\n df = pd.read_csv(model_fdir, delimiter='\\t', names=['property_uri', 'kind', 'features'])\n dfkind = df[df.kind == kind]\n # print(dfkind.columns.values)\n centroids = []\n #centroids_names = list(df['property_uri'])\n centroids_names = []\n # centroids_names = [p for p in df['property_uri']]\n for idx, row in dfkind.iterrows():\n #for r in dfkind['features']:\n # print(r)\n # print(type(r))\n\n r = row['features']\n centroids_names.append(row['property_uri'])\n centroid = [float(num) for num in r.split(',')]\n centroids.append(centroid)\n\n # This is to fix for categorical\n max_num_features = 0\n if kind == commons.CATEGORICAL: # has variant num of features\n for c in centroids:\n dim = len(c)\n if dim > max_num_features:\n max_num_features = dim\n\n for c in centroids:\n num_feats = len(c)\n if num_feats < max_num_features:\n additionals = max_num_features-num_feats\n for i in range(additionals):\n c.append(0)\n logger.debug(\"for categorical max_num_features: \"+str(max_num_features))\n if len(centroids) == 
0:\n return None, None, None\n fcm = FCM(n_clusters=len(centroids))\n # print(\"centroids: \"+str(centroids))\n # print(\"len: \"+str(len(centroids)))\n fcm.fit(centroids, range(len(centroids)))\n logger.debug(\"centroids: \"+str(centroids))\n logger.debug(\"fcm centroids: \"+str(fcm.cluster_centers_))\n logger.debug(\"membership: \")\n logger.debug(\"\\n\"+str(fcm.u))\n return fcm, centroids_names, max_num_features\n\n\nif __name__ == '__main__':\n # fname = \"dbpedia.org__ontology__BadmintonPlayer.tsv.test\"\n # model_fdir = os.path.join(commons.models_dir, fname)\n # load_model(commons.COUNTS, model_fdir)\n\n # print features.compute_features(kind=commons.OTHER, nums=[1,2,3,4,5,5,-20])\n\n class_uri = \"http://dbpedia.org/ontology/BadmintonPlayer\"\n data = [\n range(1, 100, 3)\n ]\n classify(kind=commons.OTHER, class_uri=class_uri, columns=data)\n\n\n # data = [\n # [1,2,1,2,1,2,1,2,3,4]\n # ]\n # classify(kind=commons.CATEGORICAL, class_uri=class_uri, columns=data)" ]
[ [ "pandas.isna", "pandas.read_csv" ], [ "numpy.array", "numpy.set_printoptions", "pandas.read_csv" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.1", "1.5", "1.2", "1.3" ], "scipy": [], "tensorflow": [] } ]
dyabel/wsod-mmdet
[ "60fc1993ea298f992b160b5599a6134702ac0d4f" ]
[ "mmdet/models/losses/my_cross_entropy_loss.py" ]
[ "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport numpy as np\n\nfrom ..builder import LOSSES\nfrom .utils import weight_reduce_loss\neps = 0.000001\n\n\ndef cross_entropy_without_softmax(pred,\n label,\n weight=None,\n reduction='mean',\n avg_factor=None,\n class_weight=None):\n \"\"\"Calculate the CrossEntropy loss.\n\n Args:\n pred (torch.Tensor): The prediction with shape (N, C), C is the number\n of classes.\n label (torch.Tensor): The learning label of the prediction.\n weight (torch.Tensor, optional): Sample-wise loss weight.\n reduction (str, optional): The method used to reduce the loss.\n avg_factor (int, optional): Average factor that is used to average\n the loss. Defaults to None.\n class_weight (list[float], optional): The weight for each class.\n\n Returns:\n torch.Tensor: The calculated loss\n \"\"\"\n # element-wise losses\n #loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none')\n\n loss = F.nll_loss(torch.log(pred), label, reduction = 'none')\n\n # apply weights and do the reduction\n if weight is not None:\n weight = weight.float()\n loss = weight_reduce_loss(\n loss, weight=weight, reduction=reduction, avg_factor=avg_factor)\n\n return loss\n\ndef cross_entropy(pred,\n label,\n weight=None,\n reduction='mean',\n avg_factor=None,\n class_weight=None):\n \"\"\"Calculate the CrossEntropy loss.\n\n Args:\n pred (torch.Tensor): The prediction with shape (N, C), C is the number\n of classes.\n label (torch.Tensor): The learning label of the prediction.\n weight (torch.Tensor, optional): Sample-wise loss weight.\n reduction (str, optional): The method used to reduce the loss.\n avg_factor (int, optional): Average factor that is used to average\n the loss. Defaults to None.\n class_weight (list[float], optional): The weight for each class.\n\n Returns:\n torch.Tensor: The calculated loss\n \"\"\"\n # element-wise losses\n loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none')\n\n # apply weights and do the reduction\n if weight is not None:\n weight = weight.float()\n loss = weight_reduce_loss(\n loss, weight=weight, reduction=reduction, avg_factor=avg_factor)\n\n return loss\n\n\ndef _expand_onehot_labels(labels, label_weights, label_channels):\n bin_labels = labels.new_full((labels.size(0), label_channels), 0)\n inds = torch.nonzero(\n (labels >= 0) & (labels < label_channels), as_tuple=False).squeeze()\n if inds.numel() > 0:\n bin_labels[inds, labels[inds]] = 1\n\n if label_weights is None:\n bin_label_weights = None\n else:\n bin_label_weights = label_weights.view(-1, 1).expand(\n label_weights.size(0), label_channels)\n\n return bin_labels, bin_label_weights\n\n\ndef binary_cross_entropy(pred,\n label,\n weight=None,\n reduction='mean',\n avg_factor=None,\n class_weight=None):\n \"\"\"Calculate the binary CrossEntropy loss.\n\n Args:\n pred (torch.Tensor): The prediction with shape (N, 1).\n label (torch.Tensor): The learning label of the prediction.\n weight (torch.Tensor, optional): Sample-wise loss weight.\n reduction (str, optional): The method used to reduce the loss.\n Options are \"none\", \"mean\" and \"sum\".\n avg_factor (int, optional): Average factor that is used to average\n the loss. 
Defaults to None.\n class_weight (list[float], optional): The weight for each class.\n\n Returns:\n torch.Tensor: The calculated loss\n \"\"\"\n if pred.dim() != label.dim():\n label, weight = _expand_onehot_labels(label, weight, pred.size(-1))\n\n # weighted element-wise losses\n if weight is not None:\n weight = weight.float()\n\n\n pred = pred.clamp(1e-6,1-1e-6)\n label = label.clamp(0,1)\n loss = F.binary_cross_entropy(pred,label)\n\n return loss\n\n\ndef mask_cross_entropy(pred,\n target,\n label,\n reduction='mean',\n avg_factor=None,\n class_weight=None):\n \"\"\"Calculate the CrossEntropy loss for masks.\n\n Args:\n pred (torch.Tensor): The prediction with shape (N, C), C is the number\n of classes.\n target (torch.Tensor): The learning label of the prediction.\n label (torch.Tensor): ``label`` indicates the class label of the mask'\n corresponding object. This will be used to select the mask in the\n of the class which the object belongs to when the mask prediction\n if not class-agnostic.\n reduction (str, optional): The method used to reduce the loss.\n Options are \"none\", \"mean\" and \"sum\".\n avg_factor (int, optional): Average factor that is used to average\n the loss. Defaults to None.\n class_weight (list[float], optional): The weight for each class.\n\n Returns:\n torch.Tensor: The calculated loss\n \"\"\"\n # TODO: handle these two reserved arguments\n assert reduction == 'mean' and avg_factor is None\n num_rois = pred.size()[0]\n inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device)\n pred_slice = pred[inds, label].squeeze(1)\n return F.binary_cross_entropy_with_logits(\n pred_slice, target, weight=class_weight, reduction='mean')[None]\n\n\[email protected]_module()\nclass MyCrossEntropyLoss(nn.Module):\n\n def __init__(self,\n use_sigmoid=False,\n use_mask=False,\n reduction='mean',\n class_weight=None,\n loss_weight=1.0):\n \"\"\"CrossEntropyLoss.\n\n Args:\n use_sigmoid (bool, optional): Whether the prediction uses sigmoid\n of softmax. Defaults to False.\n use_mask (bool, optional): Whether to use mask cross entropy loss.\n Defaults to False.\n reduction (str, optional): . Defaults to 'mean'.\n Options are \"none\", \"mean\" and \"sum\".\n class_weight (list[float], optional): Weight of each class.\n Defaults to None.\n loss_weight (float, optional): Weight of the loss. Defaults to 1.0.\n \"\"\"\n super(MyCrossEntropyLoss, self).__init__()\n assert (use_sigmoid is False) or (use_mask is False)\n self.use_sigmoid = use_sigmoid\n self.use_mask = use_mask\n self.reduction = reduction\n self.loss_weight = loss_weight\n self.class_weight = class_weight\n\n self.cls_criterion = binary_cross_entropy\n\n def forward(self,\n cls_score,\n label,\n weight=None,\n avg_factor=None,\n reduction_override=None,\n **kwargs):\n \"\"\"Forward function.\n\n Args:\n cls_score (torch.Tensor): The prediction.\n label (torch.Tensor): The learning label of the prediction.\n weight (torch.Tensor, optional): Sample-wise loss weight.\n avg_factor (int, optional): Average factor that is used to average\n the loss. 
Defaults to None.\n reduction (str, optional): The method used to reduce the loss.\n Options are \"none\", \"mean\" and \"sum\".\n Returns:\n torch.Tensor: The calculated loss\n \"\"\"\n assert reduction_override in (None, 'none', 'mean', 'sum')\n reduction = (\n reduction_override if reduction_override else self.reduction)\n if self.class_weight is not None:\n class_weight = cls_score.new_tensor(\n self.class_weight, device=cls_score.device)\n else:\n class_weight = None\n loss_cls = self.loss_weight * self.cls_criterion(\n cls_score,\n label,\n weight,\n class_weight=class_weight,\n reduction=reduction,\n avg_factor=avg_factor,\n **kwargs)\n return loss_cls\n" ]
[ [ "torch.nn.functional.binary_cross_entropy_with_logits", "torch.nn.functional.cross_entropy", "torch.nn.functional.binary_cross_entropy", "torch.log", "torch.nonzero", "torch.arange" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
ForrestPi/ObjectDetection
[ "54e0821e73f67be5360c36f01229a123c34ab3b3", "0d6766038bd3ee37036e4255713d5c06e81a83ed", "54e0821e73f67be5360c36f01229a123c34ab3b3", "54e0821e73f67be5360c36f01229a123c34ab3b3", "54e0821e73f67be5360c36f01229a123c34ab3b3", "54e0821e73f67be5360c36f01229a123c34ab3b3" ]
[ "nms/benchmark/nms_numba_cpu.py", "SSD/SSD_FPN_GIoU/data/utils/augmentations.py", "YOLO/Stronger-yolo-pytorch/dataset/augment/image.py", "YOLO/yolov3_asff_pt/models/utils_loss.py", "AnchorFree/FCOS/models/utils.py", "SSD/SSD_FPN_GIoU/model/head/build_head.py" ]
[ "from __future__ import absolute_import\nimport numba\nimport numpy as np\[email protected](nopython=True)\ndef nms_cpu(dets, thresh):\n x1 = dets[:, 0]\n y1 = dets[:, 1]\n x2 = dets[:, 2]\n y2 = dets[:, 3]\n scores = dets[:, 4]\n areas = (x2 - x1 + 1) * (y2 - y1 + 1)\n order = scores.argsort()[::-1]\n\n keep = []\n while order.size > 0:\n i = order[0]\n keep.append(i)\n xx1 = np.maximum(x1[i], x1[order[1:]])\n yy1 = np.maximum(y1[i], y1[order[1:]])\n xx2 = np.minimum(x2[i], x2[order[1:]])\n yy2 = np.minimum(y2[i], y2[order[1:]])\n\n w = np.maximum(0.0, xx2 - xx1 + 1)\n h = np.maximum(0.0, yy2 - yy1 + 1)\n inter = w * h\n ovr = inter / (areas[i] + areas[order[1:]] - inter)\n\n inds = np.where(ovr <= thresh)[0]\n order = order[inds + 1]\n\n return keep\n\nif __name__ == \"__main__\":\n bbox=np.load(\"bbox.npy\")\n print(bbox.shape)\n keep=nms_cpu(bbox,0.7)\n print(len(keep))\n", "import torch\nfrom torchvision import transforms\nimport cv2\nimport numpy as np\nimport types\nfrom numpy import random\n\n\ndef intersect(box_a, box_b):\n '''\n calcute the intersect of box\n args:\n box_a = [boxs_num,4]\n box_b = [4]\n\n return iou_area = [boxs_num,1] \n '''\n max_xy = np.minimum(box_a[:, 2:], box_b[2:])\n min_xy = np.maximum(box_a[:, :2], box_b[:2])\n inter = np.clip((max_xy - min_xy), a_min=0, a_max=np.inf)\n return inter[:, 0] * inter[:, 1]\n\n\ndef jaccard_numpy(box_a, box_b):\n \"\"\"Compute the jaccard overlap of two sets of boxes. The jaccard overlap\n is simply the intersection over union of two boxes.\n E.g.:\n A ∩ B / A ∪ B = A ∩ B / (area(A) + area(B) - A ∩ B)\n Args:\n box_a: Multiple bounding boxes, Shape: [num_boxes,4]\n box_b: Single bounding box, Shape: [4]\n Return:\n jaccard overlap: Shape: [box_a.shape[0], box_a.shape[1]]\n \"\"\"\n inter = intersect(box_a, box_b)\n area_a = ((box_a[:, 2]-box_a[:, 0]) *\n (box_a[:, 3]-box_a[:, 1])) # [A,B]\n area_b = ((box_b[2]-box_b[0]) *\n (box_b[3]-box_b[1])) # [A,B]\n union = area_a + area_b - inter\n return inter / union # [A,B]\n\n\nclass Compose(object):\n \"\"\"\n Composes several augmentations together.\n Args:\n transforms (List[Transform]): list of transforms to compose.\n Example:\n augmentations.Compose([\n transforms.CenterCrop(10),\n transforms.ToTensor(),\n ])\n \"\"\"\n\n def __init__(self, transforms):\n self.transforms = transforms\n\n def __call__(self, img, boxes=None, labels=None):\n for t in self.transforms:\n img, boxes, labels = t(img, boxes, labels)\n return img, boxes, labels\n\n\nclass Lambda(object):\n \"\"\"Applies a lambda as a transform.\"\"\"\n\n def __init__(self, lambd):\n assert isinstance(lambd, types.LambdaType)\n self.lambd = lambd\n\n def __call__(self, img, boxes=None, labels=None):\n return self.lambd(img, boxes, labels)\n\n\nclass ConvertFromInts(object):\n '''\n Convert the image to ints\n '''\n def __call__(self, image, boxes=None, labels=None):\n return image.astype(np.float32), boxes, labels\n\n\nclass SubtractMeans(object):\n '''\n Sub the image means\n '''\n def __init__(self, mean):\n self.mean = np.array(mean, dtype=np.float32)\n\n def __call__(self, image, boxes=None, labels=None):\n image = image.astype(np.float32)\n image -= self.mean\n return image.astype(np.float32), boxes, labels\n\n\nclass Standform(object):\n '''\n make the image to standorm\n '''\n def __init__(self,mean,std):\n self.means = np.array(mean,dtype = np.float32)\n self.std = np.array(std,dtype = np.float32)\n def __call__(self, image, boxes=None, labels=None):\n image = image.astype(np.float32)\n return (image - 
self.means)/self.std,boxes,labels\n\n\nclass ToAbsoluteCoords(object):\n '''\n make the boxes to Absolute Coords\n '''\n def __call__(self, image, boxes=None, labels=None):\n height, width, channels = image.shape\n boxes[:, 0] *= width\n boxes[:, 2] *= width\n boxes[:, 1] *= height\n boxes[:, 3] *= height\n\n return image, boxes, labels\n\n\nclass ToPercentCoords(object):\n '''\n make the boxes to Percent Coords\n '''\n def __call__(self, image, boxes=None, labels=None):\n height, width, channels = image.shape\n boxes[:, 0] /= width\n boxes[:, 2] /= width\n boxes[:, 1] /= height\n boxes[:, 3] /= height\n\n return image, boxes, labels\n\n\nclass Resize(object):\n '''\n resize the image\n args:\n size = (size,size)\n '''\n def __init__(self, size=300):\n if isinstance(size,int):\n self.size = (size,size)\n elif isinstance(size,tuple):\n self.size = size\n else:\n raise Exception(\"The size is int or tuple\")\n\n def __call__(self, image, boxes=None, labels=None):\n image = cv2.resize(image, self.size)\n return image, boxes, labels\n\n\nclass RandomSaturation(object):\n '''\n Random to change the Saturation(HSV):0.0~1.0\n assert: this image is HSV\n args:\n lower,upper is the parameter to random the saturation\n '''\n def __init__(self, lower=0.5, upper=1.5):\n self.lower = lower\n self.upper = upper\n assert self.upper >= self.lower, \"contrast upper must be >= lower.\"\n assert self.lower >= 0, \"contrast lower must be non-negative.\"\n\n def __call__(self, image, boxes=None, labels=None):\n if random.randint(2):\n image[:, :, 1] *= random.uniform(self.lower, self.upper)\n\n return image, boxes, labels\n\n\nclass RandomHue(object):\n '''\n Random to change the Hue(HSV):0~360\n assert: this image is HSV\n args:\n delta is the parameters to random change the hue.\n\n '''\n def __init__(self, delta=18.0):\n assert delta >= 0.0 and delta <= 360.0\n self.delta = delta\n\n def __call__(self, image, boxes=None, labels=None):\n if random.randint(2):\n image[:, :, 0] += random.uniform(-self.delta, self.delta)\n image[:, :, 0][image[:, :, 0] > 360.0] -= 360.0\n image[:, :, 0][image[:, :, 0] < 0.0] += 360.0\n return image, boxes, labels\n\n\nclass RandomLightingNoise(object):\n def __init__(self):\n self.perms = ((0, 1, 2), (0, 2, 1),\n (1, 0, 2), (1, 2, 0),\n (2, 0, 1), (2, 1, 0))\n\n def __call__(self, image, boxes=None, labels=None):\n if random.randint(2):\n swap = self.perms[random.randint(len(self.perms))]\n shuffle = SwapChannels(swap) # shuffle channels\n image = shuffle(image)\n return image, boxes, labels\n\n\nclass ConvertColor(object):\n '''\n change the image from HSV to BGR or from BGR to HSV color\n args:\n current\n transform\n '''\n def __init__(self, current='RGB', transform='HSV'):\n self.transform = transform\n self.current = current\n\n def __call__(self, image, boxes=None, labels=None):\n if self.current == 'RGB' and self.transform == 'HSV':\n image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)\n elif self.current == 'HSV' and self.transform == 'RGB':\n image = cv2.cvtColor(image, cv2.COLOR_HSV2RGB)\n else:\n raise NotImplementedError\n return image, boxes, labels\n\n\nclass RandomContrast(object):\n '''\n Random to improve the image contrast:g(i,j) = alpha*f(i,j)\n '''\n def __init__(self, lower=0.5, upper=1.5):\n self.lower = lower\n self.upper = upper\n assert self.upper >= self.lower, \"contrast upper must be >= lower.\"\n assert self.lower >= 0, \"contrast lower must be non-negative.\"\n\n # expects float image\n def __call__(self, image, boxes=None, labels=None):\n if 
random.randint(2):\n alpha = random.uniform(self.lower, self.upper)\n image *= alpha\n return image, boxes, labels\n\n\nclass RandomBrightness(object):\n '''\n Random to improve the image bright:g(i,j) = f(i,j) + beta\n '''\n def __init__(self, delta=32):\n assert delta >= 0.0\n assert delta <= 255.0\n self.delta = delta\n\n def __call__(self, image, boxes=None, labels=None):\n if random.randint(2):\n delta = random.uniform(-self.delta, self.delta)\n image += delta\n return image, boxes, labels\n\n\nclass ToCV2Image(object):\n '''\n change the iamge shape c,h,w to h,w,c\n '''\n def __call__(self, tensor, boxes=None, labels=None):\n return tensor.cpu().numpy().astype(np.float32).transpose((1, 2, 0)), boxes, labels\n\n\nclass ToTensor(object):\n '''\n chage the image shape h,w,c to c,h,w\n '''\n\n def __call__(self, cvimage, boxes=None, labels=None):\n return torch.from_numpy(cvimage.astype(np.float32)).permute(2, 0, 1), boxes, labels\n\n\nclass RandomSampleCrop(object):\n \"\"\"Crop\n Arguments:\n img (Image): the image being input during training\n boxes (Tensor): the original bounding boxes in pt form\n labels (Tensor): the class labels for each bbox\n mode (float tuple): the min and max jaccard overlaps\n Return:\n (img, boxes, classes)\n img (Image): the cropped image\n boxes (Tensor): the adjusted bounding boxes in pt form\n labels (Tensor): the class labels for each bbox\n \"\"\"\n def __init__(self):\n self.sample_options = (\n # using entire original input image\n None,\n # sample a patch s.t. MIN jaccard w/ obj in .1,.3,.4,.7,.9\n (0.1, None),\n (0.3, None),\n (0.7, None),\n (0.9, None),\n # randomly sample a patch\n (None, None),\n )\n\n def __call__(self, image, boxes=None, labels=None):\n height, width, _ = image.shape\n while True:\n # randomly choose a mode\n mode = random.choice(self.sample_options)\n if mode is None:\n return image, boxes, labels\n\n min_iou, max_iou = mode\n if min_iou is None:\n min_iou = float('-inf')\n if max_iou is None:\n max_iou = float('inf')\n\n # max trails (50)\n for _ in range(50):\n current_image = image\n\n w = random.uniform(0.3 * width, width)\n h = random.uniform(0.3 * height, height)\n\n # aspect ratio constraint b/t .5 & 2\n if h / w < 0.5 or h / w > 2:\n continue\n\n left = random.uniform(width - w)\n top = random.uniform(height - h)\n\n # convert to integer rect x1,y1,x2,y2\n rect = np.array([int(left), int(top), int(left+w), int(top+h)])\n\n # calculate IoU (jaccard overlap) b/t the cropped and gt boxes\n overlap = jaccard_numpy(boxes, rect)\n\n # is min and max overlap constraint satisfied? if not try again\n if overlap.min() < min_iou and max_iou < overlap.max():\n continue\n\n # cut the crop from the image\n current_image = current_image[rect[1]:rect[3], rect[0]:rect[2],\n :]\n\n # keep overlap with gt box IF center in sampled patch\n #calcute the center in the boxes\n centers = (boxes[:, :2] + boxes[:, 2:]) / 2.0\n\n # mask in all gt boxes that above and to the left of centers\n m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])\n\n # mask in all gt boxes that under and to the right of centers\n m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])\n\n # mask in that both m1 and m2 are true\n #select the valid box that center in the rect\n mask = m1 * m2\n\n # have any valid boxes? 
try again if not\n if not mask.any():\n continue\n\n # take only matching gt boxes\n current_boxes = boxes[mask, :].copy()\n\n # take only matching gt labels\n current_labels = labels[mask]\n\n # should we use the box left and top corner or the crop's\n current_boxes[:, :2] = np.maximum(current_boxes[:, :2],\n rect[:2])\n # adjust to crop (by substracting crop's left,top)\n current_boxes[:, :2] -= rect[:2]\n\n current_boxes[:, 2:] = np.minimum(current_boxes[:, 2:],\n rect[2:])\n # adjust to crop (by substracting crop's left,top)\n current_boxes[:, 2:] -= rect[:2]\n\n return current_image, current_boxes, current_labels\n\n\nclass Expand(object):\n '''\n expand:ratio = 0.5\n '''\n def __init__(self, mean):\n self.mean = mean\n\n def __call__(self, image, boxes, labels):\n if random.randint(2):\n return image, boxes, labels\n\n height, width, depth = image.shape\n ratio = random.uniform(1, 4)\n # random to make the left and top\n left = random.uniform(0, width*ratio - width)\n top = random.uniform(0, height*ratio - height)\n\n expand_image = np.zeros(\n (int(height*ratio), int(width*ratio), depth),\n dtype=image.dtype)\n expand_image[:, :, :] = self.mean\n #put the image to the expand image\n expand_image[int(top):int(top + height),\n int(left):int(left + width)] = image\n image = expand_image\n\n boxes = boxes.copy()\n #match the box left and top\n boxes[:, :2] += (int(left), int(top))\n boxes[:, 2:] += (int(left), int(top))\n\n return image, boxes, labels\n\n'''\nhorizontal flip: ration = 0.5\n'''\nclass RandomMirror(object):\n def __call__(self, image, boxes, classes):\n _, width, _ = image.shape\n if random.randint(2):\n image = image[:, ::-1]\n boxes = boxes.copy()\n boxes[:, 0::2] = width - boxes[:, 2::-2]\n return image, boxes, classes\n\n\nclass SwapChannels(object):\n \"\"\"Transforms a tensorized image by swapping the channels in the order\n specified in the swap tuple.\n Args:\n swaps (int triple): final order of channels\n eg: (2, 1, 0)\n \"\"\"\n\n def __init__(self, swaps):\n self.swaps = swaps\n\n def __call__(self, image):\n \"\"\"\n Args:\n image (Tensor): image tensor to be transformed\n Return:\n a tensor with channels swapped according to swap\n \"\"\"\n # if torch.is_tensor(image):\n # image = image.data.cpu().numpy()\n # else:\n # image = np.array(image)\n image = image[:, :, self.swaps]\n return image\n\n\nclass PhotometricDistort(object):\n def __init__(self):\n self.pd = [\n RandomContrast(),\n ConvertColor(transform='HSV'),\n RandomSaturation(),\n RandomHue(),\n ConvertColor(current='HSV', transform='RGB'),\n RandomContrast()\n ]\n self.rand_brightness = RandomBrightness()\n self.rand_light_noise = RandomLightingNoise()\n\n def __call__(self, image, boxes, labels):\n im = image.copy()\n im, boxes, labels = self.rand_brightness(im, boxes, labels)\n if random.randint(2):\n distort = Compose(self.pd[:-1])\n else:\n distort = Compose(self.pd[1:])\n im, boxes, labels = distort(im, boxes, labels)\n return self.rand_light_noise(im, boxes, labels)\n\n\nclass SSDAugmentation(object):\n def __init__(self, size=300, mean=(104, 117, 123),std =(104, 117, 123)):\n self.mean = mean\n self.std = std\n self.size = size\n self.augment = Compose([\n ConvertFromInts(),\n ToAbsoluteCoords(),\n PhotometricDistort(),\n Expand(self.mean),\n RandomSampleCrop(),\n RandomMirror(),\n ToPercentCoords(),\n Resize(self.size),\n Standform(self.mean,self.std)\n #SubtractMeans(self.mean)\n ])\n\n def __call__(self, img, boxes, labels):\n return self.augment(img, boxes, labels)\n\ndef 
base_transform(image, size, mean):\n x = Standform(self.mean,self.std)\n x = cv2.resize(image, (size, size)).astype(np.float32)\n x -= mean\n x = x.astype(np.float32)\n return x\n\n\nclass BaseTransform:\n def __init__(self, size, mean,std):\n self.mean = mean\n self.std = std\n self.size = size\n self.augment = Compose([\n ConvertFromInts(),\n Resize(self.size),\n Standform(self.mean,self.std)\n \n ])\n \n def __call__(self, image, boxes=None, labels=None):\n return self.augment(image, boxes, labels)\n", "import cv2\nimport numpy as np\nimport random\nfrom PIL import Image\nimport matplotlib.pyplot as plt\n\ndef fixed_crop(src, x0, y0, w, h, size=None, interp=2):\n \"\"\"Crop src at fixed location, and (optionally) resize it to size.\n\n Parameters\n ----------\n src : NDArray\n Input image\n x0 : int\n Left boundary of the cropping area\n y0 : int\n Top boundary of the cropping area\n w : int\n Width of the cropping area\n h : int\n Height of the cropping area\n size : tuple of (w, h)\n Optional, resize to new size after cropping\n interp : int, optional, default=2\n Interpolation method. See resize_short for details.\n\n Returns\n -------\n NDArray\n An `NDArray` containing the cropped image.\n \"\"\"\n img = src[y0:y0+h,x0:x0+w,:]\n img=cv2.resize(img,(w,h))\n return img\n\ndef random_flip(src, px=0, py=0, copy=False):\n \"\"\"Randomly flip image along horizontal and vertical with probabilities.\n\n Parameters\n ----------\n src : mxnet.nd.NDArray\n Input image with HWC format.\n px : float\n Horizontal flip probability [0, 1].\n py : float\n Vertical flip probability [0, 1].\n copy : bool\n If `True`, return a copy of input\n\n Returns\n -------\n mxnet.nd.NDArray\n Augmented image.\n tuple\n Tuple of (flip_x, flip_y), records of whether flips are applied.\n\n \"\"\"\n flip_y = np.random.choice([False, True], p=[1 - py, py])\n flip_x = np.random.choice([False, True], p=[1 - px, px])\n if flip_y:\n src = np.flipud(src)\n if flip_x:\n src = np.fliplr(src)\n if copy:\n src = src.copy()\n return src, (flip_x, flip_y)\n\n\n\ndef random_color_distort(src, brightness_delta=32, contrast_low=0.5, contrast_high=1.5,\n saturation_low=0.5, saturation_high=1.5, hue_delta=18):\n \"\"\"Randomly distort image color space.\n Note that input image should in original range [0, 255].\n\n Parameters\n ----------\n src : mxnet.nd.NDArray\n Input image as HWC format.\n brightness_delta : int\n Maximum brightness delta. Defaults to 32.\n contrast_low : float\n Lowest contrast. Defaults to 0.5.\n contrast_high : float\n Highest contrast. Defaults to 1.5.\n saturation_low : float\n Lowest saturation. Defaults to 0.5.\n saturation_high : float\n Highest saturation. Defaults to 1.5.\n hue_delta : int\n Maximum hue delta. 
Defaults to 18.\n\n Returns\n -------\n mxnet.nd.NDArray\n Distorted image in HWC format.\n\n \"\"\"\n\n def brightness(src, delta, p=0.5):\n \"\"\"Brightness distortion.\"\"\"\n if np.random.uniform(0, 1) > p:\n delta = np.random.uniform(-delta, delta)\n src += delta\n return src\n return src\n\n def contrast(src, low, high, p=0.5):\n \"\"\"Contrast distortion\"\"\"\n if np.random.uniform(0, 1) > p:\n alpha = np.random.uniform(low, high)\n src *= alpha\n return src\n return src\n\n def saturation(src, low, high, p=0.5):\n \"\"\"Saturation distortion.\"\"\"\n if np.random.uniform(0, 1) > p:\n alpha = np.random.uniform(low, high)\n gray = src * np.array([[[0.299, 0.587, 0.114]]])\n gray = np.sum(gray, axis=2, keepdims=True)\n gray *= (1.0 - alpha)\n src *= alpha\n src += gray\n return src\n return src\n\n def hue(src, delta, p=0.5):\n \"\"\"Hue distortion\"\"\"\n if np.random.uniform(0, 1) > p:\n alpha = random.uniform(-delta, delta)\n u = np.cos(alpha * np.pi)\n w = np.sin(alpha * np.pi)\n bt = np.array([[1.0, 0.0, 0.0],\n [0.0, u, -w],\n [0.0, w, u]])\n tyiq = np.array([[0.299, 0.587, 0.114],\n [0.596, -0.274, -0.321],\n [0.211, -0.523, 0.311]])\n ityiq = np.array([[1.0, 0.956, 0.621],\n [1.0, -0.272, -0.647],\n [1.0, -1.107, 1.705]])\n t = np.dot(np.dot(ityiq, bt), tyiq).T\n src = np.dot(src, np.array(t))\n return src\n return src\n\n src = src.astype('float32')\n\n # brightness\n src = brightness(src, brightness_delta)\n\n # color jitter\n if np.random.randint(0, 2):\n src = contrast(src, contrast_low, contrast_high)\n src = saturation(src, saturation_low, saturation_high)\n src = hue(src, hue_delta)\n else:\n src = saturation(src, saturation_low, saturation_high)\n src = hue(src, hue_delta)\n src = contrast(src, contrast_low, contrast_high)\n return src\n\ndef impad_to_square(img, pad_size):\n '''Pad an image to ensure each edge to equal to pad_size.\n\n Args\n ---\n img: [height, width, channels]. Image to be padded\n pad_size: Int.\n\n Returns\n ---\n ndarray: The padded image with shape of\n [pad_size, pad_size, channels].\n '''\n shape = (pad_size, pad_size, img.shape[-1])\n pad = np.zeros(shape, dtype=img.dtype)\n pad[:img.shape[0], :img.shape[1], ...] = img\n return pad\n\n\ndef impad_to_multiple(img, divisor):\n '''Pad an image to ensure each edge to be multiple to some number.\n\n Args\n ---\n img: [height, width, channels]. Image to be padded.\n divisor: Int. Padded image edges will be multiple to divisor.\n\n Returns\n ---\n ndarray: The padded image.\n '''\n pad_h = int(np.ceil(img.shape[0] / divisor)) * divisor\n pad_w = int(np.ceil(img.shape[1] / divisor)) * divisor\n shape = (pad_h, pad_w, img.shape[-1])\n\n pad = np.zeros(shape, dtype=img.dtype)\n pad[:img.shape[0], :img.shape[1], ...] = img\n return pad\n\n\ndef img_resize(img, out_size):\n '''Resize image while keeping the aspect ratio.\n\n Args\n ---\n img: [height, width, channels]. The input image.\n out_size: Tuple of 2 integers. 
the image will be rescaled\n as large as possible within the scale,(w,h)\n\n Returns\n ---\n np.ndarray: the scaled image.\n '''\n # h, w = img.shape[:2]\n # max_long_edge = max(out_size)\n # max_short_edge = min(out_size)\n # scale_factor = min(max_long_edge / max(h, w),\n # max_short_edge / min(h, w))\n #\n # new_size = (int(w * float(scale_factor) + 0.5),\n # int(h * float(scale_factor) + 0.5))\n\n rescaled_img = cv2.resize(\n img, out_size, interpolation=cv2.INTER_LINEAR)\n return rescaled_img\n\n\ndef imnormalize(img, mean, std):\n '''Normalize the image.\n\n Args\n ---\n img: [height, width, channel]\n mean: Tuple or np.ndarray. [3]\n std: Tuple or np.ndarray. [3]\n\n Returns\n ---\n np.ndarray: the normalized image.\n '''\n img=img/255.0\n img = (img - mean) / std\n return img.astype(np.float32)\n\n\ndef imdenormalize(norm_img, mean, std):\n '''Denormalize the image.\n\n Args\n ---\n norm_img: [height, width, channel]\n mean: Tuple or np.ndarray. [3]\n std: Tuple or np.ndarray. [3]\n\n Returns\n ---\n np.ndarray: the denormalized image.\n '''\n img = norm_img * std + mean\n return img.astype(np.float32)\n\ndef random_expand(src, max_ratio=2, keep_ratio=True):\n \"\"\"Random expand original image with borders, this is identical to placing\n the original image on a larger canvas.\n\n Parameters\n ----------\n src : mxnet.nd.NDArray\n The original image with HWC format.\n max_ratio : int or float\n Maximum ratio of the output image on both direction(vertical and horizontal)\n fill : int or float or array-like\n The value(s) for padded borders. If `fill` is numerical type, RGB channels\n will be padded with single value. Otherwise `fill` must have same length\n as image channels, which resulted in padding with per-channel values.\n keep_ratio : bool\n If `True`, will keep output image the same aspect ratio as input.\n\n Returns\n -------\n mxnet.nd.NDArray\n Augmented image.\n tuple\n Tuple of (offset_x, offset_y, new_width, new_height)\n\n \"\"\"\n if max_ratio <= 1:\n return src, (0, 0, src.shape[1], src.shape[0])\n\n h, w, c = src.shape\n ratio_x = random.uniform(1, max_ratio)\n if keep_ratio:\n ratio_y = ratio_x\n else:\n ratio_y = random.uniform(1, max_ratio)\n\n oh, ow = int(h * ratio_y), int(w * ratio_x)\n off_y = random.randint(0, oh - h)\n off_x = random.randint(0, ow - w)\n dst=np.zeros(shape=(oh,ow,c))\n\n dst[off_y:off_y + h, off_x:off_x + w, :] = src\n return dst, (off_x, off_y, ow, oh)\n\ndef makeImgPyramids(imgs,scales,flip=False):\n rescaled_imgs=[]\n for scale in scales:\n rescaled_img=[]\n for img in imgs:\n scaled_img=cv2.resize(img,dsize=(scale,scale))\n rescaled_img.append(scaled_img)\n rescaled_imgs.append(np.array(rescaled_img))\n if not flip:\n return rescaled_imgs\n else:\n fliped_imgs=[]\n for pyramid in rescaled_imgs:\n fliped_img=[np.fliplr(img) for img in pyramid]\n fliped_imgs.append(np.array(fliped_img))\n return rescaled_imgs+fliped_imgs\n\n", "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.autograd import Variable\n\n\nclass IOUWH_loss(nn.Module): #used for anchor guiding\n def __init__(self, reduction='none'):\n super(IOUWH_loss, self).__init__()\n self.reduction = reduction\n\n def forward(self, pred, target):\n orig_shape = pred.shape\n pred = pred.view(-1,4)\n target = target.view(-1,4)\n target[:,:2] = 0\n tl = torch.max((target[:, :2]-pred[:,2:]/2),\n (target[:, :2] - target[:, 2:]/2))\n\n br = torch.min((target[:, :2]+pred[:,2:]/2),\n (target[:, :2] + target[:, 2:]/2))\n\n area_p = torch.prod(pred[:,2:], 1)\n 
area_g = torch.prod(target[:,2:], 1)\n\n en = (tl< br).type(tl.type()).prod(dim=1)\n area_i = torch.prod(br-tl, 1) * en\n U = area_p+area_g-area_i+ 1e-16\n iou= area_i / U\n\n loss = 1-iou**2\n if self.reduction =='mean':\n loss = loss.mean()\n elif self.reduction == 'sum':\n loss = loss.sum()\n\n return loss\n\nclass IOUloss(nn.Module):\n def __init__(self, reduction='none'):\n super(IOUloss, self).__init__()\n self.reduction = reduction\n\n def forward(self, pred, target):\n orig_shape = pred.shape\n pred = pred.view(-1,4)\n target = target.view(-1,4)\n tl = torch.max((pred[:, :2]-pred[:,2:]/2),\n (target[:, :2] - target[:, 2:]/2))\n br = torch.min((pred[:, :2]+pred[:,2:]/2),\n (target[:, :2] + target[:, 2:]/2))\n\n area_p = torch.prod(pred[:,2:], 1)\n area_g = torch.prod(target[:,2:], 1)\n\n en = (tl< br).type(tl.type()).prod(dim=1)\n area_i = torch.prod(br-tl, 1) * en\n iou= (area_i) / (area_p+area_g-area_i+ 1e-16)\n\n loss = 1-iou**2\n if self.reduction =='mean':\n loss = loss.mean()\n elif self.reduction == 'sum':\n loss = loss.sum()\n\n return loss\n", "# -*- coding: utf-8 -*-\nimport torch\nimport torch.nn as nn\nfrom bounding_box import BoxList\nfrom boxlist_ops import cat_boxlist,boxlist_nms\n\n\nclass get_detector(nn.Module):\n def __init__(self, cfg):\n super(get_detector,self).__init__()\n self.nms_thresh = cfg['NMS_thresh']\n self.conf_thresh = cfg['conf_thres']\n self.nms_thresh_topN = cfg['nms_thresh_topN']\n self.num_classes = cfg['num_classes']\n pass\n \n def forward(self, locations, pred_cls, pred_boxes, pred_centerness, image_sizes):\n res_boxes = []\n \n for l, c, b, m in zip(locations, pred_cls, pred_boxes, pred_centerness):\n res_boxes.append(self.single_fmps_process(l, c, b, m, image_sizes))\n \n boxlists = list(zip(*res_boxes))\n boxlists = [cat_boxlist(boxlist) for boxlist in boxlists]\n boxlists = self.select_over_all_levels(boxlists)\n return boxlists\n\n \n def select_over_all_levels(self, boxlists):\n num_imgs = len(boxlists)\n results = []\n \n for i in range(num_imgs):\n scores = boxlists[i].get_field(\"scores\")\n labels = boxlists[i].get_field(\"labels\")\n boxes = boxlists[i].bbox\n boxlist = boxlists[i]\n result = []\n # skip the background\n for j in range(1, self.num_classes+1):\n idx = (labels == j).nonzero().view(-1)\n scores_j = scores[idx]\n boxes_j = boxes[idx, :].view(-1, 4)\n \n boxlist_for_class = BoxList(boxes_j, boxlist.size, mode=\"xyxy\")\n boxlist_for_class.add_field(\"scores\", scores_j)\n boxlist_for_class = boxlist_nms(boxlist_for_class, self.nms_thresh, score_field='scores')\n num_labels = len(boxlist_for_class)\n labels = torch.full((num_labels,), j, dtype=torch.int64, device=scores.device)\n boxlist_for_class.add_field('labels', labels)\n result.append(boxlist_for_class)\n\n #\n result = cat_boxlist(result)\n number_of_detections = len(result)\n # Limit to max_per_image detections **over all classes**\n if number_of_detections > self.nms_thresh_topN > 0:\n cls_scores = result.get_field(\"scores\")\n image_thresh, _ = torch.kthvalue(\n cls_scores.cpu(),\n number_of_detections - self.nms_thresh_topN + 1\n )\n keep = cls_scores >= image_thresh.item()\n keep = torch.nonzero(keep).squeeze(1)\n result = result[keep]\n results.append(result)\n return results\n \n\n def single_fmps_process(self, location, pred_cls, pred_box, pred_centerness, image_sizes):\n B, H, W, C = pred_cls.shape\n \n pred_cls = pred_cls.view(B, -1, C).sigmoid() \n pred_box = pred_box.view(B, -1, 4) \n pred_centerness = pred_centerness.view(B, -1).sigmoid() \n\n # 
multiply the classification scores with centerness scores\n pred_cls = pred_cls * pred_centerness[:, :, None] \n \n cls_mask = pred_cls > self.conf_thresh \n cls_mask_top_n = cls_mask.view(B, -1).sum(1) \n cls_mask_top_n = cls_mask_top_n.clamp(max=self.nms_thresh_topN)\n \n \n res = []\n for b in range(B):\n per_cls = pred_cls[b] \n per_cls_mask = cls_mask[b] \n per_cls = per_cls[per_cls_mask] \n \n per_cls_mask_nonzeros = per_cls_mask.nonzero() \n \n per_box_loc = per_cls_mask_nonzeros[:, 0]\n per_box_cls = per_cls_mask_nonzeros[:, 1] + 1 # class index\n \n\n per_box = pred_box[b]\n per_box = per_box[per_box_loc]\n per_location = location[per_box_loc]\n per_cls_mask_top_n = cls_mask_top_n[b]\n\n if per_cls_mask.sum().item() > per_cls_mask_top_n.item():\n per_cls, top_k_idx = per_cls.topk(per_cls_mask_top_n, sorted=False)\n per_box_cls = per_box_cls[top_k_idx]\n per_box = per_box[top_k_idx]\n per_location = per_location[top_k_idx]\n\n detections = torch.stack([ \n per_location[:, 0] - per_box[:, 0],\n per_location[:, 1] - per_box[:, 1],\n per_location[:, 0] + per_box[:, 2],\n per_location[:, 1] + per_box[:, 3], \n ],dim=1)\n \n h,w = image_sizes[0]\n box_list = BoxList(detections, (w,h), mode='xyxy')\n box_list.add_field('labels', per_box_cls)\n box_list.add_field('scores', per_cls) # * centerness < 0.5\n box_list = box_list.clip_to_image(remove_empty=False) \n res.append(box_list)\n \n return res\n \n\n\ndef one_hot_embedding(labels, num_classes):\n '''Embedding labels to one-hot form.\n\n Args:\n labels: (LongTensor) class labels, sized [N,].\n num_classes: (int) number of classes.\n\n Returns:\n (tensor) encoded labels, sized [N,#classes].\n '''\n y = torch.eye(num_classes, device=labels.device) # [D,D]\n return y[labels] \n\n\ndef boxes_area(boxes):\n \"\"\"Compute the area of an array of boxes.\"\"\"\n w = (boxes[:, 2] - boxes[:, 0] + 1)\n h = (boxes[:, 3] - boxes[:, 1] + 1)\n areas = w * h\n\n neg_area_idx = np.where(areas < 0)[0]\n if neg_area_idx.size:\n warnings.warn(\"Negative areas founds: %d\" % neg_area_idx.size, RuntimeWarning)\n #TODO proper warm up and learning rate may reduce the prob of assertion fail\n # assert np.all(areas >= 0), 'Negative areas founds'\n return areas, neg_area_idx\n\n\ndef unique_boxes(boxes, scale=1.0):\n \"\"\"Return indices of unique boxes.\"\"\"\n v = np.array([1, 1e3, 1e6, 1e9])\n hashes = np.round(boxes * scale).dot(v)\n _, index = np.unique(hashes, return_index=True)\n return np.sort(index)\n\nimport numpy as np\n\ndef xywh_to_xyxy(xywh):\n \"\"\"Convert [x1 y1 w h] box format to [x1 y1 x2 y2] format.\"\"\"\n if isinstance(xywh, (list, tuple)):\n # Single box given as a list of coordinates\n assert len(xywh) == 4\n x1, y1 = xywh[0], xywh[1]\n x2 = x1 + np.maximum(0., xywh[2] - 1.)\n y2 = y1 + np.maximum(0., xywh[3] - 1.)\n return (x1, y1, x2, y2)\n elif isinstance(xywh, np.ndarray):\n # Multiple boxes given as a 2D ndarray\n return np.hstack(\n (xywh[:, 0:2], xywh[:, 0:2] + np.maximum(0, xywh[:, 2:4] - 1))\n )\n else:\n raise TypeError('Argument xywh must be a list, tuple, or numpy array.')\n\n\ndef xyxy_to_xywh(xyxy):\n \"\"\"Convert [x1 y1 x2 y2] box format to [x1 y1 w h] format.\"\"\"\n if isinstance(xyxy, (list, tuple)):\n # Single box given as a list of coordinates\n assert len(xyxy) == 4\n x1, y1 = xyxy[0], xyxy[1]\n w = xyxy[2] - x1 + 1\n h = xyxy[3] - y1 + 1\n return (x1, y1, w, h)\n elif isinstance(xyxy, np.ndarray):\n # Multiple boxes given as a 2D ndarray\n return np.hstack((xyxy[:, 0:2], xyxy[:, 2:4] - xyxy[:, 0:2] + 1))\n else:\n 
raise TypeError('Argument xyxy must be a list, tuple, or numpy array.')\n", "import numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\n\nclass SSDHead(nn.Module):\n\n def __init__(self,\n num_classes=81,\n in_channels=[256,256,256,256,256],\n aspect_ratios=([2], [2, 3], [2, 3], [2, 3], [2], [2])):\n super(SSDHead, self).__init__()\n self.num_classes = num_classes\n self.in_channels = in_channels\n num_anchors = [len(ratios) * 2 + 2 for ratios in aspect_ratios]\n reg_convs = []\n cls_convs = []\n for i in range(len(in_channels)):\n reg_convs.append(\n nn.Conv2d(\n in_channels[i],\n num_anchors[i] * 4,\n kernel_size=3,\n padding=1))\n cls_convs.append(\n nn.Conv2d(\n in_channels[i],\n num_anchors[i] * num_classes,\n kernel_size=3,\n padding=1))\n self.reg_convs = nn.ModuleList(reg_convs)\n self.cls_convs = nn.ModuleList(cls_convs)\n\n self.init_weights()\n def init_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n torch.nn.init.xavier_uniform_(m.weight)\n\n def forward(self, feats):\n cls_scores = []\n bbox_preds = []\n for feat, reg_conv, cls_conv in zip(feats, self.reg_convs,\n self.cls_convs):\n #[num_featuremap,w,h,c]\n cls_scores.append(cls_conv(feat).permute(0, 2, 3, 1).contiguous())\n bbox_preds.append(reg_conv(feat).permute(0, 2, 3, 1).contiguous())\n \n return cls_scores, bbox_preds" ]
[ [ "numpy.load", "numpy.where", "numpy.maximum", "numpy.minimum" ], [ "numpy.maximum", "numpy.minimum", "numpy.clip", "numpy.random.choice", "numpy.random.uniform", "numpy.array", "numpy.random.randint" ], [ "numpy.dot", "numpy.random.choice", "numpy.fliplr", "numpy.flipud", "numpy.cos", "numpy.sin", "numpy.ceil", "numpy.random.uniform", "numpy.array", "numpy.zeros", "numpy.sum", "numpy.random.randint" ], [ "torch.min", "torch.prod", "torch.max" ], [ "numpy.hstack", "numpy.maximum", "torch.full", "numpy.unique", "torch.eye", "numpy.sort", "numpy.round", "torch.nonzero", "torch.stack", "numpy.array", "numpy.where" ], [ "torch.nn.ModuleList", "torch.nn.Conv2d", "torch.nn.init.xavier_uniform_" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
delemottelab/demystifying
[ "e8527b52d5fbe0570cd391921ecda5aefceb797a", "e8527b52d5fbe0570cd391921ecda5aefceb797a" ]
[ "demystifying/feature_extraction/mlp_feature_extractor.py", "demystifying/postprocessing.py" ]
[ "from __future__ import absolute_import, division, print_function\n\nimport logging\nimport sys\n\nlogging.basicConfig(\n stream=sys.stdout,\n format='%(asctime)s %(name)s-%(levelname)s: %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\nimport numpy as np\nfrom sklearn.neural_network import MLPClassifier, MLPRegressor\n\nfrom .. import relevance_propagation as relprop\nfrom .feature_extractor import FeatureExtractor\nfrom ..postprocessing import PerFrameImportancePostProcessor\n\nlogger = logging.getLogger(\"mlp\")\n\n\nclass MlpFeatureExtractor(FeatureExtractor):\n\n def __init__(self,\n name=\"MLP\",\n activation=relprop.relu,\n randomize=True,\n supervised=True,\n one_vs_rest=False,\n per_frame_importance_outfile=None,\n per_frame_importance_samples=None,\n per_frame_importance_labels=None,\n classifier_kwargs={},\n **kwargs):\n FeatureExtractor.__init__(self,\n name=name,\n supervised=supervised,\n **kwargs)\n self.backend = \"scikit-learn\" # Only available option for now, more to come probably\n if activation not in [relprop.relu, relprop.logistic_sigmoid]:\n Exception(\"Relevance propagation currently only supported for relu or logistic\")\n self.activation = activation\n self.randomize = randomize\n self.classifier_kwargs = classifier_kwargs.copy()\n if classifier_kwargs.get('activation', None) is not None and \\\n classifier_kwargs.get('activation') != self.activation:\n logger.warn(\"Conflicting activation properiies. '%s' will be overwritten with '%s'\",\n classifier_kwargs.get('activation'),\n self.activation)\n self.classifier_kwargs['activation'] = self.activation\n if not self.randomize:\n self.classifier_kwargs['random_state'] = 89274\n self.frame_importances = None\n self.per_frame_importance_outfile = per_frame_importance_outfile\n self.per_frame_importance_samples = per_frame_importance_samples\n self.per_frame_importance_labels = per_frame_importance_labels\n if self.use_regression:\n self.one_vs_rest = False\n else:\n self.one_vs_rest = one_vs_rest\n\n logger.debug(\"Initializing MLP with the following parameters:\"\n \" activation function %s, randomize %s, classifier_kwargs %s,\"\n \" per_frame_importance_outfile %s, backend %s, per_frame_importance_samples %s, one_vs_rest %s\",\n activation, randomize, classifier_kwargs, per_frame_importance_outfile, self.backend,\n None if per_frame_importance_samples is None else per_frame_importance_samples.shape,\n self.one_vs_rest)\n\n def _train_one_vs_rest(self, data, labels):\n n_clusters = labels.shape[1]\n n_points = data.shape[0]\n\n classifiers = []\n\n for i_cluster in range(n_clusters):\n classifiers.append(self._create_classifier())\n binary_labels = np.zeros((n_points, 2))\n binary_labels[labels[:, i_cluster] == 1, 0] = 1\n binary_labels[labels[:, i_cluster] != 1, 1] = 1\n classifiers[i_cluster].fit(data, binary_labels)\n\n return classifiers\n\n def train(self, train_set, train_labels):\n \"\"\"\n TODO code duplication below for on_vs_the_rest logic, refactor with KL and RF into common superclass\n :param train_set:\n :param train_labels:\n :return:\n \"\"\"\n # Construct and train classifier\n logger.debug(\"Training %s with %s samples and %s features ...\", self.name, train_set.shape[0],\n train_set.shape[1])\n if self.one_vs_rest:\n return self._train_one_vs_rest(train_set, train_labels)\n else:\n classifier = self._create_classifier()\n classifier.fit(train_set, train_labels)\n return classifier\n\n def _normalize_relevance_per_frame(self, relevance_per_frame):\n for i in range(relevance_per_frame.shape[0]):\n # 
Not removing negative relevance in per frame analysis\n # ind_negative = np.where(relevance_per_frame[i, :] < 0)[0]\n # relevance_per_frame[i, ind_negative] = 0\n relevance_per_frame[i, :] = (relevance_per_frame[i, :] - np.min(relevance_per_frame[i, :])) / \\\n (np.max(relevance_per_frame[i, :]) - np.min(relevance_per_frame[i, :]) + 1e-9)\n return relevance_per_frame\n\n def _perform_lrp(self, classifier, data, labels):\n nclusters = labels.shape[1] if self.supervised else 1\n nfeatures = data.shape[1]\n relevance_per_cluster = np.zeros((nfeatures, nclusters))\n per_frame_relevance = np.zeros(data.shape)\n for c_idx in range(nclusters):\n # Get all frames belonging to a cluster\n if self.supervised:\n frame_indices = labels[:, c_idx] == 1\n cluster_data = data[frame_indices]\n cluster_labels = np.zeros((len(cluster_data), nclusters))\n cluster_labels[:, c_idx] = 1 # Only look at one class at the time\n else:\n # TODO refactor to break unsupervised code out of here. Unsupervised method have no concept of clusters/labels\n cluster_labels = labels\n frame_indices = [i for i in range(len(data))]\n cluster_data = data\n if len(cluster_data) == 0:\n continue\n # Now see what makes these frames belong to that class\n # Time for LRP\n layers = self._create_layers(classifier)\n propagator = relprop.RelevancePropagator(layers)\n cluster_frame_relevance = propagator.propagate(cluster_data, cluster_labels)\n # Rescale relevance according to min and max relevance in each frame\n cluster_frame_relevance = self._normalize_relevance_per_frame(cluster_frame_relevance)\n relevance_per_cluster[:, c_idx] = cluster_frame_relevance.mean(axis=0)\n per_frame_relevance[frame_indices] += cluster_frame_relevance\n per_frame_relevance = self._normalize_relevance_per_frame(per_frame_relevance)\n return per_frame_relevance, relevance_per_cluster\n\n def get_feature_importance(self, classifier, data, labels):\n logger.debug(\"Extracting feature importance using MLP ...\")\n if self.one_vs_rest:\n return self._get_feature_importance_binaryclass(classifier, data, labels)\n else:\n return self._get_feature_importance_multiclass(classifier, data, labels)\n\n def _get_feature_importance_binaryclass(self, classifiers, data, labels):\n n_features = data.shape[1]\n n_frames = data.shape[0]\n n_states = labels.shape[1] if len(labels.shape) > 1 else 1\n feature_importances = np.zeros((n_features, self.n_clusters))\n for i_cluster in range(n_states):\n # TODO a bit inefficent approach below where we consistenly compute LRP for all other clusters and don't use those results.\n cluster_frames = labels[:, i_cluster] == 1\n binary_labels = np.zeros((n_frames, 2))\n binary_labels[cluster_frames, 0] = 1\n binary_labels[~cluster_frames, 1] = 1\n relevance_per_frame, relevance_per_cluster = self._perform_lrp(classifiers[i_cluster], data, binary_labels)\n feature_importances[:, i_cluster] = relevance_per_cluster[:, 0]\n if self.per_frame_importance_outfile is not None:\n cluster_frame_importances, other_labels = self._compute_frame_relevance(classifiers[i_cluster],\n relevance_per_frame,\n data,\n labels)\n if self.frame_importances is None:\n self.frame_importances = np.zeros((len(other_labels), cluster_frame_importances.shape[1]))\n other_cluster_frames = other_labels[:, 0] == 1\n if len(other_labels[other_cluster_frames]) == 0:\n # No frames in this state, just move on\n continue\n nclusters_per_frame = other_labels[other_cluster_frames].sum(axis=1)[:, np.newaxis]\n self.frame_importances[other_cluster_frames, :] += 
cluster_frame_importances[\n other_cluster_frames] / nclusters_per_frame\n return feature_importances\n\n def _get_feature_importance_multiclass(self, classifier, data, labels):\n relevance_per_frame, relevance_per_cluster = self._perform_lrp(classifier, data, labels)\n\n if self.per_frame_importance_outfile is not None:\n frame_importances, _ = self._compute_frame_relevance(classifier, relevance_per_frame, data, labels)\n self.frame_importances = frame_importances if self.frame_importances is None else self.frame_importances + frame_importances\n return relevance_per_cluster\n\n def _compute_frame_relevance(self, classifier, relevance_per_frame, data, labels):\n if self.per_frame_importance_samples is not None:\n if self.indices_for_filtering is None:\n other_samples = self.per_frame_importance_samples\n else:\n other_samples = self.per_frame_importance_samples[:, self.indices_for_filtering]\n if self.per_frame_importance_labels is None:\n other_labels = classifier.predict(other_samples)\n else:\n other_labels = self.per_frame_importance_labels\n other_samples = self.scaler.transform(other_samples)\n frame_relevance, _ = self._perform_lrp(classifier, other_samples, other_labels)\n else:\n logger.info(\"Using same trajectory for per frame importance as was used for training.\")\n if self.n_splits != 1:\n logger.error(\n \"Cannot average frame importance to outfile if n_splits != 1. n_splits is now set to %s\",\n self.n_splits)\n if self.shuffle_datasets:\n logger.error(\"Data set has been shuffled, per frame importance will not be properly mapped\")\n frame_relevance = relevance_per_frame\n other_labels = labels\n # for every feature in every frame...\n frame_importances = np.zeros(\n (data if self.per_frame_importance_samples is None else self.per_frame_importance_samples).shape) - 1\n if self.indices_for_filtering is not None:\n frame_importances[:, self.indices_for_filtering] = 0\n niters = self.n_iterations * self.n_splits\n for frame_idx, rel in enumerate(frame_relevance):\n if self.indices_for_filtering is None:\n frame_importances[frame_idx] += rel / niters\n else:\n frame_importances[frame_idx, self.indices_for_filtering] += rel / niters\n return frame_importances, other_labels\n\n def _create_layers(self, classifier):\n weights = classifier.coefs_\n biases = classifier.intercepts_\n layers = []\n for idx, weight in enumerate(weights):\n\n if idx == 0:\n l = relprop.FirstLinear(min_val=0, max_val=1, weight=weight, bias=biases[idx])\n else:\n l = relprop.layer_for_string(self.activation, weight=weight, bias=biases[idx])\n if l is None:\n raise Exception(\n \"Cannot create layer at index {} for activation function {}\".format(idx, self.activation))\n layers.append(l)\n if idx < len(weights) - 1:\n # Add activation to all except output layer\n activation = relprop.layer_activation_for_string(self.activation)\n if activation is None:\n raise Exception(\"Unknown activation function {}\".format(self.activation))\n layers.append(activation)\n else:\n if self.backend == 'scikit-learn':\n # For scikit implementation see # https://stats.stackexchange.com/questions/243588/how-to-apply-softmax-as-activation-function-in-multi-layer-perceptron-in-scikit\n # or https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/neural_network/multilayer_perceptron.py\n out_activation = relprop.layer_activation_for_string(classifier.out_activation_)\n if out_activation is None:\n raise Exception(\"Unknown activation function {}\".format(self.activation))\n layers.append(out_activation)\n else:\n 
raise Exception(\"Unsupported MLP backend {}\".format(self.backend))\n\n return layers\n\n def _create_classifier(self):\n return MLPRegressor(**self.classifier_kwargs) if self.use_regression \\\n else MLPClassifier(**self.classifier_kwargs)\n\n def postprocessing(self, **kwargs):\n return PerFrameImportancePostProcessor(extractor=self,\n per_frame_importance_outfile=self.per_frame_importance_outfile,\n frame_importances=self.frame_importances,\n **kwargs)\n", "from __future__ import absolute_import, division, print_function\n\nimport logging\nimport sys\n\nlogging.basicConfig(\n stream=sys.stdout,\n format='%(asctime)s %(name)s-%(levelname)s: %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\nimport os\nimport numpy as np\nfrom operator import itemgetter\nfrom biopandas.pdb import PandasPdb\nfrom . import utils\nfrom . import filtering\nfrom . import data_projection as dp\n\nlogger = logging.getLogger(\"postprocessing\")\n\n\nclass PostProcessor(object):\n\n def __init__(self,\n extractor=None,\n working_dir=None,\n rescale_results=True,\n filter_results=False,\n feature_to_resids=None,\n pdb_file=None,\n accuracy_method='mse',\n predefined_relevant_residues=None,\n use_GMM_estimator=True):\n \"\"\"\n Class which computes all the necessary averages and saves them as fields\n TODO move some functionality from class feature_extractor here\n :param extractor:\n :param feature_importance:\n :param std_feature_importance:\n :param cluster_indices:\n :param working_dir:\n :param feature_to_resids: an array of dimension nfeatures*2 which tells which two residues are involved in a feature\n \"\"\"\n self.extractor = extractor\n self.name = extractor.name\n self.feature_importances = extractor.feature_importance\n self.std_feature_importances = extractor.std_feature_importance\n self.supervised = extractor.supervised\n self.cluster_indices = extractor.cluster_indices\n self.nclusters = 1 if extractor.labels is None else extractor.labels.shape[1]\n self.working_dir = working_dir\n if self.working_dir is None:\n self.working_dir = os.getcwd()\n self.pdb_file = pdb_file\n self.predefined_relevant_residues = predefined_relevant_residues\n self.use_GMM_estimator = use_GMM_estimator\n\n # Rescale and filter results if needed\n self.rescale_results = rescale_results\n if self.feature_importances is not None:\n if rescale_results:\n self.feature_importances, self.std_feature_importances = utils.rescale_feature_importance(\n self.feature_importances, self.std_feature_importances)\n if filter_results:\n self.feature_importances, self.std_feature_importances = filtering.filter_feature_importance(\n self.feature_importances, self.std_feature_importances)\n\n # Put importance and std to 0 for residues pairs which were filtered out during features filtering (they are set as -1 in self.feature_importances and self.std_feature_importances)\n self.indices_filtered = np.where(self.feature_importances[:, 0] == -1)[0]\n self.feature_importances[self.indices_filtered, :] = 0\n self.std_feature_importances[self.indices_filtered, :] = 0\n # Set mapping from features to residues\n self.nfeatures = self.feature_importances.shape[0]\n else:\n self.indices_filtered = np.empty((0, 0))\n self.nfeatures = self.extractor.samples.shape[1]\n\n if feature_to_resids is None and self.pdb_file is None:\n feature_to_resids = utils.get_default_feature_to_resids(self.nfeatures)\n elif feature_to_resids is None and self.pdb_file is not None:\n feature_to_resids = utils.get_feature_to_resids_from_pdb(self.nfeatures, self.pdb_file)\n 
self.feature_to_resids = feature_to_resids\n self.accuracy_method = accuracy_method\n\n # Set average feature importances to None\n self.importance_per_residue_and_cluster = None\n self.std_importance_per_residue_and_cluster = None\n self.importance_per_residue = None\n self.std_importance_per_residue = None\n\n # Performance metrics\n self.predefined_relevant_residues = predefined_relevant_residues\n self.average_std = None\n if extractor.test_set_errors is not None:\n self.test_set_errors = extractor.test_set_errors.mean()\n else:\n self.test_set_errors = None\n self.data_projector = None\n self.separation_score = None\n self.accuracy = None\n self.accuracy_per_cluster = None\n self._importance_mapped_to_resids = None\n self._std_importance_mapped_to_resids = None\n\n def average(self):\n \"\"\"\n Computes average importance per cluster and residue and residue etc.\n Sets the fields importance_per_residue_and_cluster, importance_per_residue\n :return: itself\n \"\"\"\n self._map_feature_to_resids()\n self._compute_importance_per_residue()\n\n if self.supervised:\n self._compute_importance_per_residue_and_cluster()\n\n return self\n\n def evaluate_performance(self):\n \"\"\"\n Computes -average of standard deviation (per residue)\n -projection classification entropy\n -classification score (for toy model only)\n \"\"\"\n self._compute_average_std()\n self._compute_projection_classification_entropy()\n\n if self.predefined_relevant_residues is not None:\n self.compute_accuracy()\n\n return self\n\n def get_important_features(self, states=None, sort=True):\n \"\"\"\n :param states: (optional) the indices of the states\n :param sort: (optional) sort the features by their importance\n :return: np.array of shape (n_features, 2) with entries (feature_index, importance)\n \"\"\"\n fi = self.feature_importances\n if states is not None and self.supervised:\n fi = fi[:, states]\n fi = fi.sum(axis=1)\n fi, _ = utils.rescale_feature_importance(fi)\n fi = fi.squeeze()\n fi = [(e, i) for (e, i) in enumerate(fi)]\n if sort:\n fi = [(e, i) for (e, i) in sorted(fi, key=itemgetter(1), reverse=True)]\n return np.array(fi)\n\n def persist(self):\n \"\"\"\n Save .npy files of the different averages and pdb files with the beta column set to importance\n :return: itself\n \"\"\"\n directory = self.get_output_dir()\n\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n np.save(directory + \"importance_per_residue\", self.importance_per_residue)\n np.save(directory + \"std_importance_per_residue\", self.std_importance_per_residue)\n np.save(directory + \"feature_importance\", self.feature_importances)\n np.save(directory + \"std_feature_importance\", self.std_feature_importances)\n\n if self.importance_per_residue_and_cluster is not None and self.std_importance_per_residue_and_cluster is not None:\n np.save(directory + \"importance_per_residue_and_cluster\", self.importance_per_residue_and_cluster)\n np.save(directory + \"std_importance_per_residue_and_cluster\", self.std_importance_per_residue_and_cluster)\n if self.separation_score is not None:\n np.save(directory + 'separation_score', self.separation_score)\n if self.predefined_relevant_residues is not None:\n np.save(directory + \"predefined_relevant_residues\", self.predefined_relevant_residues)\n if self.accuracy is not None:\n np.save(directory + 'accuracy', self.accuracy)\n if self.accuracy_per_cluster is not None:\n np.save(directory + 'accuracy_per_cluster', self.accuracy_per_cluster)\n if self.test_set_errors is not None:\n 
np.save(directory + 'test_set_errors', self.test_set_errors)\n if self.feature_to_resids is not None:\n np.save(directory + 'feature_to_resids', self.feature_to_resids)\n if self.pdb_file is not None:\n pdb = PandasPdb()\n pdb.read_pdb(self.pdb_file)\n self._save_to_pdb(pdb, directory + \"importance.pdb\",\n self._map_to_correct_residues(self.importance_per_residue))\n\n if self.importance_per_residue_and_cluster is not None:\n for cluster_idx, importance in enumerate(self.importance_per_residue_and_cluster.T):\n cluster_name = \"cluster_{}\".format(cluster_idx) \\\n if self.extractor.label_names is None else \\\n self.extractor.label_names[cluster_idx]\n self._save_to_pdb(pdb, directory + \"{}_importance.pdb\".format(cluster_name),\n self._map_to_correct_residues(importance))\n\n return self\n\n def _load_if_exists(self, filepath):\n if os.path.exists(filepath):\n return np.load(filepath)\n else:\n return None\n\n def get_output_dir(self):\n return self.working_dir + \"/{}/\".format(self.extractor.name)\n\n def load(self):\n \"\"\"\n Loads files dumped by the 'persist' method\n :return: itself\n \"\"\"\n directory = self.get_output_dir()\n\n if not os.path.exists(directory):\n return self\n\n self.importance_per_residue = np.load(directory + \"importance_per_residue.npy\")\n self.std_importance_per_residue = np.load(directory + \"std_importance_per_residue.npy\")\n self.feature_importances = np.load(directory + \"feature_importance.npy\")\n self.std_feature_importances = np.load(directory + \"std_feature_importance.npy\")\n\n self.importance_per_residue_and_cluster = self._load_if_exists(\n directory + \"importance_per_residue_and_cluster.npy\")\n self.std_importance_per_residue_and_cluster = self._load_if_exists(\n directory + \"std_importance_per_residue_and_cluster.npy\")\n self.separation_score = self._load_if_exists(directory + \"separation_score.npy\")\n self.predefined_relevant_residues = self._load_if_exists(directory + \"predefined_relevant_residues.npy\")\n self.accuracy = self._load_if_exists(directory + \"accuracy.npy\")\n self.accuracy_per_cluster = self._load_if_exists(directory + \"accuracy_per_cluster.npy\")\n self.test_set_errors = self._load_if_exists(directory + \"test_set_errors.npy\")\n if self.feature_to_resids is None: # Can be useful to override this in postprocessing\n self.feature_to_resids = self._load_if_exists(directory + \"feature_to_resids.npy\")\n\n #np.unique(np.asarray(self.feature_to_resids.flatten()))\n return self\n\n def _map_feature_to_resids(self):\n # Create array of all unique residue numbers\n index_to_resid = self.get_index_to_resid()\n self.nresidues = len(index_to_resid)\n res_id_to_index = {} # a map pointing back to the index in the array index_to_resid\n for idx, resid in enumerate(index_to_resid):\n res_id_to_index[resid] = idx # Now we know which residue maps to which index\n\n _importance_mapped_to_resids = np.zeros((self.nresidues, self.feature_importances.shape[1]))\n _std_importance_mapped_to_resids = np.zeros((self.nresidues, self.feature_importances.shape[1]))\n for feature_idx, rel in enumerate(self.feature_importances):\n corresponding_residues = self.feature_to_resids[feature_idx]\n if isinstance(corresponding_residues, np.number):\n # Object not iterable, i.e. 
we only have one residue per feature\n corresponding_residues = [corresponding_residues]\n for res_seq in corresponding_residues:\n r_idx = res_id_to_index[res_seq]\n _importance_mapped_to_resids[r_idx, :] += rel\n _std_importance_mapped_to_resids[r_idx, :] += self.std_feature_importances[feature_idx, :] ** 2\n _std_importance_mapped_to_resids = np.sqrt(_std_importance_mapped_to_resids)\n self._importance_mapped_to_resids = _importance_mapped_to_resids\n self._std_importance_mapped_to_resids = _std_importance_mapped_to_resids\n\n def _compute_importance_per_residue(self):\n\n importance_per_residue = self._importance_mapped_to_resids.mean(axis=1)\n std_importance_per_residue = np.sqrt(np.mean(self._std_importance_mapped_to_resids ** 2, axis=1))\n\n if self.rescale_results:\n # Adds a second axis to feed to utils.rescale_feature_importance\n importance_per_residue = importance_per_residue.reshape((importance_per_residue.shape[0], 1))\n std_importance_per_residue = std_importance_per_residue.reshape((std_importance_per_residue.shape[0], 1))\n importance_per_residue, std_importance_per_residue = utils.rescale_feature_importance(\n importance_per_residue, std_importance_per_residue)\n importance_per_residue = importance_per_residue[:, 0]\n std_importance_per_residue = std_importance_per_residue[:, 0]\n\n self.importance_per_residue = importance_per_residue\n self.std_importance_per_residue = std_importance_per_residue\n\n def _compute_importance_per_residue_and_cluster(self):\n if self.rescale_results:\n self._importance_mapped_to_resids, self._std_importance_mapped_to_resids = utils.rescale_feature_importance(\n self._importance_mapped_to_resids, self._std_importance_mapped_to_resids)\n\n self.importance_per_residue_and_cluster = self._importance_mapped_to_resids\n self.std_importance_per_residue_and_cluster = self._std_importance_mapped_to_resids\n\n def _compute_average_std(self):\n \"\"\"\n Computes average standard deviation\n \"\"\"\n self.average_std = self.std_importance_per_residue.mean()\n\n return self\n\n def _compute_projection_classification_entropy(self):\n \"\"\"\n Computes separation of clusters in the projected space given by the feature importances\n \"\"\"\n if self.extractor.labels is None:\n logger.warning(\"Cannot compute projection classification entropy without labels\")\n return\n if self.extractor.mixed_classes:\n logger.warning(\n \"Cannot compute projection classification entropy for dataset where not all frames belong to a unique cluster/state.\")\n return\n\n self.data_projector = dp.DataProjector(self.extractor.samples, self.extractor.labels)\n\n if self.supervised:\n self.data_projector.project(self.feature_importances).score_projection(use_GMM=self.use_GMM_estimator)\n else:\n self.data_projector.project(self.feature_importances)\n self.data_projector.separation_score = np.nan\n # self.separation_score = np.array([self.data_projector.separation_score])\n self.separation_score = self.data_projector.separation_score\n return self\n\n def compute_accuracy(self):\n \"\"\"\n Computes accuracy with a normalized MSE-based metric\n \"\"\"\n if self.predefined_relevant_residues is None:\n logger.warning(\"Cannot compute accuracy without predefined relevant residues\")\n return\n relevant_residues_all_clusters = [y for x in self.predefined_relevant_residues for y in x]\n if self.accuracy_method == 'mse':\n self.accuracy = utils.compute_mse_accuracy(self.importance_per_residue,\n relevant_residues=relevant_residues_all_clusters)\n elif self.accuracy_method == 
'relevant_fraction':\n self.accuracy = utils.compute_relevant_fraction_accuracy(self.importance_per_residue,\n relevant_residues=relevant_residues_all_clusters)\n else:\n raise Exception(\"Invalid accuracy method {}\".format(self.accuracy_method))\n if self.supervised:\n self.accuracy_per_cluster = 0\n for i in range(self.nclusters):\n self.accuracy_per_cluster += utils.compute_mse_accuracy(self.importance_per_residue_and_cluster[:, i],\n relevant_residues=\n self.predefined_relevant_residues[i])\n self.accuracy_per_cluster /= self.nclusters\n\n def _map_to_correct_residues(self, importance_per_residue):\n \"\"\"\n Maps importances to correct residue numbers\n \"\"\"\n residue_to_importance = {}\n index_to_resid = self.get_index_to_resid()\n for idx, rel in enumerate(importance_per_residue):\n resSeq = index_to_resid[idx]\n residue_to_importance[resSeq] = rel\n\n return residue_to_importance\n\n def _save_to_pdb(self, pdb, out_file, residue_to_importance):\n \"\"\"\n Saves importances into beta column of pdb file\n \"\"\"\n atom = pdb.df['ATOM']\n missing_residues = []\n for i, line in atom.iterrows():\n resSeq = int(line['residue_number'])\n importance = residue_to_importance.get(resSeq, None)\n if importance is None:\n missing_residues.append(resSeq)\n importance = 0\n atom.at[i, 'b_factor'] = importance\n if len(missing_residues) > 0:\n logger.debug(\"importance is None for residues %s\", [r for r in sorted(set(missing_residues))])\n pdb.to_pdb(path=out_file, records=None, gz=False, append_newline=True)\n\n return self\n\n def get_index_to_resid(self):\n return np.unique(np.asarray(self.feature_to_resids.flatten()))\n\n\nclass PerFrameImportancePostProcessor(PostProcessor):\n\n def __init__(self,\n per_frame_importance_outfile=None,\n frame_importances=None,\n **kwargs):\n PostProcessor.__init__(self, **kwargs)\n self.per_frame_importance_outfile = per_frame_importance_outfile\n self.frame_importances = frame_importances\n\n def persist(self):\n PostProcessor.persist(self)\n if self.per_frame_importance_outfile is not None and \\\n self.frame_importances is not None and self.pdb_file is not None:\n with open(self.per_frame_importance_outfile, 'w') as of:\n logger.info(\"Writing per frame importance to file %s\", self.per_frame_importance_outfile)\n self.to_vmd_file(of)\n\n def to_vmd_file(self, of):\n \"\"\"\n Writes a VMD script, see https://www.ks.uiuc.edu/Research/vmd/mailing_list/vmd-l/5001.html\n :return:\n \"\"\"\n import mdtraj as md\n if self.pdb_file is None:\n raise Exception(\"PDB file required to write per frame importance\")\n\n # Map the feature to atoms for better performance\n top = md.load(self.pdb_file).top\n feature_to_atoms = []\n residue_to_atoms = {}\n for feature_idx, [res1, res2] in enumerate(self.feature_to_resids):\n atoms1 = residue_to_atoms.get(res1, None)\n if atoms1 is None:\n atoms1 = top.select(\"protein and resSeq {}\".format(res1))\n residue_to_atoms[res1] = atoms1\n atoms2 = residue_to_atoms.get(res2, None)\n if atoms2 is None:\n atoms2 = top.select(\"protein and resSeq {}\".format(res2))\n residue_to_atoms[res2] = atoms2\n feature_to_atoms.append(np.append(atoms1, atoms2))\n # Write to file in minibatches\n for frame_idx, importance in enumerate(self.frame_importances):\n # First normalize importance over features (not same as below)\n importance = (importance - importance.min()) / (importance.max() - importance.min() + 1e-6)\n # map importance to atom idx\n atom_to_importance = np.zeros((top.n_atoms))\n for feature_idx, atoms in 
enumerate(feature_to_atoms):\n fi = importance[feature_idx]\n for a in atoms:\n atom_to_importance[a] += fi\n # Normalize to values between 0 and 1\n atom_to_importance = (atom_to_importance - atom_to_importance.min()) / \\\n (atom_to_importance.max() - atom_to_importance.min() + 1e-6)\n # Go through atoms in sequential order\n lines = [\"#Frame {}\\n\".format(frame_idx)] + [\"{}\\n\".format(ai) for ai in atom_to_importance]\n of.writelines(lines)\n" ]
[ [ "sklearn.neural_network.MLPClassifier", "numpy.min", "numpy.max", "numpy.zeros", "sklearn.neural_network.MLPRegressor" ], [ "numpy.sqrt", "numpy.save", "numpy.append", "numpy.mean", "numpy.load", "numpy.array", "numpy.zeros", "numpy.where", "numpy.empty" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
JayWu7/Machine-Learning-Courses-Study-Record
[ "7586c3429514bc21c7cfe42f85ca8c0fcf8f072b", "7586c3429514bc21c7cfe42f85ca8c0fcf8f072b", "7586c3429514bc21c7cfe42f85ca8c0fcf8f072b" ]
[ "Algorithmic Methods of Data Mining/Final_project/graph_partitioning1.py", "Artificial Intelligence/r08exercise/u1b1.py", "Deep Learning/convolutional_nn.py" ]
[ "import numpy as np\nfrom sklearn.cluster import KMeans\nimport time\nfrom scipy.sparse.linalg import eigs\nfrom scipy.sparse import csr_matrix\n\n\nclass Graph:\n\n def __init__(self, data_name):\n self.filename = data_name\n self.n = None\n self.k = None\n self.edges = self.form_graph()\n # self.e = None # number of edges\n self.adj = None # adjacency list\n self.lap = None\n self.U = None\n self.labels = None\n\n def form_graph(self):\n '''\n form a graph from the .txt file\n :param file: data file\n :return: graph, in the shape used latter\n n, k\n '''\n with open('./data/{}'.format(self.filename), 'r') as f:\n first_line = f.readline()[:-1] # remove '\\n' at the end\n meta = first_line.split(' ')\n yield int(meta[2]), int(meta[-1])\n\n for i, edge in enumerate(f.readlines()):\n s, t = edge[:-1].split(' ')\n yield int(s), int(t)\n\n def generate_adj(self):\n '''\n generate the adjacency matrix of a graph\n :param graph: the edges of a graph\n :param n: the number of vertices in this graph\n :return: adjacency matrix\n '''\n a = time.time()\n self.n, self.k = next(self.edges)\n adj = [set() for _ in range(self.n)]\n for s, t in self.edges:\n adj[s].add(t)\n adj[t].add(s)\n b = time.time()\n print('Generate adjacency matrix cost: {}s'.format(b-a))\n return adj\n\n def generate_lap(self):\n '''\n From adjacency matrix and diagonal matrix build Laplacian matrix\n :param dia: diagonal matrix\n :param adj: adjacency matrix\n :return: Laplacian matrix\n '''\n a = time.time()\n self.lap = np.ndarray((self.n, self.n))\n for i, row in enumerate(self.adj):\n row_dia = np.zeros(self.n)\n row_dia[i] = len(row)\n row_adj = [1 if j in row else 0 for j in range(self.n)]\n self.lap[i] = row_dia - row_adj\n x = np.linalg.norm(self.lap)\n self.lap = self.lap / x\n b = time.time()\n print('Genearte Laplacian matrix cost: {}s'.format(b-a))\n\n def get_U(self):\n '''\n Using scipy.sparse.linalg.eigs to calculate matrix U that we need for kmeans algorithm\n :param lap: laplacian matrix\n :param k: a number\n :return: matrix U\n '''\n s = time.time()\n self.lap = csr_matrix(self.lap)\n _, first_k = eigs(self.lap, self.k, sigma=0)\n U = first_k.real\n # normalize U\n x = np.linalg.norm(U)\n U = U / x\n t = time.time()\n print('Generate U cost: {}s'.format(t - s))\n return U\n\n def k_means(self):\n '''\n Using K-means algorithm to cluster the data\n :param data: n points\n :param k: number of clusters\n :return: clusters\n '''\n s = time.time()\n kmeans = KMeans(n_clusters=self.k, algorithm='auto')\n kmeans.fit(self.U)\n t = time.time()\n print('Run k-means algorithm cost: {}s'.format(t - s))\n return kmeans.labels_\n\n def write_clusters(self):\n '''\n return the clusters of vertices\n :param labels: labels generated from kmeans method\n :return: clusters\n '''\n with open('./result/{}_res.txt'.format(self.filename[:-4]), 'w') as f:\n for i, l in enumerate(self.labels):\n f.write('{} {}\\n'.format(i, l))\n\n def main(self):\n self.adj = self.generate_adj()\n self.generate_lap()\n self.U = self.get_U()\n self.labels = self.k_means()\n self.write_clusters()\n\n\nif __name__ == '__main__':\n graph = Graph('soc-Epinions1.txt')\n graph.main()\n\n", "import numpy as np\n\na = [1, 6, 4, 2, 6, 4, 1]\nb = [1, 5, 9]\nc = [6, 4, 1]\n\n# a = 3,3,3,2,2,2,1,1,1\n# b = 3,3,2,2\n# c = [4]\n\ndata = [a, b, c]\n\n\ndef u1b1(data):\n n = len(data)\n index_n = n\n values = [d[0] + np.sqrt(2 * np.log(n)) for d in data]\n compare_values = [[values[i], i] for i in range(n)]\n already_calculate = [[data[i][0]] for i in range(n)]\n 
lengths = [len(d) for d in data]\n maxn = sum(lengths)\n data = [d[1:] for d in data]\n\n while n < maxn:\n n += 1\n i = max(compare_values)[1]\n for k in range(index_n):\n if i == k:\n already_calculate[k].append(data[k][0])\n data[k] = data[k][1:]\n if not data[k]:\n compare_values[k][0] = -float('inf')\n values[k] = np.mean(already_calculate[k]) + np.sqrt(2*np.log(n) / len(already_calculate[k]))\n if compare_values[k][0] != -float('inf'):\n compare_values[k][0] = values[k]\n\n print(values)\n\nu1b1(data)\n\n\n\n", "import torch\nimport torch.nn as nn\nimport torchvision\nimport torchvision.transforms as transforms\n\ndevice = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n\nnum_epochs = 5\nnum_classes = 10\nbatch_size = 100\nlearning_rate = 0.001\n\ntrain_dataset = torchvision.datasets.MNIST(\n root='./data/',\n train=True,\n transform=transforms.ToTensor(),\n download=True\n)\n\ntest_dataset = torchvision.datasets.MNIST(\n root='./data/',\n train=False,\n transform=transforms.ToTensor()\n)\n\ntrain_loader = torch.utils.data.DataLoader(\n dataset=train_dataset,\n batch_size=batch_size,\n shuffle=True\n)\n\ntest_loader = torch.utils.data.DataLoader(\n dataset=test_dataset,\n batch_size=batch_size,\n shuffle=False\n)\n\nclass ConvNet(nn.Module):\n def __init__(self, num_classes=10):\n super(ConvNet, self).__init__()\n self.layer1 = nn.Sequential(\n nn.Conv2d(1, 16, kernel_size=5, stride=1, padding=2),\n nn.BatchNorm2d(16),\n nn.ReLU(),\n nn.MaxPool2d(kernel_size=2, stride=2)\n )\n\n self.layer2 = nn.Sequential(\n nn.Conv2d(16, 32, kernel_size=5, stride=1, padding=2),\n nn.BatchNorm2d(32),\n nn.ReLU(),\n nn.MaxPool2d(kernel_size=2, stride=2)\n )\n\n self.fc = nn.Linear(7 * 7 * 32, num_classes)\n\n def forward(self, x):\n out = self.layer1(x)\n out = self.layer2(out)\n out = out.view(out.size(0), -1) # flatten the feature maps to a vector before the fully connected layer\n out = self.fc(out)\n\n return out\n\n\nmodel = ConvNet(num_classes).to(device)\n\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n\ntotal_step = len(train_loader)\n\nfor epoch in range(num_epochs):\n for i, (images, labels) in enumerate(train_loader):\n images = images.to(device)\n labels = labels.to(device)\n\n output = model(images)\n loss = criterion(output, labels)\n\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n if (i + 1) % 100 == 0:\n print('Epoch {}/{}, step {}/{}, Loss: {:.4f}'.format(epoch+1, num_epochs, i + 1, total_step, loss.item()))\n\nmodel.eval()\nwith torch.no_grad():\n correct = 0\n total = 0\n for images, labels in test_loader:\n images = images.to(device)\n labels = labels.to(device)\n outputs = model(images)\n _, predicted = torch.max(outputs.data, 1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\n print('Test Accuracy of the model on the 10000 test images: {} %'.format(100 * correct / total))\n\n\n\n" ]
[ [ "sklearn.cluster.KMeans", "numpy.linalg.norm", "numpy.ndarray", "scipy.sparse.csr_matrix", "scipy.sparse.linalg.eigs", "numpy.zeros" ], [ "numpy.log", "numpy.mean" ], [ "torch.nn.CrossEntropyLoss", "torch.max", "torch.nn.Conv2d", "torch.utils.data.DataLoader", "torch.nn.Linear", "torch.nn.MaxPool2d", "torch.no_grad", "torch.cuda.is_available", "torch.nn.BatchNorm2d", "torch.nn.ReLU" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "1.3", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "0.16", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
webclinic017/time-series-pipeline
[ "5ac418b91e395a48cba397f95d25d221adfff9bd", "5ac418b91e395a48cba397f95d25d221adfff9bd" ]
[ "EOD_api/test_EOD_api.py", "EOD_api/EOD_api.py" ]
[ "import os\nimport re\nimport datetime\nimport unittest\nfrom io import StringIO\nfrom unittest.mock import patch\n\nimport pandas as pd\n\nimport EOD_api as eod\n\nTOKEN = os.environ[\"EOD_TOKEN\"]\n\n\ndef date_parser(string):\n date_pattern = re.compile(\"([0-9]{4}-[0-9]{2}-[0-9]{2})[ ]\", re.VERBOSE)\n return date_pattern.sub(r\"\\1T\", string)\n\n\nclass TestGetEod(unittest.TestCase):\n # @classmethod\n # def setUp(cls):\n # pass\n # def tearDown(cls):\n # pass\n\n def test_idempotent__addtickers(self):\n d1 = eod.OhlcvIntraday(\n [\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\", intraday_frec=\"5m\"\n ).add_tickers([\"MSFT.US\"])\n d2 = (\n eod.OhlcvIntraday(\n [\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\", intraday_frec=\"5m\"\n )\n .add_tickers([\"MSFT.US\"])\n .add_tickers([\"MSFT.US\"])\n )\n self.assertEqual(d1, d2)\n\n def test_idempotent_truncate_dates(self):\n d1 = eod.Fundamental(\n [\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\"\n ).truncate_dates(\"2020-10-14\", \"2020-10-16\")\n d2 = (\n eod.Fundamental([\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\")\n .truncate_dates(\"2020-10-14\", \"2020-10-16\")\n .truncate_dates(\"2020-10-14\", \"2020-10-16\")\n )\n self.assertEqual(d1, d2)\n\n def test_idempotent_remove_tickers(self):\n d1 = eod.Fundamental(\n [\"AAPL.US\", \"MSFT.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\"\n ).remove_tickers([\"MSFT.US\"])\n d2 = (\n eod.Fundamental([\"AAPL.US\", \"MSFT.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\")\n .remove_tickers([\"MSFT.US\"])\n .remove_tickers([\"MSFT.US\"])\n )\n self.assertEqual(d1, d2)\n\n def test_add_remove(self):\n d1 = eod.OhlcvIntraday([\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\", \"1m\")\n d2 = (\n eod.OhlcvIntraday([\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\", \"1m\")\n .add_tickers([\"MSFT.US\"])\n .remove_tickers([\"MSFT.US\"])\n )\n self.assertEqual(d1, d2)\n\n def test_remove_all_tickers(self):\n with self.assertRaises(Exception):\n eod.Ohlcv([\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\").remove_tickers(\n [\"AAPL.US\"]\n ).retrieve_data()\n\n def test_misspelled_input(self):\n with self.assertRaises(Exception):\n eod.OhlcvIntraday(\n [\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\", intraday_frec=\"Daoly\"\n )\n\n def test_ohlcv_data_format_hasnt_changed(\n self,\n ): # Cambiar de antes de formatting a después de formatting\n expected_aapl = pd.read_csv(\n StringIO(\n \"\"\"\n Date Open High Low Close Adjusted_close Volume\n 2020-10-13 125.27 125.390 119.65 121.10 120.7110 262330500.0\n 2020-10-14 121.00 123.030 119.62 121.19 120.8008 151062297.0\n 2020-10-15 118.72 121.200 118.15 120.71 120.3223 112559203.0\n 2020-10-16 121.28 121.548 118.81 119.02 118.6377 115393797.0\n 275 NaN NaN NaN NaN NaN NaN\n \"\"\"\n ),\n sep=\"\\\\s+\",\n )\n\n url = \"https://eodhistoricaldata.com/api/eod/AAPL.US?api_token={}&from=2020-10-13&to=2020-10-17&period=d\".format(\n TOKEN\n )\n actual = pd.read_csv(\n url,\n usecols=[\n \"Date\",\n \"Volume\",\n \"Open\",\n \"Close\",\n \"High\",\n \"Low\",\n \"Adjusted_close\",\n ],\n )\n with patch.object(pd, \"read_csv\") as mock_read:\n mock_read.autospec = True\n mock_read.return_value = expected_aapl\n expected = pd.read_csv(\n url,\n usecols=[\n \"Date\",\n \"Volume\",\n \"Open\",\n \"Close\",\n \"High\",\n \"Low\",\n \"Adjusted_close\",\n ],\n )\n pd.testing.assert_frame_equal(actual, expected, rtol=5e-3)\n\n def test_index_formatting(self):\n expected_aapl = pd.read_csv(\n StringIO(\n \"\"\"\n Date Open High Low Close 
Adjusted_close Volume\n 2020-10-13 125.27 125.390 119.65 121.10 120.7110 262330500.0\n 2020-10-14 121.00 123.030 119.62 121.19 120.8008 151062297.0\n 2020-10-15 118.72 121.200 118.15 120.71 120.3223 112559203.0\n 2020-10-16 121.28 121.548 118.81 119.02 118.6377 115393797.0\n 275 NaN NaN NaN NaN NaN NaN\n \"\"\"\n ),\n sep=\"\\\\s+\",\n )\n expected_aapl_formatted = pd.read_csv(\n StringIO(\n date_parser(\n \"\"\"\n Stock Date Open High Low Close Adjusted_close Volume \n AAPL.US 2020-10-13 00:00:00+00:00 125.27 125.390 119.65 121.10 120.7110 262330500.0\n AAPL.US 2020-10-14 00:00:00+00:00 121.00 123.030 119.62 121.19 120.8008 151062297.0\n AAPL.US 2020-10-15 00:00:00+00:00 118.72 121.200 118.15 120.71 120.3223 112559203.0\n AAPL.US 2020-10-16 00:00:00+00:00 121.28 121.548 118.81 119.02 118.6377 115393797.0\n \"\"\"\n )\n ),\n sep=\"\\\\s+\",\n index_col=[0, 1],\n converters={\"Date\": lambda col: datetime.datetime.fromisoformat(col)},\n )\n\n with patch.object(pd, \"read_csv\") as mock_read:\n mock_read.autospec = True\n mock_read.return_value = expected_aapl\n formatted_mock = eod.Ohlcv(\n [\"AAPL.US\"], TOKEN, \"2020-10-13\", \"2020-10-17\"\n ).retrieve_data()\n pd.testing.assert_frame_equal(\n formatted_mock, expected_aapl_formatted, rtol=5e-3\n )\n\n\n# TODO? Write more tests:\n# Check that the data is concated/merged/joined properly, particularly when the indexes come with Nans\n# Check except clauses\n# Check duplicate df values\n# Assert errors with wrong args\n# etc\n\n# expected_ohlcv_concatted = pd.read_csv( StringIO( date_parser( \"\"\"\n# Stock Date Gmtoffset Datetime Open High Low Close Volume Returns\n# BP.LSE 2020-10-13 00:00:00+00:00 NaN NaN NaN NaN NaN NaN NaN NaN\n# BP.LSE 2020-10-14 00:00:00+00:00 0.0 2020-10-13 15:25:00 213.649993 214.000000 213.550003 213.856994 1210380.0 -0.001601\n# BP.LSE 2020-10-15 00:00:00+00:00 0.0 2020-10-14 15:25:00 213.000000 213.149993 212.600006 212.649993 1182246.0 0.019660\n# BP.LSE 2020-10-16 00:00:00+00:00 0.0 2020-10-15 15:25:00 207.149993 207.199996 206.500000 206.850006 1626720.0 -0.013826\n# AAPL.US 2020-10-13 00:00:00+00:00 NaN NaN NaN NaN NaN NaN NaN NaN\n# AAPL.US 2020-10-14 00:00:00+00:00 0.0 2020-10-13 19:55:00 121.139999 121.279998 121.029998 121.050003 4585723.0 0.003648\n# AAPL.US 2020-10-15 00:00:00+00:00 0.0 2020-10-14 19:55:00 121.580001 121.709999 121.139999 121.180000 3420583.0 0.015419\n# AAPL.US 2020-10-16 00:00:00+00:00 0.0 2020-10-15 19:55:00 120.790000 120.849998 120.580001 120.699996 3436603.0 -0.003550\n# MSFT.US 2020-10-13 00:00:00+00:00 NaN NaN NaN NaN NaN NaN NaN NaN\n# MSFT.US 2020-10-14 00:00:00+00:00 0.0 2020-10-13 19:55:00 223.320007 223.389999 222.750000 222.830001 1457493.0 0.000651\n# MSFT.US 2020-10-15 00:00:00+00:00 0.0 2020-10-14 19:55:00 221.199996 221.414993 220.600006 220.759994 1122912.0 0.012377\n# MSFT.US 2020-10-16 00:00:00+00:00 0.0 2020-10-15 19:55:00 219.639999 219.880004 219.490005 219.660003 1201342.0 -0.003900\n# \"\"\" ) ), sep=\"\\\\s+\", index_col=[0,1,2], converters = {'Date' : lambda col: datetime.datetime.fromisoformat( col ) \\\n# , 'Datetime' : lambda col: pd.to_datetime(col, format='%Y-%m-%dT%H:%M:%S', utc=True) } )\n\nif __name__ == \"__main__\":\n unittest.main()\n", "import json\nimport concurrent.futures\nfrom datetime import timedelta\nfrom datetime import datetime as dt\nfrom abc import ABCMeta, abstractmethod\nfrom inspect import getcallargs\n\nimport pandas as pd\n\npd.options.mode.chained_assignment = \"raise\"\n\n\ndef assert_arguments_in(args_to_check, 
allowed_values):\n def inner(f):\n def wrapper(*args, **kwargs):\n arguments = getcallargs(f, *args, **kwargs)\n for arg, values in zip(args_to_check, allowed_values):\n try:\n assert arguments[arg] in values\n except:\n raise ValueError(\n \"{.__name__}'s '{}' argument must be one of: {}\".format(\n f, arg, \", \".join(str(v) for v in values)\n )\n )\n rv = f(*args, **kwargs)\n return rv\n\n return wrapper\n\n return inner\n\n\nclass EodData(metaclass=ABCMeta):\n # Base class with methods common to the subclasses used to download data\n def __init__(self, tickers: list, token: str, start: str, end: str):\n self._tickers = set(tickers)\n self._token = token\n self._start = start # String to place into the url\n self._end = end # String to place into the url\n # The subclass' constructor/init is meant to have this line: self.__df = self._download_data( self._tickers )\n\n @abstractmethod\n def _download_data(self, tickers: list) -> pd.DataFrame:\n pass\n\n def __eq__(self, comparison):\n return (\n self._tickers == comparison._tickers\n and self._token == comparison._token\n and self._start == comparison._start\n and self._end == comparison._end\n and self._df.equals(comparison._df)\n )\n\n def retrieve_data(self):\n # Returns the data. I formatted an index with 2 columns:\n # A 'Date' column with dates converted to UTC using pd.to_datetime()\n # A 'Stock' column with the tickers\n try:\n assert self._tickers != set()\n except:\n raise ValueError(\"Add at least 1 ticker\")\n return self._df.sort_values([\"Stock\", \"Date\"]).set_index([\"Stock\", \"Date\"])\n\n def add_tickers(self, added_tickers):\n added_tickers = set(added_tickers) - self._tickers\n self._tickers = self._tickers.union(added_tickers)\n if added_tickers != set():\n self._df = pd.concat([self._df, self._download_data(added_tickers)])\n return self\n\n def remove_tickers(self, removed_tickers):\n removed_tickers = set(removed_tickers).intersection(self._tickers)\n self._tickers = self._tickers - removed_tickers\n self._df = self._df[~self._df[\"Stock\"].isin(removed_tickers)]\n return self\n\n def truncate_dates(self, start, end):\n try:\n assert pd.to_datetime(start, utc=True) >= pd.to_datetime(\n self._start, utc=True\n ) and pd.to_datetime(end, utc=True) <= pd.to_datetime(self._end, utc=True)\n except:\n raise ValueError(\"The given dates are outside the current interval\")\n self._start = pd.to_datetime(start, utc=True)\n self._end = pd.to_datetime(end, utc=True)\n self._df = (\n self._df.set_index(\"Date\", drop=False)\n .groupby(by=\"Stock\")\n .apply(lambda _df: _df.truncate(before=self._start, after=self._end))\n .reset_index(drop=True)\n ) # Change the last call to use inplace=True\n return self\n\n def _multithread_download_and_concat(self, tickers, single_thread_function):\n with concurrent.futures.ThreadPoolExecutor() as executor:\n futures = [\n executor.submit(single_thread_function, ticker) for ticker in tickers\n ]\n futures = [f.result() for f in futures if not f.result().empty]\n if len(futures) > 1:\n df = pd.concat(futures)\n elif len(futures) == 1:\n df = futures[0]\n else:\n df = pd.DataFrame(columns=[\"Date\", \"Stock\"])\n return df\n\n\nclass Ohlcv(EodData):\n def __init__(self, tickers, token, start, end):\n super().__init__(tickers, token, start, end)\n self._df = self._download_data(self._tickers)\n\n def _download_data(self, tickers):\n def historical_one_ticker(ticker):\n url = \"https://eodhistoricaldata.com/api/eod/{}?from={}&to={}&api_token={}&period={}\".format(\n ticker, self._start, self._end, 
self._token, \"d\"\n )\n try:\n df = pd.read_csv(\n url,\n usecols=[\n \"Date\",\n \"Volume\",\n \"Open\",\n \"Close\",\n \"High\",\n \"Low\",\n \"Adjusted_close\",\n ],\n )\n except:\n print(\"Failed to download ohlcv data for {}\".format(ticker))\n return pd.DataFrame()\n else:\n if df.empty:\n print(\"No ohlcv data for {}\".format(ticker))\n return pd.DataFrame()\n df.loc[:, \"Date\"] = pd.to_datetime(\n df[\"Date\"], errors=\"coerce\", utc=True\n )\n df = df.copy().dropna(subset=[\"Date\"])\n df.loc[:, \"Stock\"] = ticker\n return df\n\n df = self._multithread_download_and_concat(tickers, historical_one_ticker)\n return df\n\n\nclass Fundamental(EodData):\n def __init__(self, tickers, token, start, end):\n super().__init__(tickers, token, start, end)\n self._df = self._download_data(self._tickers)\n\n def _download_data(self, tickers):\n # As of 4/2021 the balanceSheet, cashFlow, and incmStatement from\n # 'https://eodhistoricaldata.com/api/fundamentals/{}?from={}&to={}&api_token={}&filter=Financials'\n # come with a column called 'filing_date', but if you download earnings report dates from\n # 'https://eodhistoricaldata.com/api/calendar/earnings?api_token={}&symbols={}&fmt=csv&from={}&to={}' you get a 'report_date' colummn that\n # dates 1 or a few days before the 'filing_date' column. I believe, by estimatig price volatility on those days with intraday data,\n # that the 'filing_date' is not the date the reports where realeased, but the 'report_date' is.\n # This is important for modeling and backtesting for price forecasting, so below i substitute the 'filing_date' column with the 'report_date' column.\n def earning_reports_dates(tickers):\n tickers_url = \",\".join(list(tickers))\n url = \"https://eodhistoricaldata.com/api/calendar/earnings?api_token={}&symbols={}&fmt=csv&from={}&to={}\".format(\n self._token, tickers_url, self._start, self._end\n )\n index_df = pd.read_csv(url, usecols=[\"Code\", \"Report_Date\", \"Date\"])\n if index_df.empty:\n # If there aren't any earning report dates in the given interval because it is too small, fetch dates starting 6 months earlier\n start_6months_earlier = str(\n pd.to_datetime(self._start) - pd.DateOffset(months=6)\n ).split(\" \")[0]\n url = \"https://eodhistoricaldata.com/api/calendar/earnings?api_token={}&symbols={}&fmt=csv&from={}&to={}\".format(\n self._token, tickers_url, start_6months_earlier, self._end\n )\n index_df = pd.read_csv(url, usecols=[\"Code\", \"Report_Date\", \"Date\"])\n index_df[[\"Report_Date\", \"Date\"]] = index_df[[\"Report_Date\", \"Date\"]].apply(\n pd.to_datetime, errors=\"coerce\", utc=True, infer_datetime_format=True\n )\n index_df = index_df.copy().dropna(subset=[\"Report_Date\", \"Date\"])\n index_df.rename(\n columns={\n \"Date\": \"Period_beginning\",\n \"Report_Date\": \"Date\",\n \"Code\": \"Stock\",\n },\n inplace=True,\n )\n return index_df\n\n def fundamental_one_ticker(ticker):\n url = \"https://eodhistoricaldata.com/api/fundamentals/{}?from={}&to={}&api_token={}&filter=Financials\".format(\n ticker, self._start, self._end, self._token\n )\n try:\n df = pd.read_json(url).drop([\"currency_symbol\", \"yearly\"], axis=0)\n json_struct = json.loads(df.to_json(orient=\"split\"))\n df = pd.json_normalize(json_struct)\n balanceSheet = pd.DataFrame.from_dict(df[\"data\"][0][0][0]).T\n cashFlow = pd.DataFrame.from_dict(df[\"data\"][0][0][1]).T\n incmStatement = pd.DataFrame.from_dict(df[\"data\"][0][0][2]).T\n assert (\n balanceSheet.empty == False\n and cashFlow.empty == False\n and incmStatement.empty == 
False\n )\n except:\n print(\"Failed to download fundamental data for {}\".format(ticker))\n return pd.DataFrame()\n else:\n if df.empty:\n print(\"No fundamental data for {}\".format(ticker))\n return pd.DataFrame()\n df = (\n balanceSheet.join(cashFlow, how=\"outer\", lsuffix=\"_DROP\")\n .filter(regex=\"^(?!.*_DROP)\")\n .join(incmStatement, how=\"left\", lsuffix=\"_DROP\")\n .filter(regex=\"^(?!.*_DROP)\")\n )\n df[\"Stock\"] = ticker\n df[\"date\"] = pd.to_datetime(\n df[\"date\"], errors=\"coerce\", utc=True, infer_datetime_format=True\n )\n df = df.copy().dropna(subset=[\"date\"])\n return df\n\n index_df = earning_reports_dates(tickers)\n df = self._multithread_download_and_concat(tickers, fundamental_one_ticker)\n df = df.filter(regex=\"^(?!filing_date)\")\n reindexed_df = index_df.merge(\n df,\n left_on=[\"Stock\", \"Period_beginning\"],\n right_on=[\"Stock\", \"date\"],\n how=\"left\",\n ) # The 'Report_Date' column renamed to 'Date' in earning_report_dates() becomes the new 'Date' column for the 'reindexed_df' variable\n reindexed_df.drop(\"date\", axis=1, inplace=True)\n return reindexed_df\n\n\nclass OhlcvIntraday(EodData):\n @assert_arguments_in([\"intraday_frec\"], [[\"1m\", \"5m\"]])\n def __init__(self, tickers, token, start, end, intraday_frec):\n super().__init__(tickers, token, start, end)\n self.__frec = intraday_frec\n self._df = self._download_data(self._tickers)\n\n def _download_data(self, tickers):\n def intraday_one_ticker(ticker):\n def intraday_one_ticker_100_days(start, end):\n start = str(start.timestamp())\n end = str(end.timestamp())\n url = \"https://eodhistoricaldata.com/api/intraday/{}?api_token={}&fmt=csv&from={}&to={}&interval={}\".format(\n ticker, token, start, end, self.__frec\n )\n try:\n df = pd.read_csv(\n url,\n usecols=[\n \"Timestamp\",\n \"Gmtoffset\",\n \"Datetime\",\n \"Open\",\n \"High\",\n \"Low\",\n \"Close\",\n \"Volume\",\n ],\n ) # Gmtoffset comes in seconds, but as of 4/2021 comes only with value 0\n except:\n print(\n \"Failed to download intraday data for {} between {} and {}\".format(\n ticker,\n dt.fromtimestamp(int(float(start))),\n dt.fromtimestamp(int(float(end))),\n )\n )\n return pd.DataFrame()\n else:\n if df.empty:\n print(\n \"No intraday data for {} between {} and {}\".format(\n ticker,\n dt.fromtimestamp(int(float(start))),\n dt.fromtimestamp(int(float(end))),\n )\n )\n return pd.DataFrame()\n return df\n\n amount_days = (pd.to_datetime(self._end) - pd.to_datetime(self._start)).days\n start = pd.to_datetime(self._start, utc=True)\n end = pd.to_datetime(self._end, utc=True)\n token = self._token\n if (\n amount_days > 100\n ): # The data provider only allows fetching up to 100 days of intraday data per api call, so a for_loop and divmod are used in order to get +100 days.\n div, remainder = divmod(amount_days, 100)\n with concurrent.futures.ThreadPoolExecutor() as executor:\n futures = [\n executor.submit(\n intraday_one_ticker_100_days,\n start=start + timedelta(days=100 * i),\n end=start + timedelta(days=100 * (i + 1)),\n )\n for i in range(0, div)\n ]\n futures = [f.result() for f in futures if not f.result().empty]\n if remainder != 0:\n last_batch = intraday_one_ticker_100_days(\n start + timedelta(days=(amount_days - remainder)), end\n )\n futures.append(last_batch)\n if len(futures) > 1:\n df = pd.concat(futures)\n elif len(futures) == 1:\n df = futures[0]\n else:\n df = pd.DataFrame()\n else:\n df = intraday_one_ticker_100_days(start, end)\n if not df.empty:\n df.loc[:, \"Stock\"] = ticker\n 
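# Align the intraday frame with the (Stock, Date) schema that retrieve_data() indexes on\n 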
df.rename(columns={\"Datetime\": \"Date\"}, inplace=True)\n df.loc[:, \"Date\"] = pd.to_datetime(\n df[\"Date\"], errors=\"coerce\", utc=True\n )\n return df\n\n df = self._multithread_download_and_concat(tickers, intraday_one_ticker)\n return df.dropna(subset=[\"Date\"])\n\n\ndef get_exchange_list(token):\n url = \"https://eodhistoricaldata.com/api/exchanges-list/?api_token={}\".format(token)\n df = pd.read_json(url)\n print(df)\n\n\ndef get_all_tickers_exchange(exchange, token):\n url = (\n \"https://eodhistoricaldata.com/api/exchange-symbol-list/{}?api_token={}\".format(\n exchange, token\n )\n )\n df = pd.read_csv(url)\n return df\n\n\ndef stock_screener(\n n_stocks, token, exchange, initial_offset=0, mincap=None, maxcap=None\n):\n # Finds stocks by marketcap from max to min\n # initial_offset : number of stocks to skip\n # More ways to filter stocks can be found at: https://eodhistoricaldata.com/financial-apis/stock-market-screener-api/\n def one_api_call(offset, limit):\n if (mincap is None) and (maxcap is None):\n url = 'https://eodhistoricaldata.com/api/screener?api_token={}&sort=market_capitalization.desc&limit={}&offset={}&filters=[[\"exchange\",\"=\",\"{}\"]]'.format(\n token, limit, offset, exchange\n )\n elif (mincap is not None) and (maxcap is None):\n url = 'https://eodhistoricaldata.com/api/screener?api_token={}&sort=market_capitalization.desc&limit={}&offset={}&filters=[[\"market_capitalization\",\">\",{}],[\"exchange\",\"=\",\"{}\"]]'.format(\n token, limit, offset, mincap, exchange\n )\n elif (mincap is None) and (maxcap is not None):\n url = 'https://eodhistoricaldata.com/api/screener?api_token={}&sort=market_capitalization.desc&limit={}&offset={}&filters=[[\"market_capitalization\",\"<\",{}],[\"exchange\",\"=\",\"{}\"]]'.format(\n token, limit, offset, maxcap, exchange\n )\n else:\n url = 'https://eodhistoricaldata.com/api/screener?api_token={}&sort=market_capitalization.desc&limit={}&offset={}&filters=[[\"market_capitalization\",\">\",{}],[\"market_capitalization\",\"<\",{}],[\"exchange\",\"=\",\"{}\"]]'.format(\n token, limit, offset, mincap, maxcap, exchange\n )\n\n df = pd.read_json(url)\n json_struct = json.loads(df.to_json(orient=\"records\"))\n df = pd.json_normalize(json_struct)\n if not df.empty:\n return df\n else:\n return pd.DataFrame()\n\n stocks = list()\n if (\n n_stocks > 100\n ): # The data provider only allows to use their screener api to get up to a hundred stocks per api call, so a for_loop and divmod are used in order to screen +100 stocks.\n div, remainder = divmod(n_stocks, 100)\n for i in range(0, div):\n batch = one_api_call(offset=initial_offset + 100 * i, limit=100)\n stocks.append(batch)\n if remainder != 0:\n last_batch = one_api_call(\n offset=initial_offset + (n_stocks - remainder), limit=remainder\n )\n stocks.append(last_batch)\n else:\n only_batch = one_api_call(offset=initial_offset, limit=n_stocks)\n stocks.append(only_batch)\n if len(stocks) > 1:\n stocks = pd.concat(stocks).reset_index(drop=True)\n elif len(stocks) == 1:\n stocks = stocks[0]\n stocks.columns = [col.replace(\"data.\", \"\") for col in stocks.columns]\n stocks.loc[:, \"code\"] = stocks[\"code\"] + \".\" + stocks[\"exchange\"]\n return stocks\n\n\n# TODO? Add:\n# technical indicators, options, live,\n# fundamentals: index, etfs, macro indicators, bonds, goverment bonds, cds, insider trading, etc\n# upcoming earning, ipos, splits\n# bulk data full exchange 1 day\n# financial news\n# live data\n# etc\n# TODO? Using requests.Session might increase performance\n" ]
[ [ "pandas.read_csv", "pandas.testing.assert_frame_equal" ], [ "pandas.concat", "pandas.read_csv", "pandas.to_datetime", "pandas.DateOffset", "pandas.json_normalize", "pandas.DataFrame", "pandas.read_json", "pandas.DataFrame.from_dict" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "2.0" ], "scipy": [], "tensorflow": [] } ]
Leylasaadi/MACT20.21_Digital_tools_Big_Data_part_2
[ "94cafa0581ec36a305867ebfdcb91c787aa77a16" ]
[ "session4/e_animations_2axis.py" ]
[ "# encoding: utf-8\n\n##################################################\n# This script shows how to create animated plots using matplotlib and a basic dataset\n# Multiple tutorials inspired the current design but they mostly came from:\n# hhttps://towardsdatascience.com/how-to-create-animated-graphs-in-python-bb619cc2dec1\n# Note: the project keeps updating every course almost yearly\n##################################################\n#\n##################################################\n# Author: Diego Pajarito\n# Credits: [Institute for Advanced Architecture of Catalonia - IAAC, Advanced Architecture group]\n# License: Apache License Version 2.0\n# Version: 1.0.0\n# Maintainer: Diego Pajarito\n# Email: [email protected]\n# Status: development\n##################################################\n\nimport matplotlib\nimport matplotlib.animation as animation\nimport matplotlib.pyplot as plt\nimport numpy as np\n# We need to import numpy and matplotlib library\n# importing libraries\nimport pandas as pd\nimport seaborn as sns\n\n# Read files and prepare data\ndata = pd.read_csv('../data/2021_seguiment-covid19-bcn.csv')\n#data = pd.read_csv('https://opendata-ajuntament.barcelona.cat/data/dataset/4f3ffbda-d5be-4f2a-a836-26a77be6df1a/resource/f627ac0a-d05f-416d-9773-eeb464a3fc44/download')\ndata.columns = ['date_indicator', 'frequency_indicator', 'place', 'name_indicator',\n 'name_variable', 'value', 'unit', 'source']\n# We will use two datasets to generate plots\ndata_daily = data[data['name_indicator'] == 'Casos de COVID-19 a Barcelona (diari)']\ndata_accumulated = data[data['name_indicator'] == 'Casos de COVID-19 a Barcelona (acumulat)']\n\n# We need the data to be in time format to calculate values in days after day zero\ndata_daily.loc[:, 'date_indicator'] = pd.to_datetime(data_daily['date_indicator'])\ninitial_day = data_daily['date_indicator'].min()\ndata_daily.loc[:, 'day_after_zero'] = data_daily['date_indicator'] - initial_day\ndata_daily.loc[:, 'day_after_zero'] = data_daily['day_after_zero']/np.timedelta64(1, 'D')\n# We need the data to be in time format to calculate values in days after day zero\ndata_accumulated.loc[:, 'date_indicator'] = pd.to_datetime(data_accumulated['date_indicator'])\ndata_accumulated.loc[:, 'day_after_zero'] = data_accumulated['date_indicator'] - initial_day\ndata_accumulated.loc[:, 'day_after_zero'] = data_accumulated['day_after_zero']/np.timedelta64(1, 'D')\n\n# we also extract some values to set the plot limits\nmax_day = data_daily['day_after_zero'].max().astype(int)\nmax_cases_daily = data_daily['value'].max()\nmax_cases_accumulated = data_accumulated['value'].max()\ntitle = 'Barcelona: '\n\n# We then prepare the writer and animation file options\nWriter = animation.writers['ffmpeg']\nwriter = Writer(fps=20, metadata=dict(artist='MaCTResearcher'), bitrate=1800)\n# If error using anaconda try to install ffmpeg\n# conda install -c conda-forge ffmpeg\n\n# We create an initial plot with basic configuration a single line\nfig, ax1 = plt.subplots()\nfig.set_size_inches(10, 6)\nplt.title(title + 'Covid-19 cases', fontsize=18)\nplt.xlabel('Day after case 1', fontsize=14)\nplt.ylim(0, max_cases_accumulated)\nplt.ylabel('Accumulated', fontsize=18)\n\n# # now we configure the secondary axis\nax2 = ax1.twinx()\nplt.ylim(0, max_cases_daily*2)\ncases_ticks = np.arange(0, max_day, 50)\n\n\n# We need to set an animation function to handle individual behaviour per frame\n# variable \"i\" is the frame id that can be used to handle queries or filters for your 
data\ndef animate(i):\n frame_data_daily = data_daily[data_daily['day_after_zero'] <= i]\n frame_data_accumulated = data_accumulated[data_accumulated['day_after_zero'] <= i]\n sns.lineplot(x='day_after_zero', y='value', data=frame_data_accumulated, color=\"r\", ax=ax1)\n sns.barplot(x='day_after_zero', y='value', data=frame_data_daily, color='b', ax=ax2)\n plt.ylabel('Daily', fontsize=18)\n plt.xlim(0, max_day)\n plt.xticks(cases_ticks)\n plt.xlabel('Day after case 1', fontsize=18)\n # Handling secondary axis implies different management in the animate function\n\n\nani = matplotlib.animation.FuncAnimation(fig, animate, frames=max_day, repeat=True)\nani.save('covid_cases_bcn_2axis.mp4', writer=writer)\nprint('end')\n" ]
[ [ "pandas.read_csv", "pandas.to_datetime", "matplotlib.pyplot.title", "matplotlib.pyplot.ylim", "numpy.arange", "matplotlib.pyplot.subplots", "numpy.timedelta64", "matplotlib.pyplot.xlim", "matplotlib.animation.FuncAnimation", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.xticks", "matplotlib.pyplot.ylabel" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [], "tensorflow": [] } ]
genepattern/genepattern-utils
[ "950d748301b3c4d07ad8d24c9b037bbb9b4c80e2" ]
[ "genepattern/utils/clustering.py" ]
[ "\"\"\"\nCopied and modified from the dev branch of:\nhttps://github.com/genepattern/HierarchicalClustering\non 2018-01-31\n\"\"\"\nimport sys\nimport numpy as np\nfrom statistics import mode\nfrom sklearn.metrics import pairwise\nfrom sklearn import metrics\n\nfrom scipy.cluster.hierarchy import dendrogram\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport pandas as pd\nimport itertools\nfrom sklearn.cluster import AgglomerativeClustering\nimport scipy\nimport itertools\nfrom collections import defaultdict\nfrom .elemental import *\nfrom .information import *\n\n# check if these are repeated:\nimport os\nimport sys\n\ntasklib_path = os.path.dirname(os.path.realpath(sys.argv[0]))\n# sys.path.append(tasklib_path + \"/ccalnoir\")\n\n# 2018-02-06 Maybe uncomment these next two\n# import matplotlib as mpl\n# mpl.use('Agg')\n\n# This is forprinting the hyperlink\nfrom IPython.core.display import display, HTML\n\n# import pandas as pd\n# import numpy as np\nimport scipy\nimport seaborn as sns\nfrom matplotlib import pyplot as plt\nfrom matplotlib import gridspec\nfrom sklearn.cluster import AgglomerativeClustering\n\n# from time import time\n# import cuzcatlan as cusca\nsns.set_style(\"white\")\nimport matplotlib as mpl\n\nmpl.rcParams['ytick.labelsize'] = 16\nmpl.rcParams['xtick.labelsize'] = 16\nmpl.rcParams['axes.titlesize'] = 24\nmpl.rcParams['axes.labelsize'] = 20\n\nSIGNIFICANT_DIGITS = 7\n\ninput_col_distance_dict = {\n # These are the values I expect\n \"No column clustering\": \"No_column_clustering\",\n \"Uncentered correlation\": \"uncentered_pearson\",\n \"Pearson correlation\": \"pearson\",\n \"Uncentered correlation, absolute value\": \"absolute_uncentered_pearson\",\n \"Pearson correlation, absolute value\": \"absolute_pearson\",\n \"Spearman's rank correlation\": \"spearman\",\n \"Kendall's tau\": \"kendall\",\n \"Euclidean distance\": \"euclidean\",\n \"City-block distance\": \"manhattan\",\n \"No_column_clustering\": \"No_column_clustering\",\n # These are the values the GpUnit tests give\n \"0\": \"No_column_clustering\",\n \"1\": \"uncentered_pearson\",\n \"2\": \"pearson\",\n \"3\": \"absolute_uncentered_pearson\",\n \"4\": \"absolute_pearson\",\n \"5\": \"spearman\",\n \"6\": \"kendall\",\n \"7\": \"euclidean\",\n \"8\": \"manhattan\",\n \"9\": \"information_coefficient\",\n # These are the values I expect from the comand line\n \"no_col\": \"No_column_clustering\",\n \"uncentered_pearson\": \"uncentered_pearson\",\n \"pearson\": \"pearson\",\n \"absolute_uncentered_pearson\": \"absolute_uncentered_pearson\",\n \"absolute_pearson\": \"absolute_pearson\",\n \"spearman\": \"spearman\",\n \"kendall\": \"kendall\",\n \"euclidean\": \"euclidean\",\n \"manhattan\": \"manhattan\",\n \"Cosine\": \"cosine\",\n \"cosine\": \"cosine\",\n \"ic\": \"information_coefficient\",\n \"information_coefficient\": \"information_coefficient\",\n \"Information Coefficient\": \"information_coefficient\",\n}\n\ninput_row_distance_dict = {\n # These are the values I expect\n \"No row clustering\": \"No_row_clustering\",\n \"Uncentered correlation\": \"uncentered_pearson\",\n \"Pearson correlation\": \"pearson\",\n \"Uncentered correlation, absolute value\": \"absolute_uncentered_pearson\",\n \"Pearson correlation, absolute value\": \"absolute_pearson\",\n \"Spearman's rank correlation\": \"spearman\",\n \"Kendall's tau\": \"kendall\",\n \"Euclidean distance\": \"euclidean\",\n \"City-block distance\": \"manhattan\",\n \"No_row_clustering\": \"No_row_clustering\",\n # These are the 
values the GpUnit tests give\n \"0\": \"No_row_clustering\",\n \"1\": \"uncentered_pearson\",\n \"2\": \"pearson\",\n \"3\": \"absolute_uncentered_pearson\",\n \"4\": \"absolute_pearson\",\n \"5\": \"spearman\",\n \"6\": \"kendall\",\n \"7\": \"euclidean\",\n \"8\": \"manhattan\",\n \"9\": \"information_coefficient\",\n # These are the values I expect from the command line\n \"no_row\": \"No_row_clustering\",\n \"uncentered_pearson\": \"uncentered_pearson\",\n \"pearson\": \"pearson\",\n \"absolute_uncentered_pearson\": \"absolute_uncentered_pearson\",\n \"absolute_pearson\": \"absolute_pearson\",\n \"spearman\": \"spearman\",\n \"kendall\": \"kendall\",\n \"euclidean\": \"euclidean\",\n \"manhattan\": \"manhattan\",\n \"Cosine\": \"cosine\",\n \"cosine\": \"cosine\",\n \"ic\": \"information_coefficient\",\n \"information_coefficient\": \"information_coefficient\",\n \"Information Coefficient\": \"information_coefficient\",\n}\n\ninput_clustering_method = {\n # These are the values I expect\n 'Pairwise complete-linkage': 'complete',\n 'Pairwise average-linkage': 'average',\n 'Pairwise ward-linkage': 'ward',\n # These are the values the GpUnit tests give\n 'm': 'complete',\n 'a': 'average', # I think this is the default\n}\n\ninput_row_centering = {\n # These are the values I expect\n 'No': None,\n 'Subtract the mean from each row': 'Mean',\n 'Subtract the median from each row': 'Median',\n # These are the values the GpUnit tests give\n 'None': None,\n 'Median': 'Median',\n 'Mean': 'Mean',\n}\n\ninput_row_normalize = {\n # These are the values I expect\n 'No': False,\n 'Yes': True,\n # These are the values the GpUnit tests give\n 'False': False,\n 'True': True,\n}\n\ninput_col_centering = {\n # These are the values I expect\n 'No': None,\n 'Subtract the mean from each column': 'Mean',\n 'Subtract the median from each column': 'Median',\n # These are the values the GpUnit tests give\n 'None': None,\n 'Median': 'Median',\n 'Mean': 'Mean',\n}\n\ninput_col_normalize = {\n # These are the values I expect\n 'No': False,\n 'Yes': True,\n # These are the values the GpUnit tests give\n 'False': False,\n 'True': True,\n}\n\n\ndef parse_inputs(args=sys.argv):\n # inp = []\n # inp = args\n # Error handling:\n arg_n = len(args)\n if arg_n == 1:\n sys.exit(\"Not enough parameter files were provided. 
This module needs a GCT file to work.\")\n elif arg_n == 2:\n gct_name = args[1]\n col_distance_metric = 'euclidean'\n output_distances = False\n row_distance_metric = 'No_row_clustering'\n clustering_method = 'Pairwise average-linkage'\n output_base_name = 'HC_out'\n row_normalization = False\n col_normalization = False\n row_centering = None\n col_centering = None\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric = euclidean (default value)\")\n print(\"\\toutput_distances =\", output_distances, \"(default: not computing it and creating a file)\")\n print(\"\\trow_distance_metric =\", row_distance_metric, \"(default: No row clustering)\")\n print(\"\\tclustering_method =\", clustering_method, \"(default: Pairwise average-linkage)\")\n print(\"\\toutput_base_name =\", output_base_name, \"(default: HC_out)\")\n print(\"\\trow_normalization =\", row_normalization, \"(default: False)\")\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 3:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = False\n row_distance_metric = 'No_row_clustering'\n clustering_method = 'Pairwise average-linkage'\n output_base_name = 'HC_out'\n row_normalization = False\n col_normalization = False\n row_centering = None\n col_centering = None\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", input_col_distance_dict[col_distance_metric])\n print(\"\\toutput_distances =\", output_distances, \"(default: not computing it and creating a file)\")\n print(\"\\trow_distance_metric =\", row_distance_metric, \"(default: No row clustering)\")\n print(\"\\tclustering_method =\", clustering_method, \"(default: Pairwise average-linkage)\")\n print(\"\\toutput_base_name =\", output_base_name, \"(default: HC_out)\")\n print(\"\\trow_normalization =\", row_normalization, \"(default: False)\")\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 4:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = 'No_row_clustering'\n clustering_method = 'Pairwise average-linkage'\n output_base_name = 'HC_out'\n row_normalization = False\n col_normalization = False\n row_centering = None\n col_centering = None\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric, \"(default: No row clustering)\")\n print(\"\\tclustering_method =\", clustering_method, \"(default: Pairwise average-linkage)\")\n print(\"\\toutput_base_name =\", output_base_name, \"(default: HC_out)\")\n print(\"\\trow_normalization =\", row_normalization, \"(default: False)\")\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", 
col_centering, \"(default: None)\")\n elif arg_n == 5:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = 'Pairwise average-linkage'\n # clustering_method = 'Pairwise complete-linkage'\n output_base_name = 'HC_out'\n row_normalization = False\n col_normalization = False\n row_centering = None\n col_centering = None\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method, \"(default: Pairwise average-linkage)\")\n print(\"\\toutput_base_name =\", output_base_name, \"(default: HC_out)\")\n print(\"\\trow_normalization =\", row_normalization, \"(default: False)\")\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 6:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = args[5]\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n clustering_method = input_clustering_method[clustering_method]\n if clustering_method not in linkage_dic.keys():\n exit(\"Clustering method chosen not supported. 
This should not have happened.\")\n\n if (linkage_dic[clustering_method] == 'ward') and (col_distance_metric != 'average'):\n exit(\"When choosing 'Pairwise ward-linkage' the distance metric *must* be 'average' \")\n\n output_base_name = 'HC_out'\n row_normalization = False\n col_normalization = False\n row_centering = None\n col_centering = None\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method)\n print(\"\\toutput_base_name =\", output_base_name, \"(default: HC_out)\")\n print(\"\\trow_normalization =\", row_normalization, \"(default: False)\")\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 7:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = args[5]\n output_base_name = args[6]\n row_normalization = False\n col_normalization = False\n row_centering = None\n col_centering = None\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n clustering_method = input_clustering_method[clustering_method]\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method)\n print(\"\\toutput_base_name =\", output_base_name)\n print(\"\\trow_normalization =\", row_normalization, \"(default: False)\")\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 8:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = args[5]\n output_base_name = args[6]\n row_normalization = args[7]\n col_normalization = False\n row_centering = None\n col_centering = None\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n clustering_method = input_clustering_method[clustering_method]\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n row_normalization = input_row_normalize[row_normalization]\n # if (row_normalization == 'False') or (row_normalization == 'F') \\\n # or (row_normalization == 'false') or (row_normalization == 'f'):\n # row_normalization = False\n # else:\n # row_normalization = True\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n 
print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method)\n print(\"\\toutput_base_name =\", output_base_name)\n print(\"\\trow_normalization =\", row_normalization)\n print(\"\\tcol_normalization =\", col_normalization, \"(default: False)\")\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 9:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = args[5]\n output_base_name = args[6]\n row_normalization = args[7]\n col_normalization = args[8]\n row_centering = None\n col_centering = None\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n clustering_method = input_clustering_method[clustering_method]\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n # Row normalization\n row_normalization = input_row_normalize[row_normalization]\n # if (row_normalization == 'False') or (row_normalization == 'F') \\\n # or (row_normalization == 'false') or (row_normalization == 'f'):\n # row_normalization = False\n # else:\n # row_normalization = True\n\n # Column normalization\n col_normalization = input_col_normalize[col_normalization]\n # if (col_normalization == 'False') or (col_normalization == 'F') \\\n # or (col_normalization == 'false') or (col_normalization == 'f'):\n # col_normalization = False\n # else:\n # col_normalization = True\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method)\n print(\"\\toutput_base_name =\", output_base_name)\n print(\"\\trow_normalization =\", row_normalization)\n print(\"\\tcol_normalization =\", col_normalization)\n print(\"\\trow_centering =\", row_centering, \"(default: None)\")\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 10:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = args[5]\n output_base_name = args[6]\n row_normalization = args[7]\n col_normalization = args[8]\n row_centering = args[9]\n col_centering = None\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n clustering_method = input_clustering_method[clustering_method]\n\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n # Row normalization\n row_normalization = input_row_normalize[row_normalization]\n # if (row_normalization == 'False') or (row_normalization == 'F') \\\n # or (row_normalization == 'false') or (row_normalization == 'f'):\n # row_normalization = False\n # else:\n # row_normalization = True\n\n # Column normalization\n col_normalization = input_col_normalize[col_normalization]\n # if (col_normalization == 'False') or 
(col_normalization == 'F') \\\n # or (col_normalization == 'false') or (col_normalization == 'f'):\n # col_normalization = False\n # else:\n # col_normalization = True\n\n # row_centering\n row_centering = input_row_centering[row_centering]\n if (row_centering == 'None') or (col_normalization == 'N') \\\n or (row_centering == 'none') or (col_normalization == 'n'):\n col_normalization = None\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method)\n print(\"\\toutput_base_name =\", output_base_name)\n print(\"\\trow_normalization =\", row_normalization)\n print(\"\\tcol_normalization =\", col_normalization)\n print(\"\\trow_centering =\", row_centering)\n print(\"\\tcol_centering =\", col_centering, \"(default: None)\")\n elif arg_n == 11:\n gct_name = args[1]\n col_distance_metric = args[2]\n output_distances = args[3]\n row_distance_metric = args[4]\n clustering_method = args[5]\n output_base_name = args[6]\n row_normalization = args[7]\n col_normalization = args[8]\n row_centering = args[9]\n col_centering = args[10]\n\n col_distance_metric = input_col_distance_dict[col_distance_metric]\n row_distance_metric = input_row_distance_dict[row_distance_metric]\n clustering_method = input_clustering_method[clustering_method]\n\n if (output_distances == 'False') or (output_distances == 'F') \\\n or (output_distances == 'false') or (output_distances == 'f'):\n output_distances = False\n else:\n output_distances = True\n\n # Row normalization\n row_normalization = input_row_normalize[row_normalization]\n # if (row_normalization == 'False') or (row_normalization == 'F') \\\n # or (row_normalization == 'false') or (row_normalization == 'f'):\n # row_normalization = False\n # else:\n # row_normalization = True\n\n # Column normalization\n col_normalization = input_col_normalize[col_normalization]\n # if (col_normalization == 'False') or (col_normalization == 'F') \\\n # or (col_normalization == 'false') or (col_normalization == 'f'):\n # col_normalization = False\n # else:\n # col_normalization = True\n\n # row_centering\n row_centering = input_row_centering[row_centering]\n if (row_centering == 'None') or (col_normalization == 'N') \\\n or (row_centering == 'none') or (col_normalization == 'n'):\n col_normalization = None\n\n # col_centering\n col_centering = input_col_centering[col_centering]\n if (col_centering == 'None') or (col_centering == 'N') \\\n or (col_centering == 'none') or (col_centering == 'n'):\n col_centering = None\n\n print(\"Using:\")\n print(\"\\tgct_name =\", gct_name)\n print(\"\\tcol_distance_metric =\", col_distance_metric)\n print(\"\\toutput_distances =\", output_distances)\n print(\"\\trow_distance_metric =\", row_distance_metric)\n print(\"\\tclustering_method =\", clustering_method)\n print(\"\\toutput_base_name =\", output_base_name)\n print(\"\\trow_normalization =\", row_normalization)\n print(\"\\tcol_normalization =\", col_normalization)\n print(\"\\trow_centering =\", row_centering)\n print(\"\\tcol_centering =\", col_centering)\n else:\n sys.exit(\"Too many inputs. 
This module needs only a GCT file to work, \"\n \"plus an optional input choosing between Pearson Correlation or Information Coefficient.\")\n\n print(args)\n return gct_name, col_distance_metric, output_distances, row_distance_metric, clustering_method, output_base_name, \\\n row_normalization, col_normalization, row_centering, col_centering\n\n\ndef plot_dendrogram(model, data, tree, axis, dist=mydist, clustering_method='average',\n title='no_title.png', color_threshold=None, orientation='top', **kwargs):\n # plt.clf()\n\n # modified from https://github.com/scikit-learn/scikit-learn/pull/3464/files\n # Children of hierarchical clustering\n children = model.children_\n # Distances between each pair of children\n # TODO: Fix this mydist\n # distance = dendodist(children, euclidian_similarity)\n # distance = dendodist(children, dist)\n\n og_distances = better_dendodist(children, dist, tree, data, axis=axis, clustering_method=clustering_method)\n # print(og_distances)\n # og_distances = [abs(temp) for temp in og_distances]\n\n # Turn similarity into non-negative value Scipy's dendrogram needs this\n if dist in [custom_euclidean_sim, absolute_uncentered_pearson_corr, absolute_pearson_corr]:\n # These similarities are already nonnegative [0,inf) or [0,1]\n # og_distances = og_distances\n pass\n else: # all the correlation similarities [-1,-1]\n og_distances = [temp + 1 for temp in og_distances]\n\n # Now that all similarities are nonnegative, we turn them into a distance for plotting purposes\n og_distances = [1 / temp for temp in og_distances]\n\n # print(og_distances)\n distance = np.cumsum(og_distances)\n # distance = og_distances\n # distance = better_dendodist(children, dist, tree, data, axis=axis)\n\n # norm_distances = []\n # for value in distance:\n # norm_distances.append(1/value)\n # norm_distances = distance\n\n list_of_children = list(get_children(tree, leaves_are_self_children=False).values())\n no_of_observations = [len(i) for i in list_of_children if i]\n no_of_observations.append(len(no_of_observations) + 1)\n # print(len(no_of_observations))\n\n # print(children)\n\n # print(list(tree.values()))\n\n # print(norm_distances)\n\n # print(distance)\n if all(value == 0 for value in distance):\n # If all distances are zero, then use uniform distance\n distance = np.arange(len(distance))\n\n # print(distance)\n # print(np.cumsum(distance))\n\n # The number of observations contained in each cluster level\n # no_of_observations = np.arange(2, children.shape[0]+2)\n # print(no_of_observations)\n\n\n # Create linkage matrix and then plot the dendrogram\n # linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)\n # linkage_matrix = np.column_stack([children, np.cumsum(distance), no_of_observations]).astype(float)\n linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)\n # linkage_matrix = np.column_stack([children, norm_distances, no_of_observations]).astype(float)\n # print(linkage_matrix)\n # Plot the corresponding dendrogram\n\n # print(scipy.cluster.hierarchy.cut_tree(linkage_matrix, n_clusters=5))\n # print(color_threshold)\n\n # find what the height at which to cut the dendrogram\n if color_threshold is not None:\n if color_threshold == 1:\n color_threshold = 2\n if color_threshold > (len(linkage_matrix) + 1):\n color_threshold = (len(linkage_matrix) + 1)\n # print('Finding the right cut')\n color_threshold = linkage_matrix[-(color_threshold - 1)][2] - np.finfo(float).eps\n # color_threshold = 
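\n\n\n# --- Illustrative usage (added; not part of the original module). A minimal sketch of how\n# parse_inputs is driven from a sys.argv-style list. 'my_data.gct' is a placeholder filename,\n# and 'pearson' is assumed to be a key of input_col_distance_dict; adjust both to your data.\ndef _demo_parse_inputs():\n    fake_argv = ['HierarchicalClustering.py', 'my_data.gct', 'pearson', 'False']\n    # Returns the full 10-tuple of settings, with everything past output_distances defaulted.\n    return parse_inputs(fake_argv)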
\n\n\ndef plot_dendrogram(model, data, tree, axis, dist=mydist, clustering_method='average',\n                    title='no_title.png', color_threshold=None, orientation='top', **kwargs):\n    # modified from https://github.com/scikit-learn/scikit-learn/pull/3464/files\n    # Children of hierarchical clustering\n    children = model.children_\n\n    # Similarity between each pair of merged children\n    og_distances = better_dendodist(children, dist, tree, data, axis=axis, clustering_method=clustering_method)\n\n    # Turn each similarity into a non-negative value -- scipy's dendrogram needs this\n    if dist in [custom_euclidean_sim, absolute_uncentered_pearson_corr, absolute_pearson_corr]:\n        # These similarities are already non-negative: [0, inf) or [0, 1]\n        pass\n    else:  # all the correlation similarities, which live in [-1, 1]\n        og_distances = [temp + 1 for temp in og_distances]\n\n    # Now that all similarities are non-negative, invert them into distances for plotting purposes\n    og_distances = [1 / temp for temp in og_distances]\n    distance = np.cumsum(og_distances)\n\n    list_of_children = list(get_children(tree, leaves_are_self_children=False).values())\n    no_of_observations = [len(i) for i in list_of_children if i]\n    no_of_observations.append(len(no_of_observations) + 1)\n\n    if all(value == 0 for value in distance):\n        # If all distances are zero, then use uniform distances\n        distance = np.arange(len(distance))\n\n    # Create the linkage matrix scipy's dendrogram expects\n    linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)\n\n    # Find the height at which to cut the dendrogram so that color_threshold clusters are highlighted\n    if color_threshold is not None:\n        if color_threshold == 1:\n            color_threshold = 2\n        if color_threshold > (len(linkage_matrix) + 1):\n            color_threshold = (len(linkage_matrix) + 1)\n        color_threshold = linkage_matrix[-(color_threshold - 1)][2] - np.finfo(float).eps\n\n    # Plot the corresponding dendrogram\n    R = dendrogram(linkage_matrix, color_threshold=color_threshold, orientation=orientation, **kwargs)\n    order_of_columns = R['ivl']\n\n    return order_of_columns, linkage_matrix\n\n\ndef get_clusters(tree):\n    return\n\n\ndef get_cluster_classes(den, label='ivl'):\n    # from http://www.nxn.se/valent/extract-cluster-elements-by-color-in-python\n    clusters2idxs = defaultdict(list)\n    idxs2clusters = {}\n\n    # Exploratory: inspect the structure scipy's dendrogram returns\n    print(den.keys())\n    print(len(den['icoord']))\n    print(len(den['dcoord']))\n    print(len(den['ivl']))\n    print(len(den['leaves']))\n    print(den['leaves'])\n    print(len(den['color_list']))\n    print(den['color_list'])\n\n    return clusters2idxs, idxs2clusters\n\n\ndef order_leaves(model, data, tree, labels, axis=0, dist=mydist, reverse=False):\n    # Adapted from: https://stackoverflow.com/questions/12572436/calculate-ordering-of-dendrogram-leaves\n    children = model.children_\n    pseudo_linkage_matrix = np.column_stack([children]).astype(float)\n\n    n = len(pseudo_linkage_matrix) + 1\n\n    # Walk the merge list, expanding each internal node into the leaves it contains\n    cache = dict()\n    for k in range(len(pseudo_linkage_matrix)):\n        c1, c2 = int(pseudo_linkage_matrix[k][0]), int(pseudo_linkage_matrix[k][1])\n        c1 = [c1] if c1 < n else cache.pop(c1)\n        c2 = [c2] if c2 < n else cache.pop(c2)\n        cache[n + k] = c1 + c2\n    numeric_order_of_leaves = cache[2 * len(pseudo_linkage_matrix)]\n\n    if reverse:\n        numeric_order_of_leaves = list(reversed(numeric_order_of_leaves))\n\n    return [labels[i] for i in numeric_order_of_leaves]
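\n\n\n# --- Illustrative sketch (added). The core trick behind plot_dendrogram/order_leaves in\n# miniature: sklearn's AgglomerativeClustering exposes children_, and scipy's dendrogram wants\n# rows of [child1, child2, height, n_observations]. The heights and counts below are the same\n# placeholder scheme two_plot_two_dendrogram uses when real distances are degenerate; the\n# synthetic data is purely for illustration.\ndef _demo_linkage_from_children():\n    import numpy as np\n    from sklearn.cluster import AgglomerativeClustering\n    X = np.vstack([np.random.randn(3, 4), np.random.randn(2, 4) + 5])\n    model = AgglomerativeClustering(linkage='average').fit(X)\n    heights = np.arange(1, len(model.children_) + 1)     # placeholder merge heights\n    counts = np.arange(2, model.children_.shape[0] + 2)  # observations per merge\n    return np.column_stack([model.children_, heights, counts]).astype(float)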
\n\n\ndef two_plot_two_dendrogram(model, dist=mydist, **kwargs):\n    # modified from https://github.com/scikit-learn/scikit-learn/pull/3464/files\n    # Children of hierarchical clustering\n    children = model.children_\n    # Distances between each pair of children\n    distance = dendodist(children, dist)\n    if all(value == 0 for value in distance):\n        # If all distances are zero, then use uniform distances\n        distance = np.arange(len(distance))\n\n    # The number of observations contained in each cluster level\n    no_of_observations = np.arange(2, children.shape[0] + 2)\n    # Create linkage matrix and then plot the dendrogram\n    linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)\n    R = dendrogram(linkage_matrix, color_threshold=0, orientation='left', **kwargs)\n    order_of_rows = R['ivl']\n    plt.gca().get_xaxis().set_visible(False)\n\n    return list(reversed(order_of_rows))\n\n\ndef my_affinity_generic(M, metric):\n    return np.array([np.array([metric(a, b) for a in M]) for b in M])\n\n\ndef my_affinity_i(M):\n    return np.array([[information_coefficient_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_ai(M):\n    return np.array([[absolute_information_coefficient_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_p(M):\n    return np.array([[custom_pearson_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_s(M):\n    return np.array([[custom_spearman_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_k(M):\n    return np.array([[custom_kendall_tau_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_ap(M):\n    return np.array([[absolute_pearson_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_u(M):\n    return np.array([[uncentered_pearson_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_au(M):\n    return np.array([[absolute_uncentered_pearson_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_l1(M):\n    return np.array([[custom_manhattan_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_l2(M):\n    return np.array([[custom_euclidean_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_m(M):\n    return np.array([[custom_manhattan_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_c(M):\n    return np.array([[custom_cosine_dist(a, b) for a in M] for b in M])\n\n\ndef my_affinity_e(M):\n    return np.array([[custom_euclidean_dist(a, b) for a in M] for b in M])\n\n\ndef count_diff(x):\n    count = 0\n    compare = x[0]\n    for i in x:\n        if i != compare:\n            count += 1\n    return count\n\n\ndef count_mislabels(labels, true_labels):\n    # 2017-08-17: This assumes the true labels contain only two clusters (0 and 1).\n    set_a = labels[true_labels == 0]\n    set_b = labels[true_labels == 1]\n\n    if len(set_a) <= len(set_b):\n        shorter = set_a\n        longer = set_b\n    else:\n        shorter = set_b\n        longer = set_a\n\n    long_mode = mode(longer)  # this is what the label of the longer cluster should be\n    short_mode = 1 if long_mode == 0 else 0  # choose the other value for the label of the shorter cluster\n\n    return np.count_nonzero(longer != long_mode) + np.count_nonzero(shorter != short_mode)
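\n\n\n# --- Illustrative example (added). count_mislabels compares predicted cluster labels against\n# two-valued true labels; the result below assumes mode() returns the majority element.\ndef _demo_count_mislabels():\n    import numpy as np\n    labels = np.array([0, 0, 1, 1, 1, 0])\n    true_labels = np.array([0, 0, 0, 1, 1, 1])\n    # One element of each phenotype disagrees with its cluster's majority label, so this returns 2.\n    return count_mislabels(labels, true_labels)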
\n\n\ndef plot_heatmap(df, col_order, row_order, top=5, title_text='differentially expressed genes per phenotype'):\n    if len(col_order) != len(list(df)):\n        exit(\"Number of columns in dataframe do not match the columns provided for ordering.\")\n    if len(row_order) != len(df):\n        exit(\"Number of rows in dataframe do not match the rows provided for ordering.\")\n    df = df[col_order]\n    df = df.reindex(row_order)\n\n    plt.clf()\n    sns.heatmap(df.iloc[np.r_[0:top, -top:0], :], cmap='viridis')\n    plt.yticks(rotation=0)\n    plt.xticks(rotation=90)\n    plt.title('Top {} {}'.format(top, title_text))\n    plt.ylabel('Genes')\n    plt.xlabel('Sample')\n    plt.savefig('heatmap.png', dpi=300, bbox_inches=\"tight\")\n\n\ndef parse_data(gct_name, row_normalization=False, col_normalization=False, row_centering=None, col_centering=None):\n    try:\n        data_df = pd.read_csv(gct_name, sep='\\t', skiprows=2)\n    except ValueError:\n        # gct_name is already a DataFrame\n        data_df = gct_name\n\n    if data_df.index.name == 'Name':\n        data_df['Name'] = data_df.index\n    else:\n        if 'Name' not in list(data_df):\n            data_df['Name'] = data_df.iloc[:, 0]\n            data_df.drop(data_df.columns[0], axis=1, inplace=True)\n\n    if 'Description' not in list(data_df):\n        data_df['Description'] = data_df['Name']\n\n    data_df.set_index(data_df['Name'], inplace=True)\n    og_full_gct = data_df.copy()\n    og_full_gct.drop(['Name'], axis=1, inplace=True)\n    data_df.drop(['Name', 'Description'], axis=1, inplace=True)\n    plot_labels = list(og_full_gct.drop(['Description'], axis=1, inplace=False))\n    data = data_df.values\n    row_labels = data_df.index.values\n\n    og_data = data.copy()\n\n    data = normalize_dataframe(data_df, log_normalize=None,\n                               row_centering=row_centering, row_normalization=row_normalization,\n                               col_centering=col_centering, col_normalization=col_normalization).values\n\n    new_data_df = pd.DataFrame(data=data, index=data_df.index, columns=list(data_df))\n    new_full_gct = new_data_df.copy()\n    new_full_gct.insert(0, column='Description', value=og_full_gct['Description'])\n\n    return og_data, data_df, data, new_data_df, plot_labels, row_labels, og_full_gct, new_full_gct
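\n\n\n# --- Illustrative example (added). parse_data also accepts an in-memory DataFrame (the\n# except-ValueError branch above); the column layout below mimics a minimal GCT file. The\n# gene/sample names are placeholders.\ndef _demo_parse_data():\n    import pandas as pd\n    df = pd.DataFrame({'Name': ['geneA', 'geneB'],\n                       'sample1': [1.0, 2.0],\n                       'sample2': [3.0, 4.0]})\n    # Returns (og_data, data_df, data, new_data_df, plot_labels, row_labels, og_full_gct, new_full_gct)\n    return parse_data(df, row_centering='Mean', col_centering='No')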
\n\n\nstr2func = {\n    'custom_euclidean': my_affinity_e,\n    'uncentered_pearson': my_affinity_u,\n    'absolute_uncentered_pearson': my_affinity_au,\n    'information_coefficient': my_affinity_i,\n    'pearson': my_affinity_p,\n    'spearman': my_affinity_s,\n    'kendall': my_affinity_k,\n    'absolute_pearson': my_affinity_ap,\n    'l1': 'l1',\n    'l2': 'l2',\n    'manhattan': 'manhattan',\n    'cosine': 'cosine',\n    'euclidean': 'euclidean',\n}\n\nstr2affinity_func = {\n    'custom_euclidean': my_affinity_e,\n    'uncentered_pearson': my_affinity_u,\n    'absolute_uncentered_pearson': my_affinity_au,\n    'information_coefficient': my_affinity_i,\n    'pearson': my_affinity_p,\n    'spearman': my_affinity_s,\n    'kendall': my_affinity_k,\n    'absolute_pearson': my_affinity_ap,\n    'l1': my_affinity_l1,\n    'l2': my_affinity_l2,\n    'manhattan': my_affinity_m,\n    'cosine': my_affinity_c,\n    'euclidean': my_affinity_e,\n}\n\nstr2dist = {\n    'custom_euclidean': custom_euclidean_dist,\n    'uncentered_pearson': uncentered_pearson_dist,\n    'absolute_uncentered_pearson': absolute_uncentered_pearson_dist,\n    'information_coefficient': information_coefficient_dist,\n    'pearson': custom_pearson_dist,\n    'spearman': custom_spearman_dist,\n    'kendall': custom_kendall_tau_dist,\n    'absolute_pearson': absolute_pearson_dist,\n    'l1': custom_manhattan_dist,\n    'l2': custom_euclidean_dist,\n    'manhattan': custom_manhattan_dist,\n    'cosine': custom_cosine_dist,\n    'euclidean': custom_euclidean_dist,\n}\n\nstr2similarity = {\n    'custom_euclidean': custom_euclidean_sim,\n    'uncentered_pearson': uncentered_pearson_corr,\n    'absolute_uncentered_pearson': absolute_uncentered_pearson_corr,\n    'information_coefficient': information_coefficient,\n    'pearson': custom_pearson_corr,\n    'spearman': custom_spearman_corr,\n    'kendall': custom_kendall_tau_corr,\n    'absolute_pearson': absolute_pearson_corr,\n    'l1': custom_manhattan_sim,\n    'l2': custom_euclidean_sim,\n    'manhattan': custom_manhattan_sim,\n    'cosine': custom_cosine_sim,\n    'euclidean': custom_euclidean_sim,\n}\n\nlinkage_dic = {\n    'Pairwise average-linkage': 'average',\n    'Pairwise complete-linkage': 'complete',\n    'Pairwise ward-linkage': 'ward',\n    'average': 'average',\n    'complete': 'complete',\n    'ward': 'ward',\n}\n\n\ndef make_tree(model, data=None):\n    \"\"\"\n    Modified from:\n    https://stackoverflow.com/questions/27386641/how-to-traverse-a-tree-from-sklearn-agglomerativeclustering\n\n        import numpy as np\n        from sklearn.cluster import AgglomerativeClustering\n        import itertools\n\n        X = np.concatenate([np.random.randn(3, 10), np.random.randn(2, 10) + 100])\n        model = AgglomerativeClustering(linkage=\"average\", affinity=\"cosine\")\n        model.fit(X)\n\n        ii = itertools.count(X.shape[0])\n        [{'node_id': next(ii), 'left': x[0], 'right': x[1]} for x in model.children_]\n\n    You can also do dict(enumerate(model.children_, model.n_leaves_)),\n    which gives a dictionary where each key is the ID of a node\n    and the value is the pair of IDs of its children. -- user76284\n\n    :param model: a fitted sklearn AgglomerativeClustering model\n    :return: a dictionary where each key is the ID of a node and the value is the pair of IDs of its children.\n    \"\"\"\n    return dict(enumerate(model.children_, model.n_leaves_))
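\n\n\n# --- Illustrative example (added). For four 1-D points the fit produces three merges, so\n# make_tree returns internal node ids 4, 5, 6, each mapped to the pair of node ids it merges.\n# The data values are arbitrary.\ndef _demo_make_tree():\n    import numpy as np\n    from sklearn.cluster import AgglomerativeClustering\n    X = np.array([[0.0], [0.1], [5.0], [5.1]])\n    model = AgglomerativeClustering(n_clusters=2).fit(X)\n    return make_tree(model)  # e.g. {4: [...], 5: [...], 6: [...]}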
\n\n\ndef make_cdt(data, order_of_columns, order_of_rows, name='test.cdt', atr_companion=True, gtr_companion=False):\n    # TODO: if order_of_columns == None, then do arange(len(list(data)))\n    # TODO: if order_of_rows == None, then do arange(len(list(data)))\n    data.index.name = \"ID\"\n    data.rename(columns={'Description': 'Name'}, inplace=True)\n\n    temp = np.ones(len(data))\n    data.insert(loc=1, column='GWEIGHT', value=temp)  # adding an extra column\n\n    # These three lines add a row\n    data.loc['EWEIGHT'] = list(np.ones(len(list(data))))\n    newIndex = ['EWEIGHT'] + [ind for ind in data.index if ind != 'EWEIGHT']\n    data = data.reindex(index=newIndex)\n\n    if atr_companion:\n        new_AID = ['', '']\n        for element in range(len(order_of_columns)):\n            temp = 'ARRY' + str(element) + 'X'\n            new_AID.append(temp)\n\n        data.loc['AID'] = new_AID\n        newIndex = ['AID'] + [ind for ind in data.index if ind != 'AID']\n        data = data.reindex(index=newIndex)\n        data = data[['Name', 'GWEIGHT'] + order_of_columns]\n    if gtr_companion:\n        new_GID = ['']\n        if atr_companion:\n            new_GID = ['AID', 'EWEIGHT']  # This is to make sure we fit the CDT format\n        for element in range(len(order_of_rows)):\n            temp = 'GENE' + str(element) + 'X'\n            new_GID.append(temp)\n\n        data.insert(loc=0, column='GID', value=new_GID)  # adding an extra column\n        data.insert(loc=0, column=data.index.name, value=data.index)  # Making the index a column\n\n        # Reorder to match the dendrogram\n        temp = ['AID', 'EWEIGHT'] + order_of_rows\n        data = data.reindex(temp)\n\n        # Making the 'GID' the index -- for printing purposes\n        data.index = data['GID']\n        data.index.name = 'GID'\n        data.drop(['GID'], axis=1, inplace=True)\n\n    # The first three lines need to be written separately due to a quirk in the CDT file format:\n    f = open(name, 'w')\n    f.write(data.to_csv(sep='\\t', index=True, header=True))\n    f.close()\n    data = data.round(2)\n    return\n\n\ndef make_atr(col_tree_dic, data, dist, clustering_method='average', file_name='test.atr'):\n    max_val = len(col_tree_dic)\n\n    # compute distances\n    distance_dic = {}\n    for node, children in col_tree_dic.items():\n        val = centroid_distances(children[0], children[1], tree=col_tree_dic, data=data, axis=1,\n                                 distance=dist, clustering_method=clustering_method)\n        distance_dic[node] = val\n\n    f = open(file_name, 'w')\n    for node, children in col_tree_dic.items():\n        elements = [translate_tree(node, max_val, 'atr'), translate_tree(children[0], max_val, 'atr'),\n                    translate_tree(children[1], max_val, 'atr'),\n                    \"{num:.{width}f}\".format(num=distance_dic[node], width=SIGNIFICANT_DIGITS)]\n        f.write('\\t'.join(elements) + '\\n')\n    f.close()\n\n    return
\n\n\ndef make_gtr(row_tree_dic, data, dist, clustering_method='average', file_name='test.gtr'):\n    max_val = len(row_tree_dic)\n\n    # compute distances\n    distance_dic = {}\n    for node, children in row_tree_dic.items():\n        val = centroid_distances(children[0], children[1], tree=row_tree_dic, data=data, axis=0,\n                                 distance=dist, clustering_method=clustering_method)\n        distance_dic[node] = val\n\n    f = open(file_name, 'w')\n    for node, children in row_tree_dic.items():\n        elements = [translate_tree(node, max_val, 'gtr'), translate_tree(children[0], max_val, 'gtr'),\n                    translate_tree(children[1], max_val, 'gtr'),\n                    \"{num:.{width}f}\".format(num=distance_dic[node], width=SIGNIFICANT_DIGITS)]\n        f.write('\\t'.join(elements) + '\\n')\n    f.close()\n\n    return\n\n\ndef translate_tree(what, length, g_or_a):\n    if 'a' in g_or_a:\n        if what <= length:\n            translation = 'ARRY' + str(what) + 'X'\n        else:\n            translation = 'NODE' + str(what - length) + 'X'\n    elif 'g' in g_or_a:\n        if what <= length:\n            translation = 'GENE' + str(what) + 'X'\n        else:\n            translation = 'NODE' + str(what - length) + 'X'\n    else:\n        translation = []\n        print('This function does not support g_or_a=', g_or_a)\n    return translation
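\n\n\n# --- Illustrative example (added). translate_tree maps numeric node ids to the id strings the\n# .atr/.gtr files use: leaves become ARRY<i>X / GENE<i>X and internal nodes become NODE<i>X.\ndef _demo_translate_tree():\n    assert translate_tree(2, length=5, g_or_a='atr') == 'ARRY2X'\n    assert translate_tree(8, length=5, g_or_a='gtr') == 'NODE3X'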
\n\n\ndef get_children(tree, leaves_are_self_children=False):\n    # Expand every node of the tree into the full list of leaves it contains (recursively)\n    expanded_tree = {}\n    for node in range(max(tree.keys())):\n        if node <= len(tree):\n            if leaves_are_self_children:\n                expanded_tree[node] = [node]\n            else:\n                expanded_tree[node] = []\n        else:\n            expanded_tree[node] = list_children_single_node(node, tree, leaves_are_self_children)\n\n    return expanded_tree\n\n\ndef list_children_single_node(node, tree, leaves_are_self_children=False, only_leaves_are_children=True):\n    if node <= len(tree):\n        if leaves_are_self_children:\n            children = [node]\n        else:\n            children = []\n    else:\n        children = list(tree[node])\n\n    # Check each child, and add their children to the list\n    for child in children:\n        if child <= len(tree):\n            pass\n        else:\n            children += list_children_single_node(child, tree, only_leaves_are_children=True)\n    if only_leaves_are_children:\n        return [i for i in sorted(np.unique(children)) if i <= len(tree)]\n    else:\n        return sorted(np.unique(children))\n\n\ndef centroid_distances(node_a, node_b, tree, data, axis=0, distance=mydist, clustering_method='average'):\n    if axis == 0:\n        pass\n    elif axis == 1:\n        data = np.transpose(data)\n    else:\n        exit(\"Variable 'data' does not have that many axes (╯°□°)╯︵ ┻━┻\")\n\n    children_of_a = list_children_single_node(node_a, tree=tree, leaves_are_self_children=True)\n    children_of_b = list_children_single_node(node_b, tree=tree, leaves_are_self_children=True)\n\n    distances_list = []\n    if clustering_method == 'average':\n        for pair in itertools.product(data[children_of_a], data[children_of_b]):\n            distances_list.append(distance(pair[0], pair[1]))\n        return np.average(distances_list)\n    elif clustering_method == 'complete':\n        for pair in itertools.product(data[children_of_a], data[children_of_b]):\n            distances_list.append(distance(pair[0], pair[1]))\n        return np.min(distances_list)\n    else:\n        exit(\"Only 'average' and 'complete' clustering methods are accepted at the moment (>_<)\")\n\n\ndef euclidian_similarity(x, y):\n    # Map a euclidean distance into a (0, 1] similarity\n    dist = mydist(x, y)\n    return 1 / (np.exp(dist))
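\n\n\n# --- Illustrative example (added). centroid_distances averages the pairwise metric between the\n# leaves under two nodes. Here, with 3 leaves (0, 1, 2) and internal node 3 = merge of leaves\n# 0 and 1, it averages custom_euclidean_dist between leaf 2 and each member of node 3. The tiny\n# tree and data are synthetic.\ndef _demo_centroid_distances():\n    import numpy as np\n    data = np.array([[0.0, 0.0], [1.0, 1.0], [4.0, 4.0]])\n    tree = {3: [0, 1], 4: [2, 3]}\n    return centroid_distances(2, 3, tree=tree, data=data, axis=0,\n                              distance=custom_euclidean_dist, clustering_method='average')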
\n\n\ndef better_dendodist(children, distance, tree, data, axis, clustering_method='average'):\n    distances_list = []\n    for pair in children:\n        distances_list.append(centroid_distances(pair[0], pair[1], tree, data, axis, distance=distance,\n                                                 clustering_method=clustering_method))\n    return distances_list\n\n\ndef HierarchicalClustering(pwd: \"The current directory\",\n                           gct_name: \"Gene expression data filename (.gct file) or Pandas DataFrame \"\n                                     \"where rows are genes and columns are samples\",\n                           col_distance_metric: \"The function to be used when comparing the distance/similarity of \"\n                                                \"the columns in the gct_name dataset\",\n                           row_distance_metric: \"The function to be used when comparing the distance/similarity of \"\n                                                \"the rows in the gct_name dataset\",\n                           clustering_method: \"Type of linkage to use\" = 'average',\n                           output_base_name: \"Base name for output file\" = 'HC_output',\n                           row_normalization: \"Whether to normalize each row (gene) in the data\" = False,\n                           col_normalization: \"Whether to normalize each column (sample) in the data\" = False,\n                           row_centering: \"How to center each row (gene) in the data\" = 'Mean',\n                           col_centering: \"How to center each column (sample) in the data\" = 'Mean',\n                           output_distances: \"Whether or not to output the pair-wise distance matrix. \"\n                                             \"If true, the distance between each column will be called, \"\n                                             \"which can be very computationally intensive. \"\n                                             \"If unsure, leave as False.\" = False,\n                           custom_plot: \"Plot the dendrograms by Genes, Samples, or Both\" = 'Both',\n                           clusters_to_highlight: \"How many clusters to highlight in the dendrogram\" = 2,\n                           show: \"Whether to show the plot at the end\" = False):\n    \"\"\"\n    This function performs hierarchical clustering to group samples (columns) with similar phenotypes\n    and/or genes (rows) with similar expression profiles.\n    :param pwd: The current directory\n    :param gct_name: Gene expression data filename (.gct file) or Pandas DataFrame where rows are genes and\n    columns are samples\n    :param col_distance_metric: The function to be used when comparing the distance/similarity of\n    the columns in the gct_name dataset\n    :param row_distance_metric: The function to be used when comparing the distance/similarity of\n    the rows in the gct_name dataset\n    :param clustering_method: Type of linkage to use\n    :param output_base_name: Base name for output file\n    :param row_normalization: Whether to normalize each row (gene) in the data\n    :param col_normalization: Whether to normalize each column (sample) in the data\n    :param row_centering: How to center each row (gene) in the data\n    :param col_centering: How to center each column (sample) in the data\n    :param output_distances: Whether or not to output the pair-wise distance matrix.\n    If true, the distance between each column will be called,\n    which can be very computationally intensive.\n    If unsure, leave as False\n    :param custom_plot: Plot the dendrograms by Genes, Samples, or Both\n    :param clusters_to_highlight: How many clusters to highlight in the dendrogram\n    :param show: Whether to show the plot at the end\n    :return: the fitted column and row AgglomerativeClustering models\n    \"\"\"\n\n    if col_distance_metric == \"No_column_clustering\":\n        custom_plot = 'Genes'\n    if row_distance_metric == \"No_row_clustering\":\n        custom_plot = 'Samples'\n\n    og_data, og_data_df, data, data_df, col_labels, row_labels, og_full_gct, new_full_gct = \\\n        parse_data(gct_name, row_normalization, col_normalization, row_centering, col_centering)\n    order_of_columns = list(data_df)\n    order_of_rows = list(data_df.index)\n\n    data_transpose = np.transpose(data)\n\n    atr_companion = False\n    col_model = None\n    col_tree = None\n\n    gtr_companion = False\n    row_model = None\n    row_tree = None\n\n    AID = None\n    GID = None\n\n    if col_distance_metric != 'No_column_clustering':\n        atr_companion = True\n        col_model = AgglomerativeClustering(linkage=linkage_dic[clustering_method], n_clusters=clusters_to_highlight,\n                                            affinity=str2func[col_distance_metric])\n        col_model.fit(data_transpose)\n        col_tree = make_tree(col_model)\n        order_of_columns = order_leaves(col_model, tree=col_tree, data=data_transpose,\n                                        dist=str2similarity[col_distance_metric], labels=col_labels, reverse=True)\n\n        path_to_atr = output_base_name + '.atr'\n        make_atr(col_tree, file_name=path_to_atr, data=data,\n                 dist=str2similarity[col_distance_metric], clustering_method=linkage_dic[clustering_method])\n\n    if row_distance_metric != 'No_row_clustering':\n        gtr_companion = True\n        row_model = AgglomerativeClustering(linkage=linkage_dic[clustering_method], n_clusters=clusters_to_highlight,\n                                            affinity=str2func[row_distance_metric])\n        row_model.fit(data)\n        row_tree = make_tree(row_model)\n        order_of_rows = order_leaves(row_model, tree=row_tree, data=data,\n                                     dist=str2similarity[row_distance_metric], labels=row_labels)\n        path_to_gtr = output_base_name + '.gtr'\n        make_gtr(row_tree, data=data, file_name=output_base_name + '.gtr', dist=str2similarity[row_distance_metric])\n\n    if output_distances:\n        # TODO: check which col or row was selected, or both\n        row_distance_matrix = str2affinity_func[row_distance_metric](data)\n        dist_file = open(output_base_name + '_pairwise_distances.csv', 'w')\n        dist_file.write('labels,')\n        dist_file.write(\",\".join(col_model.labels_.astype(str)) + \"\\n\")\n        dist_file.write('samples,')\n        dist_file.write(\",\".join(list(data_df)) + \"\\n\")\n        i = 0\n        for row in row_distance_matrix:\n            dist_file.write('distances row=' + str(i) + \",\" + \",\".join(row.astype(str)) + \"\\n\")\n            i += 1\n\n    path_to_cdt = output_base_name + '.cdt'\n    make_cdt(data=new_full_gct, name=path_to_cdt, atr_companion=atr_companion,\n             gtr_companion=gtr_companion,\n             order_of_columns=order_of_columns, order_of_rows=order_of_rows)\n\n    if custom_plot == 'Samples':\n        # Plotting the heatmap with dendrogram\n        plt.clf()\n        fig = plt.figure(figsize=(16, 9))\n        gs = gridspec.GridSpec(2, 1, height_ratios=[1, 5])\n        gs.update(wspace=0.0, hspace=0.0)\n        ax0 = plt.subplot(gs[0])  # Doing dendrogram first\n        ax0.axis('off')\n\n        col_order, link = plot_dendrogram(col_model, data, col_tree, axis=1,\n                                          dist=str2similarity[col_distance_metric],\n                                          clustering_method=clustering_method,\n                                          color_threshold=clusters_to_highlight,\n                                          title='no_title.png', orientation='top')\n        col_order = [int(i) for i in col_order]\n        named_col_order = [col_labels[i] for i in col_order]\n\n        ax1 = plt.subplot(gs[1])\n\n        # Row-normalizing for display purposes only:\n        data_df = data_df.subtract(data_df.min(axis=1), axis=0)\n        data_df = data_df.div(data_df.max(axis=1), axis=0)\n\n        sns.heatmap(data_df[named_col_order], ax=ax1, cbar=False, cmap='bwr')\n        [label.set_rotation(90) for label in ax1.get_xticklabels()]\n        file_path_plot = output_base_name + '.pdf'\n        plt.savefig(file_path_plot, bbox_inches='tight')\n\n        print(\"----------------------------------------------------------------------\")\n        print(\"The PDF of this heatmap can be downloaded here:\")\n        display(HTML('<a href=\"' + file_path_plot + '\" target=\"_blank\">PDF of the heatmap</a>'))\n        print(\"----------------------------------------------------------------------\")\n        print(\"The CDT which is compatible with HierarchicalClusteringViewer is here:\")\n        display(HTML('<a href=\"' + path_to_cdt + '\" target=\"_blank\">TXT containing the output data</a>'))\n        print(\"----------------------------------------------------------------------\")\n        print(\"The ATR which is compatible with HierarchicalClusteringViewer is here:\")\n        display(HTML('<a href=\"' + path_to_atr + '\" target=\"_blank\">TXT containing the output data</a>'))\n        print(\"----------------------------------------------------------------------\")\n\n        if show:\n            plt.show()\n\n        cls_list = col_model.labels_\n        list2cls(np.array(list2intlist(cls_list)), name_of_out=output_base_name + '.cls', sep=' ')\n\n    if custom_plot == 'Genes':\n        # Plotting the heatmap with dendrogram\n        plt.clf()\n        fig = plt.figure(figsize=(16, 9))\n        gs = gridspec.GridSpec(1, 2, width_ratios=[5, 1])\n        gs.update(wspace=0.0, hspace=0.0)\n        ax0 = plt.subplot(gs[1])  # Doing dendrogram first\n        ax0.axis('off')\n\n        row_order, link = plot_dendrogram(row_model, data_transpose, row_tree, axis=1,\n                                          dist=str2similarity[row_distance_metric],\n                                          clustering_method=clustering_method,\n                                          color_threshold=clusters_to_highlight,\n                                          orientation='right', title='no_title.png')\n        # dendrogram returns leaf ids as strings; iloc needs integers\n        row_order = [int(i) for i in row_order]\n\n        ax1 = plt.subplot(gs[0])\n\n        # Row-normalizing for display purposes only:\n        data_df = data_df.subtract(data_df.min(axis=1), axis=0)\n        data_df = data_df.div(data_df.max(axis=1), axis=0)\n\n        sns.heatmap(data_df.iloc[row_order], ax=ax1, cbar=False, cmap='bwr')\n        [label.set_rotation(90) for label in ax1.get_xticklabels()]\n        file_path_plot = output_base_name + '.pdf'\n        plt.savefig(file_path_plot, bbox_inches='tight')\n\n        print(\"----------------------------------------------------------------------\")\n        print(\"The PDF of this heatmap can be downloaded here:\")\n        display(HTML('<a href=\"' + file_path_plot + '\" target=\"_blank\">PDF of the heatmap</a>'))\n        print(\"----------------------------------------------------------------------\")\n        print(\"The CDT which is compatible with HierarchicalClusteringViewer is here:\")\n        display(HTML('<a href=\"' + path_to_cdt + '\" target=\"_blank\">TXT containing the output data</a>'))\n        print(\"----------------------------------------------------------------------\")\n        print(\"The GTR which is compatible with HierarchicalClusteringViewer is here:\")\n        display(HTML('<a href=\"' + path_to_gtr + '\" target=\"_blank\">TXT containing the output data</a>'))\n        print(\"----------------------------------------------------------------------\")\n\n        if show:\n            plt.show()\n\n    if custom_plot == 'Both':\n        # Plotting the heatmap with dendrogram\n        plt.clf()\n        fig = plt.figure(figsize=(16, 9))\n        gs = gridspec.GridSpec(2, 2, width_ratios=[5, 1], height_ratios=[1, 5])\n        gs.update(wspace=0.0, hspace=0.0)\n\n        # Doing TOP dendrogram first\n        ax0 = plt.subplot(gs[0])\n        ax0.axis('off')\n\n        col_order, link = plot_dendrogram(col_model, data, col_tree, axis=1,\n                                          dist=str2similarity[col_distance_metric],\n                                          clustering_method=clustering_method,\n                                          color_threshold=clusters_to_highlight,\n                                          title='no_title.png', orientation='top')\n        col_order = [int(i) for i in col_order]\n        named_col_order = [col_labels[i] for i in col_order]\n\n        # Doing RIGHT dendrogram\n        ax3 = plt.subplot(gs[3])\n        ax3.axis('off')\n\n        row_order, link = plot_dendrogram(row_model, data_transpose, row_tree, axis=1,\n                                          dist=str2similarity[row_distance_metric],\n                                          clustering_method=clustering_method,\n                                          color_threshold=clusters_to_highlight,\n                                          orientation='right', title='no_title.png')\n        # dendrogram returns leaf ids as strings; iloc needs integers\n        row_order = [int(i) for i in row_order]\n\n        # Plotting the heatmap now\n        ax1 = plt.subplot(gs[2])\n\n        # Row-normalizing for display purposes only:\n        data_df = data_df.subtract(data_df.min(axis=1), axis=0)\n        data_df = data_df.div(data_df.max(axis=1), axis=0)\n\n        sns.heatmap(data_df[named_col_order].iloc[row_order], ax=ax1, cbar=False, cmap='bwr')\n        [label.set_rotation(90) for label in ax1.get_xticklabels()]\n        file_path_plot = output_base_name + '.pdf'\n        plt.savefig(file_path_plot, bbox_inches='tight')\n\n        print(\"----------------------------------------------------------------------\")\n        print(\"The PDF of this heatmap can be downloaded here:\")\n        display(HTML('<a href=\"' + file_path_plot + '\" target=\"_blank\">PDF of the heatmap</a>'))\n        print(\"----------------------------------------------------------------------\")\n        print(\"The CDT which is compatible with HierarchicalClusteringViewer is here:\")\n        display(HTML('<a href=\"' + path_to_cdt + '\" target=\"_blank\">TXT containing the output data</a>'))\n        print(\"----------------------------------------------------------------------\")\n        print(\"The GTR which is compatible with HierarchicalClusteringViewer is here:\")\n        display(HTML('<a href=\"' + path_to_gtr + '\" target=\"_blank\">TXT containing the output data</a>'))\n        print(\"----------------------------------------------------------------------\")\n\n        if show:\n            plt.show()\n\n    return col_model, row_model
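\n\n\n# --- End-to-end sketch (added). Clusters the samples of a small synthetic GCT-style DataFrame;\n# the 'Name' column mimics the layout parse_data expects and the output base name 'demo_out'\n# is arbitrary. Note this writes demo_out.atr/.cdt/.cls/.pdf files when called.\ndef _demo_hierarchical_clustering():\n    import numpy as np\n    import pandas as pd\n    rng = np.random.RandomState(0)\n    df = pd.DataFrame(rng.randn(10, 6), columns=['s%d' % i for i in range(6)])\n    df.insert(0, 'Name', ['g%d' % i for i in range(10)])\n    return HierarchicalClustering(pwd='.', gct_name=df,\n                                  col_distance_metric='euclidean',\n                                  row_distance_metric='No_row_clustering',\n                                  clustering_method='average',\n                                  output_base_name='demo_out',\n                                  custom_plot='Samples', show=False)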
\n\n\ndef hc_samples(\n        input_gene_expression: \"gene expression data filename (.gct file) where rows are genes and columns are samples\",\n        clustering_type: \"single or consensus -- Only single is supported at the moment\",\n        distance_metric: \"the function to be used when comparing the distance/similarity of the columns in the \"\n                         \"input_gene_expression dataset\",\n        file_basename: \"the name to use when naming output files\" = 'HC_out',\n        clusters_to_highlight: \"how many clusters to highlight in the dendrogram\" = None):\n    \"\"\"\n    Perform hierarchical clustering to group samples with similar phenotypes.\n    :param input_gene_expression: str; gene expression data filename (.gct file)\n    where rows are genes and columns are samples\n    :param clustering_type: str; single or consensus\n    :param distance_metric: str; the function to be used when comparing the distance/similarity of the columns\n    in the input_gene_expression dataset\n    :param file_basename: str; the name to use when naming output files\n    :param clusters_to_highlight: int; how many clusters to highlight in the dendrogram\n    :return: object; Sklearn's AgglomerativeClustering fitted model\n    \"\"\"\n\n    print(\"Currently clustering_type is being ignored; only 'single' is supported.\")\n    pwd = '.'\n    gct_name = input_gene_expression\n    col_distance_metric = distance_metric\n    output_distances = False\n    row_distance_metric = 'No_row_clustering'\n    clustering_method = 'average'\n    output_base_name = file_basename\n    row_normalization = False\n    col_normalization = False\n    row_centering = 'Mean'\n    col_centering = 'Mean'\n    custom_plot = 'Samples'\n    show = True\n\n    print(\"Now we will start performing hierarchical clustering, this may take a little while.\")\n\n    col_model, row_model = HierarchicalClustering(pwd,\n                                                  gct_name,\n                                                  col_distance_metric,\n                                                  row_distance_metric,\n                                                  clustering_method,\n                                                  output_base_name,\n                                                  row_normalization,\n                                                  col_normalization,\n                                                  row_centering,\n                                                  col_centering,\n                                                  output_distances,\n                                                  custom_plot,\n                                                  clusters_to_highlight,\n                                                  show)\n    print(\"Done with Hierarchical Clustering!\")\n\n    return col_model\n\n\ndef hc_genes(\n        input_gene_expression: \"gene expression data filename (.gct file) where rows are genes and columns are samples\",\n        clustering_type: \"single or consensus -- Only single is supported at the moment\",\n        distance_metric: \"the function to be used when comparing the distance/similarity of the rows in the \"\n                         \"input_gene_expression dataset\",\n        file_basename: \"the name to use when naming output files\" = 'HC_out',\n        clusters_to_highlight: \"how many clusters to highlight in the dendrogram\" = None):\n    \"\"\"\n    Perform hierarchical clustering to group genes with similar expression profile.\n    :param input_gene_expression: str; gene expression data filename (.gct file)\n    where rows are genes and columns are samples\n    :param clustering_type: str; single or consensus\n    :param distance_metric: str; the function to be used when comparing the distance/similarity of the rows\n    in the input_gene_expression dataset\n    :param file_basename: str; the name to use when naming output files\n    :param clusters_to_highlight: int; how many clusters to highlight in the dendrogram\n    :return: object; Sklearn's AgglomerativeClustering fitted model\n    \"\"\"\n\n    print(\"Currently clustering_type is being ignored; only 'single' is supported.\")\n    pwd = '.'\n    gct_name = input_gene_expression\n    col_distance_metric = 'No_column_clustering'\n    output_distances = False\n    row_distance_metric = distance_metric\n    clustering_method = 'average'\n    output_base_name = file_basename\n    row_normalization = False\n    col_normalization = False\n    row_centering = 'Mean'\n    col_centering = 'Mean'\n    custom_plot = 'Genes'\n    show = True\n\n    print(\"Now we will start performing hierarchical clustering, this may take a little while.\")\n\n    col_model, row_model = HierarchicalClustering(pwd,\n                                                  gct_name,\n                                                  col_distance_metric,\n                                                  row_distance_metric,\n                                                  clustering_method,\n                                                  output_base_name,\n                                                  row_normalization,\n                                                  col_normalization,\n                                                  row_centering,\n                                                  col_centering,\n                                                  output_distances,\n                                                  custom_plot,\n                                                  clusters_to_highlight,\n                                                  show)\n    print(\"Done with Hierarchical Clustering!\")\n\n    return row_model
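\n\n\n# --- Illustrative usage (added). 'my_expression_data.gct' is a placeholder; point it at a real\n# GCT file (or URL) before calling. hc_genes is called the same way and returns the row model.\ndef _demo_hc_samples():\n    return hc_samples(input_gene_expression='my_expression_data.gct',\n                      clustering_type='single',\n                      distance_metric='euclidean',\n                      file_basename='demo_HC',\n                      clusters_to_highlight=2)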
Value is the base of the logarithm to use\n    :param row_centering: Whether or not to subtract the mean or median from every element of each row\n    :param row_normalization: Whether or not to scale each row to unit length (divide it by its L2 norm)\n    :param col_centering: Whether or not to subtract the mean or median from every element of each column\n    :param col_normalization: Whether or not to scale each column to unit length (divide it by its L2 norm)\n    :return:\n    \"\"\"\n\n    if (log_normalize is None) \\\n            and (row_centering == 'No') and (col_centering == 'No') \\\n            and (row_normalization is False) and (col_normalization is False):\n        print(\"No normalization has been requested ಠ_ಠ¯\")\n        return df\n\n    data = df.to_numpy()\n\n    # Log Normalizing\n    if log_normalize is not None:\n        print(\"I'm sorry, log-normalization is not supported at the moment (u_u)\")\n\n    # Row Centering\n    if row_centering != 'No':\n        if row_centering == 'Mean':\n            row_means = np.mean(data, axis=1)\n            row_means_col_vec = row_means.reshape((data.shape[0], 1))\n            data = data - row_means_col_vec\n        elif row_centering == 'Median':\n            row_medians = np.median(data, axis=1)\n            row_medians_col_vec = row_medians.reshape((data.shape[0], 1))\n            data = data - row_medians_col_vec\n        else:\n            print(\"row_centering has an unexpected value:\", row_centering)\n\n    # Row Normalizing\n    if row_normalization:\n        row_norm = np.sum(data * data, axis=1)\n        row_norm_col_vec = row_norm.reshape((data.shape[0], 1))\n        data = data / np.sqrt(row_norm_col_vec)\n\n    # Column Centering\n    if col_centering != 'No':\n        if col_centering == 'Mean':\n            col_means = np.mean(data, axis=0)\n            data = data - col_means\n        elif col_centering == 'Median':\n            col_medians = np.median(data, axis=0)\n            data = data - col_medians\n        else:\n            print(\"col_centering has an unexpected value: \", col_centering)\n\n    # Column Normalizing\n    if col_normalization:\n        col_norm = np.sum(data * data, axis=0)\n        data = data / np.sqrt(col_norm)\n\n    normalized_df = pd.DataFrame(data=data, index=df.index, columns=list(df))\n\n    return normalized_df\n\n\ndef display_heatmap(data,\n                    name='heatmap',\n                    log_normalize=None,\n                    row_centering: \"How to center each row (gene) in the data\" = 'No',\n                    row_normalization: \"Whether to normalize each row (gene) in the data\" = True,\n                    col_centering: \"How to center each column (sample) in the data\" = 'No',\n                    col_normalization: \"Whether to normalize each column (sample) in the data\" = False,\n                    mostrar=False):\n\n    if isinstance(data, pd.DataFrame):\n        data_to_plot = data.copy()\n    elif os.path.isfile(data):\n        data_to_plot = pd.read_table(data, skiprows=2, sep='\\t')\n        data_to_plot.set_index('Name', inplace=True)\n        data_to_plot.drop('Description', axis=1, inplace=True)\n    else:\n        try:\n            data_to_plot = pd.read_table(data, skiprows=2, sep='\\t')\n        except urllib.error.HTTPError:\n            print(\"I don't know what the variable 'data' contains.\")\n            print('data=')\n            print(data)\n            exit(\"If this is a url it may not be accessible.\\n\"\n                 \"(╯°□°)╯︵ ┻━┻\")\n        data_to_plot.set_index('Name', inplace=True)\n        data_to_plot.drop('Description', axis=1, inplace=True)\n\n    data_to_plot = normalize_dataframe(data_to_plot, log_normalize=log_normalize,\n                                       row_centering=row_centering, row_normalization=row_normalization,\n                                       col_centering=col_centering, col_normalization=col_normalization)\n\n    plt.clf()\n\n    # # figure reshape from:\n    # # https://stackoverflow.com/questions/35127920/overlapping-yticklabels-is-it-possible-to-control-cell-size-of-heatmap-in-seabo\n    # # and from:\n    # # 
https://matplotlib.org/users/customizing.html\n\n    # get the tick label font size\n    fontsize_pt = plt.rcParams['ytick.labelsize']\n    dpi = 72.27\n\n    # compute the matrix height in points and inches\n    matrix_height_pt = fontsize_pt * data_to_plot.shape[0]\n    matrix_height_in = (matrix_height_pt / dpi) * 1.2\n\n    # compute the required figure height\n    top_margin = 0.01  # in percentage of the figure height\n    bottom_margin = 0.01  # in percentage of the figure height\n    figure_height = matrix_height_in / (1 - top_margin - bottom_margin)\n\n    # build the figure instance with the desired height\n    fig, ax = plt.subplots(\n        figsize=(6, figure_height),\n        gridspec_kw=dict(top=1 - top_margin, bottom=bottom_margin))\n\n    sns.heatmap(data_to_plot, cmap='bwr', yticklabels=True, square=True,\n                cbar_kws={'use_gridspec': False,\n                          'location': \"right\",\n                          'shrink': 0.5,\n                          'label': ''}\n\n                )\n\n    if not name.endswith('.pdf'):\n        name = name + '.pdf'\n\n    plt.savefig(name, dpi=dpi, bbox_inches='tight')\n    # plt.savefig(name, dpi=dpi)\n    print(name, \"has been created!\")\n\n    if mostrar:\n        # print(data_to_plot.head())\n        plt.show()\n\n    print(\"The PDF of this heatmap can be downloaded here:\")\n    display(HTML('<a href=\"' + name + '\" target=\"_blank\">PDF of the heatmap</a>'))\n    return\n" ]
[ [ "numpy.sqrt", "numpy.cumsum", "numpy.mean", "sklearn.cluster.AgglomerativeClustering", "numpy.exp", "matplotlib.pyplot.gca", "pandas.read_csv", "numpy.unique", "numpy.arange", "numpy.finfo", "matplotlib.pyplot.subplot", "matplotlib.gridspec.GridSpec", "numpy.count_nonzero", "numpy.column_stack", "matplotlib.pyplot.figure", "numpy.min", "numpy.median", "matplotlib.pyplot.savefig", "pandas.read_table", "numpy.transpose", "scipy.cluster.hierarchy.dendrogram", "matplotlib.pyplot.show", "numpy.sum", "matplotlib.pyplot.xticks", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.clf", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.yticks", "numpy.average" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] } ]
jsevo/taxumap
[ "1a02518dca822a65847994910177c74607243dae", "1a02518dca822a65847994910177c74607243dae" ]
[ "taxumap-manuscript-notebooks/embeddings.py", "taxumap/tools.py" ]
[ "from sklearn.manifold import TSNE \nfrom sklearn.decomposition import PCA, KernelPCA\nfrom umap import UMAP\nfrom sklearn.preprocessing import MinMaxScaler\n\nRUNEMBEDDINGS = False\nif RUNEMBEDDINGS:\n #simple PCA\n pcaembedding = PCA(n_components=2).fit_transform(XASV.fillna(0))\n \n #base embedding (kernel pca)\n kernelpcaembedding = KernelPCA(n_components=2).fit_transform(XASV.fillna(0))\n \n # non-phylo umap\n embedding_non_phylo_unscaled = UMAP(n_neighbors=120,min_dist=0.2, metric=\"manhattan\").fit_transform(XASV)\n \n \n # embedding_non_phylo_scaled = UMAP(n_neighbors=120,min_dist=0.2, metric=\"manhattan\").fit_transform(MinMaxScaler().fit_transform(XASV))\n\n\nRUNTAXUMAPS = False\nif RUNTAXUMAPS: \n from taxumap.taxumap import taxumap\n agg_levels = [\"Phylum\", \"Family\"]\n withscaling = False # do not scale the columns of X\n distanceperlevel = False # do not calculate a separate distance matrix at each phylogenetic level because we are using the manhattan distance \n distancemetric = \"manhattan\"\n printfigure=False\n printwithdiversity=False #dont plot the average diversity in the background of the scatter plot\n X_in = XASV\n tax = taxonomy\n withusercolors=taxonomy_meta[[\"HexColor\"]]\n\n\n# TAXUMAP, X_embedded, taxumap_Xscaled, taxumap_X = taxumap(agg_levels,\n# withscaling,\n# distanceperlevel,\n# distancemetric,\n# printfigure,\n# printwithdiversity,\n# X_in,\n# tax,\n# withusercolors,\n# debug=True, #return tables\n# save_embedding=False #save xy coordinates\n# );\n \n TAXUMAP_alllevels, X_embedded_alllevels, taxumap_Xscaled_alllevels, taxumap_X_alllevels = taxumap([\"Phylum\", \"Class\", \"Order\", \"Family\", \"Genus\"],\n withscaling,\n distanceperlevel,\n distancemetric,\n printfigure,\n printwithdiversity,\n X_in,\n tax,\n withusercolors,\n debug=True, #return tables\n save_embedding=False #save xy coordinates\n );\n\n# TAXUMAPSCALED, X_embedded_scaled, taxumap_Xscaled_scaled, taxumap_X_scaled = taxumap(\n# agg_levels,\n# True,\n# False,\n# \"euclidean\",\n# printfigure,\n# printwithdiversity,\n# X_in,\n# tax,\n# withusercolors,\n# debug=True, #return tables\n# save_embedding=True#save xy coordinates\n# );\n\n# TAXUMAPSCALEDeuclidean, X_embedded_scaledeuclidean, taxumap_Xscaled_scaledeuclidean, taxumap_X_scaledeuclidean = taxumap(\n# agg_levels,\n# True,\n# False,\n# \"euclidean\",\n# printfigure,\n# printwithdiversity,\n# X_in,\n# tax,\n# withusercolors,\n# debug=True, #return tables\n# save_embedding=True#save xy coordinates\n# );\nLOADPCoAS = False\nif LOADPCoAS:\n pcoa_embedding_unweighted_unifrac = PCA(n_components=2).fit_transform(unweighted_unifrac.set_index(\"SampleID\"))\n #Weighted Unifrac\n pcoa_embedding_weighted_unifrac = PCA(n_components=2).fit_transform(weighted_unifrac.set_index(\"SampleID\"))\n\n \ndel unweighted_unifrac\ndel weighted_unifrac\n#del TAXUMAPSCALED, taxumap_Xscaled_scaled, taxumap_X_scaled\n#del TAXUMAPSCALEDeuclidean, taxumap_Xscaled_scaledeuclidean, taxumap_X_scaledeuclidean\ndel TAXUMAP_alllevels, taxumap_Xscaled_alllevels, taxumap_X_alllevels\n\nwrite_now=False\nif write_now:\n for (em,n) in zip(\n [pcaembedding,\n pcoa_embedding_unweighted_unifract[:,0:2], \n pcoa_embedding_weighted_unifract, \n embedding_non_phylo_unscaled,\n X_embedded_alllevels.values,\n X_embedded.values],\n [\"pcaembedding\",\n \"pcoa_unweighted_unifrac_embedding\", \n \"pcoa_weighted_unifrac_embedding\",\n \"embedding_nontax_umap_unscaled\",\n \"taxumap_alllevels\",\n \"current_taxumap_embedding\"]):\n pd.DataFrame(em, 
index=XASV.index).to_csv(\"results/%s.csv\"%n)", "# Authors: Jonas Schluter <[email protected]>, Grant Hussey <[email protected]>\n# License: MIT\n\nimport os\nimport sys\nimport warnings\n\nfrom pathlib import Path\n\nimport numpy as np\nimport pandas as pd\nimport scipy.spatial.distance as ssd\nfrom sklearn.preprocessing import MinMaxScaler\n\nfrom taxumap.custom_logging import setup_logger\n\nlogger_tools = setup_logger(\"tools\", verbose=False, debug=False)\n\n\ndef tax_agg(rel_abundances, taxonomy, agg_levels, distance_metric, weights):\n    \"\"\"Generates a distance matrix aggregated on each designated taxon\n\n    Args:\n        rel_abundances (Pandas df): Relative abundance df with row-wise compositional data, row: sample, columns: OTU/ASV label\n        taxonomy (Pandas df): Row: OTU/ASV label, columns: hierarchy of taxonomy for that ASV/OTU\n        agg_levels (list of str): Taxons to aggregate\n        distance_metric (str): String to pass to ssd.cdist()\n        weights (list of int): Weights of the non-ASV/OTU taxons\n\n    Returns:\n        pandas df: distance table, row and columns are sample ids\n    \"\"\"\n\n    _X = rel_abundances.copy()\n    # remove columns that are always zero\n    _X = _X.loc[:, (_X != 0).any(axis=0)]\n    Xdist = ssd.cdist(_X, _X, distance_metric)\n    Xdist = pd.DataFrame(Xdist, index=_X.index, columns=_X.index)\n\n    for agg_level, weight in zip(agg_levels, weights):\n        logger_tools.info(\"aggregating on %s\" % agg_level)\n        Xagg = aggregate_at_taxlevel(_X, taxonomy, agg_level)\n        Xagg = ssd.cdist(Xagg, Xagg, distance_metric)\n        Xagg = pd.DataFrame(Xagg, index=_X.index, columns=_X.index)\n        Xagg = Xagg * weight\n\n        Xdist = Xdist + Xagg\n\n    return Xdist\n\n\ndef aggregate_at_taxlevel(X, tax, level):\n    \"\"\"Helper function. For a given taxonomic level, aggregate relative abundances by summing all members of the corresponding taxon.\"\"\"\n    _X_agg = X.copy()\n    _X_agg.columns = [tax.loc[x][level] for x in _X_agg.columns]\n    _X_agg = _X_agg.groupby(_X_agg.columns, axis=1).sum()\n    try:\n        assert np.allclose(\n            _X_agg.sum(axis=1), 1.0\n        ), \"At taxonomic aggregation level %s, rows do not sum to 1.\" % level\n    except AssertionError:\n        print(\"moving on anyway\")\n\n    return _X_agg\n\n\ndef scale(X, scaler=MinMaxScaler(), remove_rare_asv_level=0):\n    \"\"\"Min max scaling of relative abundances to ensure that different taxonomic levels have comparable dynamic ranges.\n    Params\n    ===============\n    X: ASV table\n    scaler: one of the sklearn.preprocessing scalers, defaults to MinMaxScaler \n\n    Returns\n    ===============\n    Xscaled: scaled ASV table\n    \"\"\"\n    # X_sum = X.sum()\n    X_stats = X.apply([\"max\"]).T\n\n    if remove_rare_asv_level > 0:\n        # if an ASV has never reached at least `remove_rare_asv_level` threshold, ignore.\n        X_consider = X_stats.applymap(lambda v: v > remove_rare_asv_level).apply(\n            np.any, axis=1\n        )\n        X_consider = X_consider[X_consider.values].index\n    else:\n        X_consider = X.columns\n\n    Xscaled = scaler.fit_transform(X[X_consider])\n\n    return Xscaled\n\n\n\n" ]
[ [ "sklearn.decomposition.KernelPCA", "sklearn.decomposition.PCA" ], [ "scipy.spatial.distance.cdist", "pandas.DataFrame", "sklearn.preprocessing.MinMaxScaler" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] } ]
catalys1/dnnutil
[ "a55a73ae59c5ac0117f58d8d8136bdd32902141f" ]
[ "dnnutil/training.py" ]
[ "import torch\nimport numpy as np\nimport dnnutil.network as network\nimport time\n\n\n__all__ = ['calculate_accuracy', 'Trainer', 'ClassifierTrainer', 'AutoencoderTrainer']\n\n\ndef calculate_accuracy(prediction, label, axis=1):\n '''calculate_accuracy(prediction, label)\n \n Computes the mean accuracy over a batch of predictions and corresponding\n ground-truth labels.\n\n Args:\n prediction (Tensor): A batch of predictions. Assumed to have shape\n [batch-size, nclasses, [d0, d1, ...]].\n label (LongTensor): A batch of labels. Assumed to have shape\n [batch-size, [d0, d1, ...]]). The number of dimensions should be\n one less than prediction.\n\n Returns:\n accuracy (Tensor): A single-element Tensor containing the percent of\n correct predictions in the batch as a value between 0 and 1.\n '''\n return torch.eq(prediction.argmax(axis), label).float().mean().item()\n\n\nclass Trainer(object):\n '''Trainer(net, optim, loss_fn, accuracy_metric=None)\n \n Base class for all network trainers. Network trainer classes provide \n methods to facilitate training and testing deep network models. The goal\n is to encapsulate the common functionality, to reduce the boilerplate\n code that needs to be repeated across projects.\n\n Args:\n net (torch.nn.Module): An instance of a network that inherits from\n torch.nn.Module.\n optim (torch.optim.Optimizer): An instance of an optimizer that\n inherits from torch.optim.Optimizer.\n loss_fn (callable): A callable that calculates and returns a loss\n value. The loss value should be a single-element Tensor.\n accuracy_metric (callable): A callabel that calculates and returns\n an accuracy value. Usually this will be a floating point number\n in [0, 1].\n '''\n def __init__(self, net, optim, loss_fn, accuracy_metric=None):\n self.net = net\n self.loss_fn = loss_fn\n self.optim = optim\n if accuracy_metric is not None:\n self.measure_accuracy = accuracy_metric\n else:\n self.measure_accuracy = calculate_accuracy\n\n self.train_loss = 0.\n self.train_acc = 0.\n self.test_loss = 0.\n self.test_acc = 0.\n \n def _set_train_stats(self, stats):\n '''TODO:docs\n '''\n self.train_loss = stats[0]\n self.train_acc = stats[1]\n\n def _set_test_stats(self, stats):\n '''TODO:docs\n '''\n self.test_loss = stats[0]\n self.test_acc = stats[1]\n\n def get_stats(self):\n '''TODO:docs\n '''\n return (self.train_loss, self.train_acc,\n self.test_loss, self.test_acc)\n\n def train(self, dataloader, epoch):\n '''Train the Trainer's network.\n\n Args:\n dataloader (torch.utils.data.DataLoader): An instance of a\n DataLoader, which will provide access to the training data.\n epoch (int): The current epoch.\n\n Returns:\n loss (float): The mean loss over the epoch.\n accuracy (float): The mean accuracy over the epoch (in [0, 1]).\n '''\n self.net.train()\n stats = self._run_epoch(dataloader, epoch)\n self._set_train_stats(stats)\n return stats\n\n def eval(self, dataloader, epoch):\n '''Evaluate the Trainer's network.\n\n Args:\n dataloader (torch.utils.data.DataLoader): An instance of a\n DataLoader, which will provide access to the testing data.\n epoch (int): The current epoch.\n Returns:\n loss (float): The mean loss over the epoch.\n accuracy (float): The mean accuracy over the epoch (in [0, 1]).\n '''\n self.net.eval()\n stats = self._run_epoch(dataloader, epoch)\n self._set_test_stats(stats)\n return stats\n \n def _run_epoch(self, dataloader, epoch):\n '''Perform a single epoch of either training or evaluation.\n\n Args:\n dataloader (torch.utils.data.DataLoader): An instance 
of a\n                DataLoader, which will provide access to the testing data.\n            epoch (int): The current epoch.\n        Returns:\n            loss (float): The mean loss over the epoch.\n            accuracy (float): The mean accuracy over the epoch (in [0, 1]).\n        '''\n        N = len(dataloader.batch_sampler)\n        msg = 'train' if self.net.training else 'test'\n        func = self.train_batch if self.net.training else self.test_batch\n        loss = []\n        acc = []\n        at = 0\n        for i, batch in enumerate(dataloader):\n            t = time.time()\n            if self.net.training:\n                self.update_lr(epoch * N + i + 1)\n            batch_loss, batch_acc = func(batch)\n            t = time.time() - t\n            if i == 0:\n                at = t\n            else:\n                at = at * i / (i + 1) + t / (i + 1)\n\n            loss.append(batch_loss)\n            acc.append(batch_acc)\n\n            print(f'\\rEPOCH {epoch}: {msg} '\n                  f'batch {i + 1:04d}/{N} '\n                  f'lr[ {self.optim.param_groups[0][\"lr\"]:1.3e} ] '\n                  f'[ {t:.3f} ({at:.3f}) secs ]'\n                  f'{\" \"*10}',\n                  end='', flush=True)\n\n        loss = np.mean(loss)\n        acc = np.mean(acc)\n\n        return loss, acc\n\n    def update_lr(self, i=None):\n        '''Update the optimizer's learning rate. Used for batch-level\n        learning rate scheduling. If using an epoch-level scheduler, \n        define and use it in the epoch loop. If the iteration number is\n        not provided (None) or the Trainer has no lr_schedule attribute,\n        this function does nothing and returns.\n\n        Args:\n            i (int): iteration number (starts at 1 for the first batch).\n        '''\n        if i is None or not hasattr(self, 'lr_schedule'):\n            return\n        self.lr_schedule.step(i)\n    \n    def train_batch(self, batch):\n        '''Train the Trainer's network on a single training batch.\n        '''\n        raise NotImplementedError()\n\n    def test_batch(self, batch):\n        '''Test the Trainer's network on a single testing batch.\n        '''\n        raise NotImplementedError()\n\n\nclass ClassifierTrainer(Trainer):\n    '''ClassifierTrainer(net, optim, loss_fn, accuracy_metric=None)\n    \n    Trainer for training a network to do image classification.\n\n    Args:\n        net (torch.nn.Module): An instance of a network that inherits from\n            torch.nn.Module.\n        optim (torch.optim.Optimizer): An instance of an optimizer that\n            inherits from torch.optim.Optimizer.\n        loss_fn (callable): A callable that calculates and returns a loss\n            value. The loss value should be a single-element Tensor.\n        accuracy_metric (callable): A callable that calculates and returns\n            an accuracy value. Usually this will be a floating point number\n            in [0, 1].\n    '''\n    def train_batch(self, batch):\n        '''Train the Trainer's network on a single training batch.\n\n        Args:\n            batch (iterable): A 2-tuple of (images, labels). Images is a 4-d\n                Tensor of shape (BxCxHxW), and labels is a Tensor of 2 or more\n                dimensions (BxLx*) which matches images in the first (batch)\n                dimension. The exact dimensionality of labels will depend on\n                the application and loss function chosen, but often consists\n                of integer class-indexes.\n        Returns:\n            loss (float): The mean loss over the batch.\n            accuracy (float): The mean accuracy over the batch (in [0, 1]).\n        '''\n        self.optim.zero_grad()\n\n        imgs, labels = network.tocuda(batch)\n\n        predictions = self.net(imgs)\n        loss = self.loss_fn(predictions, labels)\n\n        loss.backward()\n        self.optim.step()\n\n        loss = loss.item()\n        with torch.no_grad():\n            accuracy = self.measure_accuracy(predictions, labels)\n        return loss, accuracy\n\n    @torch.no_grad()\n    def test_batch(self, batch):\n        '''Evaluate the Trainer's network on a single testing batch.\n\n        Args:\n            batch (iterable): A 2-tuple of (images, labels). 
Images is a 4-d\n                Tensor of shape (BxCxHxW), and labels is a Tensor of 2 or more\n                dimensions (BxLx*) which matches images in the first (batch)\n                dimension. The exact dimensionality of labels will depend on\n                the application and loss function chosen, but often consists\n                of integer class-indexes.\n        Returns:\n            loss (float): The mean loss over the batch.\n            accuracy (float): The mean accuracy over the batch (in [0, 1]).\n        '''\n        imgs, labels = network.tocuda(batch)\n        predictions = self.net(imgs)\n        loss = self.loss_fn(predictions, labels).item()\n        accuracy = self.measure_accuracy(predictions, labels)\n        return loss, accuracy\n\n\nclass AutoencoderTrainer(Trainer):\n    '''AutoencoderTrainer(net, optim, loss_fn)\n\n    Trainer for training an autoencoder network.\n\n    Args:\n        net (torch.nn.Module): An instance of a network that inherits from\n            torch.nn.Module.\n        optim (torch.optim.Optimizer): An instance of an optimizer that\n            inherits from torch.optim.Optimizer.\n        loss_fn (callable): A callable that calculates and returns a loss\n            value. The loss value should be a single-element Tensor.\n    '''\n    def __init__(self, net, optim, loss_fn):\n        super(AutoencoderTrainer, self).__init__(\n            net, optim, loss_fn, None)\n        delattr(self, 'measure_accuracy')\n\n    def train_batch(self, batch):\n        '''Train the Trainer's network on a single training batch.\n\n        Args:\n            batch (Tensor): A batch of images as a 4-d Tensor of shape\n                (BxCxHxW). The autoencoder reconstructs its input, so no\n                separate labels are used.\n        Returns:\n            loss (float): The mean loss over the batch.\n        '''\n        self.optim.zero_grad()\n\n        imgs = network.tocuda(batch)\n\n        predictions = self.net(imgs)\n        loss = self.loss_fn(predictions, imgs)\n\n        loss.backward()\n        self.optim.step()\n\n        loss = loss.item()\n\n        return loss\n\n    @torch.no_grad()\n    def test_batch(self, batch):\n        '''Evaluate the Trainer's network on a single testing batch.\n\n        Args:\n            batch (Tensor): A batch of images as a 4-d Tensor of shape\n                (BxCxHxW). The autoencoder reconstructs its input, so no\n                separate labels are used.\n        Returns:\n            loss (float): The mean loss over the batch.\n        '''\n        imgs = network.tocuda(batch)\n        predictions = self.net(imgs)\n        loss = self.loss_fn(predictions, imgs).item()\n        return loss\n\n    def _run_epoch(self, dataloader, epoch):\n        '''Perform a single epoch of either training or evaluation.\n\n        Args:\n            dataloader (torch.utils.data.DataLoader): An instance of a\n                DataLoader, which will provide access to the testing data.\n            epoch (int): The current epoch.\n        Returns:\n            loss (float): The mean loss over the epoch.\n        '''\n        N = int(np.ceil(len(dataloader.dataset) / dataloader.batch_size))\n        msg = 'train' if self.net.training else 'test'\n        func = self.train_batch if self.net.training else self.test_batch\n        loss = []\n        for i, batch in enumerate(dataloader):\n            batch_loss = func(batch)\n            loss.append(batch_loss)\n\n            print(f'\\rEPOCH {epoch}: {msg} batch {i + 1:04d}/{N}{\" \"*10}',\n                  end='', flush=True)\n\n        loss = np.mean(loss)\n\n        return loss\n\n" ]
[ [ "torch.no_grad", "numpy.mean" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
vinayphadnis/NeMo
[ "9dc7773c48e164b8a82051bb558a728c6eeb85ec", "9dc7773c48e164b8a82051bb558a728c6eeb85ec", "9dc7773c48e164b8a82051bb558a728c6eeb85ec", "9dc7773c48e164b8a82051bb558a728c6eeb85ec" ]
[ "nemo/collections/asr/models/classification_models.py", "nemo/collections/nlp/models/neural_machine_translation/neural_machine_translation_model.py", "nemo/collections/tts/losses/uniglowloss.py", "nemo/collections/tts/modules/squeezewave_submodules.py" ]
[ "# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\nfrom typing import Dict, List, Optional, Union\n\nimport torch\nfrom omegaconf import DictConfig, ListConfig, OmegaConf\nfrom pytorch_lightning import Trainer\n\nfrom nemo.collections.asr.data.audio_to_text import AudioLabelDataset\nfrom nemo.collections.asr.models.asr_model import ASRModel\nfrom nemo.collections.asr.parts.features import WaveformFeaturizer\nfrom nemo.collections.asr.parts.perturb import process_augmentations\nfrom nemo.collections.common.losses import CrossEntropyLoss\nfrom nemo.collections.common.metrics import TopKClassificationAccuracy, compute_topk_accuracy\nfrom nemo.core.classes.common import PretrainedModelInfo, typecheck\nfrom nemo.core.neural_types import *\nfrom nemo.utils import logging\n\n__all__ = ['EncDecClassificationModel', 'MatchboxNet']\n\n\nclass EncDecClassificationModel(ASRModel):\n \"\"\"Encoder decoder CTC-based models.\"\"\"\n\n def __init__(self, cfg: DictConfig, trainer: Trainer = None):\n super().__init__(cfg=cfg, trainer=trainer)\n self._update_decoder_config(self.cfg.decoder)\n\n self.preprocessor = EncDecClassificationModel.from_config_dict(self._cfg.preprocessor)\n self.encoder = EncDecClassificationModel.from_config_dict(self._cfg.encoder)\n self.decoder = EncDecClassificationModel.from_config_dict(self._cfg.decoder)\n self.loss = CrossEntropyLoss()\n if hasattr(self._cfg, 'spec_augment') and self._cfg.spec_augment is not None:\n self.spec_augmentation = EncDecClassificationModel.from_config_dict(self._cfg.spec_augment)\n else:\n self.spec_augmentation = None\n if hasattr(self._cfg, 'crop_or_pad_augment') and self._cfg.crop_or_pad_augment is not None:\n self.crop_or_pad = EncDecClassificationModel.from_config_dict(self._cfg.crop_or_pad_augment)\n else:\n self.crop_or_pad = None\n\n # Setup metric objects\n self._accuracy = TopKClassificationAccuracy()\n\n def transcribe(self, paths2audio_files: str) -> str:\n raise NotImplementedError(\"Classification models do not transcribe audio.\")\n\n def _setup_dataloader_from_config(self, config: Optional[Dict]):\n if config.get('manifest_filepath') is None:\n return\n\n if 'augmentor' in config:\n augmentor = process_augmentations(config['augmentor'])\n else:\n augmentor = None\n\n featurizer = WaveformFeaturizer(\n sample_rate=config['sample_rate'], int_values=config.get('int_values', False), augmentor=augmentor\n )\n dataset = AudioLabelDataset(\n manifest_filepath=config['manifest_filepath'],\n labels=config['labels'],\n featurizer=featurizer,\n max_duration=config.get('max_duration', None),\n min_duration=config.get('min_duration', None),\n trim=config.get('trim_silence', True),\n load_audio=config.get('load_audio', True),\n )\n\n return torch.utils.data.DataLoader(\n dataset=dataset,\n batch_size=config['batch_size'],\n collate_fn=dataset.collate_fn,\n drop_last=config.get('drop_last', False),\n shuffle=config['shuffle'],\n num_workers=config.get('num_workers', 0),\n 
pin_memory=config.get('pin_memory', False),\n )\n\n def setup_training_data(self, train_data_config: Optional[Union[DictConfig, Dict]]):\n if 'shuffle' not in train_data_config:\n train_data_config['shuffle'] = True\n self._train_dl = self._setup_dataloader_from_config(config=train_data_config)\n\n def setup_validation_data(self, val_data_config: Optional[Union[DictConfig, Dict]]):\n if 'shuffle' not in val_data_config:\n val_data_config['shuffle'] = False\n self._validation_dl = self._setup_dataloader_from_config(config=val_data_config)\n\n def setup_test_data(self, test_data_config: Optional[Union[DictConfig, Dict]]):\n if 'shuffle' not in test_data_config:\n test_data_config['shuffle'] = False\n self._test_dl = self._setup_dataloader_from_config(config=test_data_config)\n\n def test_dataloader(self):\n if self._test_dl is not None:\n return self._test_dl\n\n @classmethod\n def list_available_models(cls) -> Optional[List[PretrainedModelInfo]]:\n \"\"\"\n This method returns a list of pre-trained model which can be instantiated directly from NVIDIA's NGC cloud.\n\n Returns:\n List of available pre-trained models.\n \"\"\"\n result = []\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-3x1x64-v1\",\n location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet-3x1x64-v1.nemo\",\n description=\"MatchboxNet model trained on Google Speech Commands dataset (v1, 30 classes) which obtains 97.32% accuracy on test set.\",\n )\n result.append(model)\n\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-3x2x64-v1\",\n location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet-3x2x64-v1.nemo\",\n description=\"MatchboxNet model trained on Google Speech Commands dataset (v1, 30 classes) which obtains 97.68% accuracy on test set.\",\n )\n result.append(model)\n\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-3x1x64-v2\",\n location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet-3x1x64-v2.nemo\",\n description=\"MatchboxNet model trained on Google Speech Commands dataset (v2, 35 classes) which obtains 97.12% accuracy on test set.\",\n )\n result.append(model)\n\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-3x1x64-v2\",\n location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet-3x1x64-v2.nemo\",\n description=\"MatchboxNet model trained on Google Speech Commands dataset (v2, 30 classes) which obtains 97.29% accuracy on test set.\",\n )\n result.append(model)\n\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-3x1x64-v2-subset-task\",\n location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet-3x1x64-v2-subset-task.nemo\",\n description=\"MatchboxNet model trained on Google Speech Commands dataset (v2, 10+2 classes) which obtains 98.2% accuracy on test set.\",\n )\n result.append(model)\n\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-3x2x64-v2-subset-task\",\n location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet-3x2x64-v2-subset-task.nemo\",\n description=\"MatchboxNet model trained on Google Speech Commands dataset (v2, 10+2 classes) which obtains 98.4% accuracy on test set.\",\n )\n result.append(model)\n\n model = PretrainedModelInfo(\n pretrained_model_name=\"MatchboxNet-VAD-3x2\",\n 
location=\"https://api.ngc.nvidia.com/v2/models/nvidia/nemospeechmodels/versions/1.0.0a5/files/MatchboxNet_VAD_3x2.nemo\",\n description=\"Voice Activity Detection MatchboxNet model trained on google speech command (v2) and freesound background data, which obtains 0.992 accuracy on testset from same source and 0.852 TPR for FPR=0.315 on testset (ALL) of AVA movie data\",\n )\n result.append(model)\n return result\n\n @property\n def input_types(self) -> Optional[Dict[str, NeuralType]]:\n if hasattr(self.preprocessor, '_sample_rate'):\n audio_eltype = AudioSignal(freq=self.preprocessor._sample_rate)\n else:\n audio_eltype = AudioSignal()\n return {\n \"input_signal\": NeuralType(('B', 'T'), audio_eltype),\n \"input_signal_length\": NeuralType(tuple('B'), LengthsType()),\n }\n\n @property\n def output_types(self) -> Optional[Dict[str, NeuralType]]:\n return {\"outputs\": NeuralType(('B', 'D'), LogitsType())}\n\n @typecheck()\n def forward(self, input_signal, input_signal_length):\n processed_signal, processed_signal_len = self.preprocessor(\n input_signal=input_signal, length=input_signal_length,\n )\n # Crop or pad is always applied\n if self.crop_or_pad is not None:\n processed_signal, processed_signal_len = self.crop_or_pad(\n input_signal=processed_signal, length=processed_signal_len\n )\n # Spec augment is not applied during evaluation/testing\n if self.spec_augmentation is not None and self.training:\n processed_signal = self.spec_augmentation(input_spec=processed_signal)\n encoded, encoded_len = self.encoder(audio_signal=processed_signal, length=processed_signal_len)\n logits = self.decoder(encoder_output=encoded)\n return logits\n\n # PTL-specific methods\n def training_step(self, batch, batch_nb):\n self.training_step_end()\n audio_signal, audio_signal_len, labels, labels_len = batch\n logits = self.forward(input_signal=audio_signal, input_signal_length=audio_signal_len)\n loss_value = self.loss(logits=logits, labels=labels)\n\n tensorboard_logs = {\n 'train_loss': loss_value,\n 'learning_rate': self._optimizer.param_groups[0]['lr'],\n }\n\n correct_counts, total_counts = self._accuracy(logits=logits, labels=labels)\n\n for ki in range(correct_counts.shape[-1]):\n correct_count = correct_counts[ki]\n total_count = total_counts[ki]\n top_k = self._accuracy.top_k[ki]\n\n tensorboard_logs['training_batch_accuracy_top@{}'.format(top_k)] = correct_count / float(total_count)\n\n return {'loss': loss_value, 'log': tensorboard_logs}\n\n def validation_step(self, batch, batch_idx, dataloader_idx=0):\n audio_signal, audio_signal_len, labels, labels_len = batch\n logits = self.forward(input_signal=audio_signal, input_signal_length=audio_signal_len)\n loss_value = self.loss(logits=logits, labels=labels)\n correct_counts, total_counts = self._accuracy(logits=logits, labels=labels)\n return {'val_loss': loss_value, 'val_correct_counts': correct_counts, 'val_total_counts': total_counts}\n\n def test_step(self, batch, batch_idx, dataloader_idx=0):\n audio_signal, audio_signal_len, labels, labels_len = batch\n logits = self.forward(input_signal=audio_signal, input_signal_length=audio_signal_len)\n loss_value = self.loss(logits=logits, labels=labels)\n correct_counts, total_counts = self._accuracy(logits=logits, labels=labels)\n return {'test_loss': loss_value, 'test_correct_counts': correct_counts, 'test_total_counts': total_counts}\n\n def multi_validation_epoch_end(self, outputs, dataloader_idx: int = 0):\n val_loss_mean = torch.stack([x['val_loss'] for x in outputs]).mean()\n correct_counts = 
torch.stack([x['val_correct_counts'] for x in outputs])\n        total_counts = torch.stack([x['val_total_counts'] for x in outputs])\n\n        topk_scores = compute_topk_accuracy(correct_counts, total_counts)\n\n        tensorboard_log = {'val_loss': val_loss_mean}\n        for top_k, score in zip(self._accuracy.top_k, topk_scores):\n            tensorboard_log['val_epoch_top@{}'.format(top_k)] = score\n\n        return {'log': tensorboard_log}\n\n    def multi_test_epoch_end(self, outputs, dataloader_idx: int = 0):\n        test_loss_mean = torch.stack([x['test_loss'] for x in outputs]).mean()\n        correct_counts = torch.stack([x['test_correct_counts'].unsqueeze(0) for x in outputs])\n        total_counts = torch.stack([x['test_total_counts'].unsqueeze(0) for x in outputs])\n\n        topk_scores = compute_topk_accuracy(correct_counts, total_counts)\n\n        tensorboard_log = {'test_loss': test_loss_mean}\n        for top_k, score in zip(self._accuracy.top_k, topk_scores):\n            tensorboard_log['test_epoch_top@{}'.format(top_k)] = score\n\n        return {'log': tensorboard_log}\n\n    def change_labels(self, new_labels: List[str]):\n        \"\"\"\n        Changes labels used by the decoder model. Use this method when fine-tuning from a pre-trained model.\n        This method changes only the decoder and leaves the encoder and pre-processing modules unchanged. For example, you would\n        use it if you want to use a pretrained encoder when fine-tuning on data from another dataset.\n\n        If new_labels == self.decoder.vocabulary then nothing will be changed.\n\n        Args:\n\n            new_labels: list with new labels. Must contain at least 2 elements. Typically, \\\n            this is the set of labels for the dataset.\n\n        Returns: None\n\n        \"\"\"\n        if new_labels is not None and not isinstance(new_labels, ListConfig):\n            new_labels = ListConfig(new_labels)\n\n        if self._cfg.labels == new_labels:\n            logging.warning(\n                f\"Old labels ({self._cfg.labels}) and new labels ({new_labels}) match. Not changing anything\"\n            )\n        else:\n            if new_labels is None or len(new_labels) == 0:\n                raise ValueError(f'New labels must be non-empty list of labels. But I got: {new_labels}')\n\n            # Update config\n            self._cfg.labels = new_labels\n\n            decoder_config = self.decoder.to_config_dict()\n            new_decoder_config = copy.deepcopy(decoder_config)\n            self._update_decoder_config(new_decoder_config)\n            del self.decoder\n            self.decoder = EncDecClassificationModel.from_config_dict(new_decoder_config)\n\n            OmegaConf.set_struct(self._cfg.decoder, False)\n            self._cfg.decoder = new_decoder_config\n            OmegaConf.set_struct(self._cfg.decoder, True)\n\n            if 'train_ds' in self._cfg and self._cfg.train_ds is not None:\n                self._cfg.train_ds.labels = new_labels\n\n            if 'validation_ds' in self._cfg and self._cfg.validation_ds is not None:\n                self._cfg.validation_ds.labels = new_labels\n\n            if 'test_ds' in self._cfg and self._cfg.test_ds is not None:\n                self._cfg.test_ds.labels = new_labels\n\n            logging.info(f\"Changed decoder output to {self.decoder.num_classes} labels.\")\n\n    def _update_decoder_config(self, cfg):\n        \"\"\"\n        Update the number of classes in the decoder based on labels provided.\n\n        Args:\n            cfg: The config of the decoder which will be updated.\n        \"\"\"\n        OmegaConf.set_struct(cfg, False)\n\n        labels = self.cfg.labels\n\n        if 'params' in cfg:\n            cfg.params.num_classes = len(labels)\n        else:\n            cfg.num_classes = len(labels)\n\n        OmegaConf.set_struct(cfg, True)\n\n\nclass MatchboxNet(EncDecClassificationModel):\n    pass\n", "# Copyright 2018 The Google AI Language Team Authors and\n# The HuggingFace Inc. team.\n# Copyright (c) 2020, MeetKai Inc. All rights reserved.\n# Copyright (c) 2020, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom typing import Dict, List, Optional, Tuple, Union\n\nimport torch\nfrom omegaconf import DictConfig, OmegaConf\nfrom pytorch_lightning import Trainer\nfrom transformers import AutoModel, BartForConditionalGeneration, EncoderDecoderModel\n\nfrom nemo.collections.nlp.data.neural_machine_translation import NeuralMachineTranslationDataset\nfrom nemo.collections.nlp.metrics import Perplexity\nfrom nemo.collections.nlp.modules.common.tokenizer_utils import get_tokenizer\nfrom nemo.core.classes.common import typecheck\nfrom nemo.core.classes.modelPT import ModelPT\nfrom nemo.core.neural_types import ChannelType, LossType, MaskType, NeuralType\nfrom nemo.utils import logging\n\n__all__ = [\"NeuralMachineTranslationModel\"]\n\n\nclass NeuralMachineTranslationModel(ModelPT):\n @property\n def input_types(self) -> Optional[Dict[str, NeuralType]]:\n return {\n \"input_ids\": NeuralType(('B', 'T'), ChannelType()),\n \"attention_mask\": NeuralType(('B', 'T'), MaskType(), optional=True),\n \"decoder_input_ids\": NeuralType(('B', 'T'), ChannelType(), optional=True),\n \"labels\": NeuralType(('B', 'T'), ChannelType(), optional=True),\n }\n\n @property\n def output_types(self) -> Optional[Dict[str, NeuralType]]:\n return {\n \"loss\": NeuralType((), LossType()),\n \"decoder_hidden_states\": NeuralType((\"B\", \"T\", \"D\"), ChannelType(), optional=True),\n \"encoder_hidden_states\": NeuralType((\"B\", \"T\", \"D\"), ChannelType(), optional=True),\n }\n\n def __init__(self, cfg: DictConfig, trainer: Trainer = None):\n\n # must assign tokenizers before init\n if cfg.language_model.pretrained_model_name:\n if cfg.language_model.pretrained_encoder_model_name or cfg.language_model.pretrained_decoder_model_name:\n raise ValueError(\n \"Must have either pretrained_model_name or both pretrained_encoder_model name and \"\n \"pretrained_decoder_model_name.\"\n )\n # setup tokenizer\n self.encoder_tokenizer = self.setup_tokenizer(cfg.encoder_tokenizer)\n self.encoder_add_special_tokens = cfg.encoder_tokenizer.add_special_tokens\n\n # set decoder to encoder\n self.decoder_tokenizer = self.encoder_tokenizer\n self.decoder_add_special_tokens = self.encoder_add_special_tokens\n else:\n if not (\n cfg.language_model.pretrained_encoder_model_name and cfg.language_model.pretrained_decoder_model_name\n ):\n raise ValueError(\"Both encoder and decoder must be specified\")\n\n # setup tokenizers\n self.encoder_tokenizer = self.setup_tokenizer(cfg.encoder_tokenizer)\n self.encoder_add_special_tokens = cfg.encoder_tokenizer.add_special_tokens\n\n self.decoder_tokenizer = self.setup_tokenizer(cfg.decoder_tokenizer)\n self.decoder_add_special_tokens = cfg.decoder_tokenizer.add_special_tokens\n\n if not self.encoder_tokenizer:\n raise TypeError(\"encoder_tokenizer failed to initialize\")\n if not self.decoder_tokenizer:\n raise TypeError(\"decoder_tokenizer failed to initialize\")\n\n # init superclass\n super().__init__(cfg=cfg, trainer=trainer)\n\n # 
must assign modules after init\n if cfg.language_model.pretrained_model_name:\n # Setup end-to-end model\n if \"bart\" in cfg.language_model.pretrained_model_name:\n self.model = BartForConditionalGeneration.from_pretrained(cfg.language_model.pretrained_model_name)\n else:\n self.model = AutoModel.from_pretrained(cfg.language_model.pretrained_model_name)\n else:\n if not (\n cfg.language_model.pretrained_encoder_model_name and cfg.language_model.pretrained_decoder_model_name\n ):\n raise ValueError(\"Both encoder and decoder must be specified\")\n\n # Setup encoder/decoder model\n self.model = EncoderDecoderModel.from_encoder_decoder_pretrained(\n encoder=cfg.language_model.pretrained_encoder_model_name,\n decoder=cfg.language_model.pretrained_decoder_model_name,\n )\n\n self.perplexity_metric = Perplexity()\n\n self.setup_optimization(cfg.optim)\n\n @typecheck()\n def forward(\n self,\n input_ids: torch.Tensor,\n attention_mask: torch.Tensor = None,\n decoder_input_ids: torch.Tensor = None,\n labels: torch.Tensor = None,\n ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[torch.Tensor]]:\n \"\"\"\n No special modification required for Lightning, define it as you normally would\n in the `nn.Module` in vanilla PyTorch.\n \"\"\"\n outputs = self.model(\n input_ids=input_ids, attention_mask=attention_mask, decoder_input_ids=decoder_input_ids, labels=labels,\n )\n return outputs\n\n @typecheck.disable_checks()\n def generate(self, input_ids: Union[torch.Tensor, torch.LongTensor]) -> torch.Tensor:\n \"\"\"Wraps huggingface EncoderDecoder.generate().\"\"\"\n outputs = self.model.generate(\n input_ids=input_ids,\n pad_token_id=self.encoder_tokenizer.pad_id,\n bos_token_id=self.encoder_tokenizer.bos_id,\n eos_token_id=self.encoder_tokenizer.eos_id,\n decoder_start_token_id=self.decoder_tokenizer.bos_id,\n **self._cfg.generate,\n )\n return outputs\n\n def training_step(self, batch: Tuple, batch_idx: int) -> Dict:\n \"\"\"\n Lightning calls this inside the training loop with the data from the training dataloader\n passed in as `batch`. Loss calculation from HuggingFace's BartForConditionalGeneration.\n \"\"\"\n input_ids, input_mask, decoder_input_ids, labels = batch\n loss = self.forward(\n input_ids=input_ids, attention_mask=input_mask, decoder_input_ids=decoder_input_ids, labels=labels\n )[0]\n\n tensorboard_logs = {\"train_loss\": loss, \"lr\": self._optimizer.param_groups[0][\"lr\"]}\n\n return {\"loss\": loss, \"log\": tensorboard_logs}\n\n def validation_step(self, batch: Tuple, batch_idx: int) -> Dict:\n \"\"\"\n Lightning calls this inside the validation loop with the data from the validation dataloader\n passed in as `batch`. 
Loss calculation from HuggingFace's BartForConditionalGeneration.\n \"\"\"\n input_ids, input_mask, decoder_input_ids, labels = batch\n loss = self.forward(\n input_ids=input_ids, attention_mask=input_mask, decoder_input_ids=decoder_input_ids, labels=labels\n )[0]\n\n perplexity = self.perplexity_metric(loss)\n\n tensorboard_logs = {\"val_loss\": loss, \"perplexity\": perplexity}\n\n return {\"val_loss\": loss, \"log\": tensorboard_logs}\n\n def validation_epoch_end(self, outputs: List[Dict]) -> Dict:\n \"\"\"\n Called at the end of validation to aggregate outputs.\n :param outputs: list of individual outputs of each validation step.\n \"\"\"\n avg_loss = torch.stack([x[\"val_loss\"] for x in outputs]).mean()\n perplexity = torch.stack([x[\"log\"][\"perplexity\"] for x in outputs]).mean()\n tensorboard_logs = {\"val_loss\": avg_loss, \"perplexity\": perplexity}\n logging.info(f\"evaluation perplexity {perplexity.item()}\")\n return {\"val_loss\": avg_loss, \"log\": tensorboard_logs}\n\n @typecheck.disable_checks()\n def test_step(self, batch: Tuple, batch_idx: int) -> torch.Tensor:\n \"\"\"Lightning calls this inside the test loop with data from the test dataloader.\"\"\"\n input_ids, input_mask, decoder_input_ids, labels = batch\n sequences = self.generate(input_ids=input_ids)\n return sequences\n\n @typecheck.disable_checks()\n def test_epoch_end(self, outputs: List[torch.Tensor]) -> Dict[str, List[str]]:\n \"\"\"Called at the end of test to aggregate outputs and decode them.\"\"\"\n texts = [self.encoder_tokenizer.ids_to_text(seq) for batch in outputs for seq in batch]\n return {\"texts\": texts}\n\n def setup_tokenizer(self, cfg: DictConfig):\n tokenizer = get_tokenizer(\n tokenizer_name=cfg.tokenizer_name,\n tokenizer_model=cfg.tokenizer_model,\n special_tokens=OmegaConf.to_container(cfg.special_tokens) if cfg.special_tokens else None,\n vocab_file=cfg.vocab_file,\n )\n return tokenizer\n\n def setup_training_data(self, train_data_config: Optional[DictConfig]):\n self._train_dl = self.setup_dataloader_from_config(cfg=train_data_config)\n\n def setup_validation_data(self, val_data_config: Optional[DictConfig]):\n self._validation_dl = self.setup_dataloader_from_config(cfg=val_data_config)\n\n def setup_test_data(self, test_data_config: Optional[DictConfig]):\n self._test_dl = self.setup_dataloader_from_config(cfg=test_data_config)\n\n def setup_dataloader_from_config(self, cfg: DictConfig):\n dataset = NeuralMachineTranslationDataset(\n filepath=cfg.filepath,\n encoder_tokenizer=self.encoder_tokenizer,\n decoder_tokenizer=self.decoder_tokenizer,\n encoder_add_special_tokens=self.encoder_add_special_tokens,\n decoder_add_special_tokens=self.decoder_add_special_tokens,\n max_seq_length=self._cfg.max_seq_length,\n num_samples=cfg.get(\"num_samples\", -1),\n convert_labels=self._cfg.convert_labels,\n )\n\n return torch.utils.data.DataLoader(\n dataset=dataset,\n batch_size=self._cfg.batch_size,\n shuffle=cfg.shuffle,\n num_workers=cfg.get(\"num_workers\", 2),\n pin_memory=cfg.get(\"pin_memory\", False),\n drop_last=cfg.get(\"drop_last\", False),\n collate_fn=dataset.collate_fn,\n )\n\n @classmethod\n def list_available_models(cls) -> Optional[Dict[str, str]]:\n pass\n", "# Copyright (c) 2020, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# The following STFT-loss implementation is taken from:\n# https://github.com/kan-bayashi/ParallelWaveGAN/blob/master/parallel_wavegan/losses/stft_loss.py\n#\n# The MIT License\n#\n# Copyright (c) 2019 Tomoki Hayashi\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the \"Software\"),\n# to deal in the Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish, distribute, sublicense,\n# and/or sell copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nimport torch\nimport torch.nn.functional as F\n\nfrom nemo.core.classes import Loss, typecheck\nfrom nemo.core.neural_types.elements import AudioSignal, LossType, NormalDistributionSamplesType, VoidType\nfrom nemo.core.neural_types.neural_type import NeuralType\n\n\ndef stft(x, fft_size, hop_size, win_length, window):\n    \"\"\"Perform STFT and convert to magnitude spectrogram.\n    Args:\n        x (Tensor): Input signal tensor (B, T).\n        fft_size (int): FFT size.\n        hop_size (int): Hop size.\n        win_length (int): Window length.\n        window (str): Window function type.\n    Returns:\n        Tensor: Magnitude spectrogram (B, #frames, fft_size // 2 + 1).\n    \"\"\"\n    x_stft = torch.stft(x, fft_size, hop_size, win_length, window)\n    real = x_stft[..., 0]\n    imag = x_stft[..., 1]\n\n    # NOTE(kan-bayashi): clamp is needed to avoid nan or inf\n    return torch.sqrt(torch.clamp(real ** 2 + imag ** 2, min=1e-7)).transpose(2, 1)\n\n\nclass SpectralConvergenceLoss(torch.nn.Module):\n    \"\"\"Spectral convergence loss module.\"\"\"\n\n    def __init__(self):\n        \"\"\"Initialize spectral convergence loss module.\"\"\"\n        super(SpectralConvergenceLoss, self).__init__()\n\n    def forward(self, x_mag, y_mag):\n        \"\"\"Calculate forward propagation.\n        Args:\n            x_mag (Tensor): Magnitude spectrogram of predicted signal (B, #frames, #freq_bins).\n            y_mag (Tensor): Magnitude spectrogram of groundtruth signal (B, #frames, #freq_bins).\n        Returns:\n            Tensor: Spectral convergence loss value.\n        \"\"\"\n        return torch.norm(y_mag - x_mag, p=\"fro\") / torch.norm(y_mag, p=\"fro\")\n\n\nclass LogSTFTMagnitudeLoss(torch.nn.Module):\n    \"\"\"Log STFT magnitude loss module.\"\"\"\n\n    def __init__(self):\n        \"\"\"Initialize log 
STFT magnitude loss module.\"\"\"\n super(LogSTFTMagnitudeLoss, self).__init__()\n\n def forward(self, x_mag, y_mag):\n \"\"\"Calculate forward propagation.\n Args:\n x_mag (Tensor): Magnitude spectrogram of predicted signal (B, #frames, #freq_bins).\n y_mag (Tensor): Magnitude spectrogram of groundtruth signal (B, #frames, #freq_bins).\n Returns:\n Tensor: Log STFT magnitude loss value.\n \"\"\"\n return F.l1_loss(torch.log(y_mag), torch.log(x_mag))\n\n\nclass STFTLoss(torch.nn.Module):\n \"\"\"STFT loss module.\"\"\"\n\n def __init__(self, fft_size=1024, shift_size=120, win_length=600, window=\"hann_window\"):\n \"\"\"Initialize STFT loss module.\"\"\"\n super(STFTLoss, self).__init__()\n self.fft_size = fft_size\n self.shift_size = shift_size\n self.win_length = win_length\n self.window = getattr(torch, window)(win_length)\n self.spectral_convergence_loss = SpectralConvergenceLoss()\n self.log_stft_magnitude_loss = LogSTFTMagnitudeLoss()\n\n def forward(self, x, y):\n \"\"\"Calculate forward propagation.\n Args:\n x (Tensor): Predicted signal (B, T).\n y (Tensor): Groundtruth signal (B, T).\n Returns:\n Tensor: Spectral convergence loss value.\n Tensor: Log STFT magnitude loss value.\n \"\"\"\n x_mag = stft(x, self.fft_size, self.shift_size, self.win_length, self.window)\n y_mag = stft(y, self.fft_size, self.shift_size, self.win_length, self.window)\n sc_loss = self.spectral_convergence_loss(x_mag, y_mag)\n mag_loss = self.log_stft_magnitude_loss(x_mag, y_mag)\n\n return sc_loss, mag_loss\n\n\nclass MultiResolutionSTFTLoss(torch.nn.Module):\n \"\"\"Multi resolution STFT loss module.\"\"\"\n\n def __init__(\n self, fft_sizes=[1024, 2048, 512], hop_sizes=[120, 240, 50], win_lengths=[600, 1200, 240], window=\"hann_window\"\n ):\n \"\"\"Initialize Multi resolution STFT loss module.\n Args:\n fft_sizes (list): List of FFT sizes.\n hop_sizes (list): List of hop sizes.\n win_lengths (list): List of window lengths.\n window (str): Window function type.\n \"\"\"\n super(MultiResolutionSTFTLoss, self).__init__()\n assert len(fft_sizes) == len(hop_sizes) == len(win_lengths)\n self.stft_losses = torch.nn.ModuleList()\n for fs, ss, wl in zip(fft_sizes, hop_sizes, win_lengths):\n self.stft_losses += [STFTLoss(fs, ss, wl, window)]\n\n def forward(self, x, y):\n \"\"\"Calculate forward propagation.\n Args:\n x (Tensor): Predicted signal (B, T).\n y (Tensor): Groundtruth signal (B, T).\n Returns:\n Tensor: Multi resolution spectral convergence loss value.\n Tensor: Multi resolution log STFT magnitude loss value.\n \"\"\"\n sc_loss = 0.0\n mag_loss = 0.0\n for f in self.stft_losses:\n sc_l, mag_l = f(x, y)\n sc_loss += sc_l\n mag_loss += mag_l\n sc_loss /= len(self.stft_losses)\n mag_loss /= len(self.stft_losses)\n\n return sc_loss, mag_loss\n\n\nclass UniGlowLoss(Loss):\n \"\"\"A Loss module that computes loss for UniGlow\"\"\"\n\n def __init__(self, stft_loss_coef):\n super(UniGlowLoss, self).__init__()\n self.stft_loss = MultiResolutionSTFTLoss()\n self.stft_loss_coef = stft_loss_coef\n\n @property\n def input_types(self):\n return {\n \"z\": NeuralType(('B', 'flowgroup', 'T'), NormalDistributionSamplesType()),\n \"logdet\": NeuralType(elements_type=VoidType()),\n \"gt_audio\": NeuralType(('B', 'T'), AudioSignal()),\n \"predicted_audio\": NeuralType(('B', 'T'), AudioSignal()),\n \"sigma\": NeuralType(optional=True),\n }\n\n @property\n def output_types(self):\n return {\n \"loss\": NeuralType(elements_type=LossType()),\n }\n\n @typecheck()\n def forward(self, *, z, logdet, gt_audio, predicted_audio, 
sigma=1.0):\n nll_loss = torch.sum(z * z) / (2 * sigma * sigma) - logdet\n nll_loss = nll_loss / (z.size(0) * z.size(1) * z.size(2))\n sc_loss, mag_loss = self.stft_loss(predicted_audio, gt_audio)\n stft_loss = sc_loss + mag_loss\n loss = nll_loss + self.stft_loss_coef * stft_loss\n return loss\n", "# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# MIT License\n#\n# Copyright (c) 2020 Tianren Gao, Bohan Zhai, Flora Xue,\n# Daniel Rothchild, Bichen Wu, Joseph E. Gonzalez, Kurt Keutzer\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport torch\n\nfrom nemo.collections.tts.helpers.helpers import remove\nfrom nemo.collections.tts.modules.submodules import fused_add_tanh_sigmoid_multiply\n\n\ndef fuse_conv_and_bn(conv, bn):\n fusedconv = torch.nn.Conv1d(\n conv.in_channels,\n conv.out_channels,\n kernel_size=conv.kernel_size,\n padding=conv.padding,\n bias=True,\n groups=conv.groups,\n )\n w_conv = conv.weight.clone().view(conv.out_channels, -1)\n w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.eps + bn.running_var)))\n w_bn = w_bn.clone()\n fusedconv.weight.data = torch.mm(w_bn, w_conv).view(fusedconv.weight.size())\n if conv.bias is not None:\n b_conv = conv.bias\n else:\n b_conv = torch.zeros(conv.weight.size(0))\n b_bn = bn.bias - bn.weight.mul(bn.running_mean).div(torch.sqrt(bn.running_var + bn.eps))\n b_bn = torch.unsqueeze(b_bn, 1)\n bn_3 = b_bn.expand(-1, 3)\n b = torch.matmul(w_conv, torch.transpose(bn_3, 0, 1))[range(b_bn.size()[0]), range(b_bn.size()[0])]\n fusedconv.bias.data = b_conv + b\n return fusedconv\n\n\ndef remove_batchnorm(conv_list):\n new_conv_list = torch.nn.ModuleList()\n for old_conv in conv_list:\n depthwise = fuse_conv_and_bn(old_conv[1], old_conv[0])\n pointwise = old_conv[2]\n new_conv_list.append(torch.nn.Sequential(depthwise, pointwise))\n return new_conv_list\n\n\ndef remove_weightnorm(model):\n squeezewave = model\n for wavenet in squeezewave.wavenet:\n wavenet.start = torch.nn.utils.remove_weight_norm(wavenet.start)\n wavenet.in_layers = remove_batchnorm(wavenet.in_layers)\n wavenet.cond_layer = torch.nn.utils.remove_weight_norm(wavenet.cond_layer)\n wavenet.res_skip_layers = remove(wavenet.res_skip_layers)\n return squeezewave\n\n\nclass SqueezeWaveNet(torch.nn.Module):\n \"\"\"\n This is the WaveNet like layer for the affine coupling. The primary\n difference from WaveNet is the convolutions need not be causal. There is\n also no dilation size reset. The dilation only doubles on each layer\n \"\"\"\n\n def __init__(self, n_in_channels, n_mel_channels, n_layers, n_channels, kernel_size):\n super().__init__()\n assert kernel_size % 2 == 1\n assert n_channels % 2 == 0\n self.n_layers = n_layers\n self.n_channels = n_channels\n self.in_layers = torch.nn.ModuleList()\n self.res_skip_layers = torch.nn.ModuleList()\n self.upsample = torch.nn.Upsample(scale_factor=2, mode='nearest')\n\n start = torch.nn.Conv1d(n_in_channels, n_channels, 1)\n start = torch.nn.utils.weight_norm(start, name='weight')\n self.start = start\n\n # Initializing last layer to 0 makes the affine coupling layers\n # do nothing at first. 
This helps with training stability\n end = torch.nn.Conv1d(n_channels, 2 * n_in_channels, 1)\n end.weight.data.zero_()\n end.bias.data.zero_()\n self.end = end\n\n cond_layer = torch.nn.Conv1d(n_mel_channels, 2 * n_channels * n_layers, 1)\n self.cond_layer = torch.nn.utils.weight_norm(cond_layer, name='weight')\n\n padding = (kernel_size - 1) // 2\n for i in range(n_layers):\n self.in_layers.append(\n torch.nn.Sequential(\n torch.nn.BatchNorm1d(n_channels),\n torch.nn.Conv1d(n_channels, n_channels, kernel_size, padding=padding, groups=n_channels),\n torch.nn.Conv1d(n_channels, 2 * n_channels, 1),\n )\n )\n\n res_skip_layer = torch.nn.Conv1d(n_channels, n_channels, 1)\n res_skip_layer = torch.nn.utils.weight_norm(res_skip_layer, name='weight')\n self.res_skip_layers.append(res_skip_layer)\n\n def forward(self, forward_input):\n audio, spect = forward_input\n audio = self.start(audio)\n n_channels_tensor = torch.IntTensor([self.n_channels])\n\n spect = self.cond_layer(spect)\n\n for i in range(self.n_layers):\n spect_offset = i * 2 * self.n_channels\n cond = spect[:, spect_offset : spect_offset + 2 * self.n_channels, :]\n if cond.size(2) < audio.size(2):\n cond = self.upsample(cond)\n\n acts = fused_add_tanh_sigmoid_multiply(self.in_layers[i](audio), cond, n_channels_tensor)\n\n res_skip_acts = self.res_skip_layers[i](acts)\n audio = audio + res_skip_acts\n\n return self.end(audio)\n" ]
[ [ "torch.stack" ], [ "torch.stack" ], [ "torch.norm", "torch.nn.ModuleList", "torch.sum", "torch.log", "torch.clamp", "torch.stft" ], [ "torch.nn.Sequential", "torch.nn.BatchNorm1d", "torch.mm", "torch.transpose", "torch.sqrt", "torch.nn.utils.weight_norm", "torch.nn.ModuleList", "torch.IntTensor", "torch.nn.utils.remove_weight_norm", "torch.unsqueeze", "torch.nn.Upsample", "torch.nn.Conv1d" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
nlindqv/pytorch_RVAE
[ "d9e58134965f69aad557fb3bd2478500a51210f8", "d9e58134965f69aad557fb3bd2478500a51210f8" ]
[ "human_eval.py", "best_scores.py" ]
[ "import argparse\r\nimport os\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport torch as t\r\nfrom torch.optim import Adam\r\nimport pickle5 as pickle\r\nimport json\r\nimport random\r\n\r\nfrom sample import sample_with_input, sample_with_beam\r\nfrom utils.batch_loader import BatchLoader, clean_str\r\nfrom model.paraphraser import Paraphraser\r\nfrom model.generator import Generator\r\nfrom synonym_paraphraser import SynonymParaphraser\r\n\r\ndef main():\r\n parser = argparse.ArgumentParser(description='Paraphraser')\r\n parser.add_argument('--use-cuda', type=bool, default=False, metavar='CUDA', help='use cuda (default: False)')\r\n parser.add_argument('--seq-len', default=30, metavar='SL', help='max length of sequence (default: 30)')\r\n parser.add_argument('--ml', type=bool, default=True, metavar='ML', help='sample by maximum likelihood')\r\n\r\n args = parser.parse_args()\r\n\r\n # Read data\r\n if not os.path.exists('datasets/human_test.csv'):\r\n source_file = 'datasets/test.csv'\r\n source_data = pd.read_csv(source_file)[['question1', 'question2']]\r\n sentence_categories = [[] for _ in range(5)]\r\n for i in range(len(source_data)):\r\n\r\n sent = clean_str(source_data['question1'][i])\r\n sent_len = len(sent.split())\r\n if sent_len < 6:\r\n j = 0\r\n elif sent_len < 11:\r\n j = 1\r\n elif sent_len < 16:\r\n j = 2\r\n elif sent_len < 21:\r\n j = 3\r\n else:\r\n j = 4\r\n sentence_categories[j].append([source_data['question1'][i], source_data['question2'][i]])\r\n\r\n sample_data = []\r\n for category in sentence_categories:\r\n sample_data += random.sample(category, 20)\r\n source_data = pd.DataFrame(sample_data, columns=['question1', 'question2'])\r\n source_data.to_csv('datasets/human_test.csv')\r\n else:\r\n source_data = pd.read_csv('datasets/human_test_1.csv')[['question1', 'question2']]\r\n\r\n\r\n # Sample from Guptas original model\r\n batch_loader = BatchLoader()\r\n from model.parameters import Parameters\r\n parameters = Parameters(batch_loader.max_seq_len, batch_loader.vocab_size)\r\n paraphraser = Paraphraser(parameters)\r\n paraphraser.load_state_dict(t.load('saved_models/trained_paraphraser_ori_32', map_location=t.device('cpu')))\r\n\r\n samples_ori, target, source_ori = sample_with_input(batch_loader, paraphraser, args,\r\n decoder_only=True,\r\n file_name='datasets/human_test.csv')\r\n\r\n ref_items = generate_items(source_ori, target, 'ref')\r\n ori_items = generate_items(source_ori, samples_ori[0], 'ori')\r\n\r\n # Sample from Guptas model with two-path-loss\r\n batch_loader = BatchLoader()\r\n parameters = Parameters(batch_loader.max_seq_len, batch_loader.vocab_size, use_two_path_loss=True)\r\n paraphraser = Paraphraser(parameters)\r\n paraphraser.load_state_dict(t.load('saved_models/trained_paraphraser_tpl_16_32', map_location=t.device('cpu')))\r\n\r\n samples_tpl, target, source_tpl = sample_with_input(batch_loader, paraphraser, args,\r\n decoder_only=False,\r\n file_name='datasets/human_test.csv')\r\n tpl_items = generate_items(source_tpl, samples_tpl[0], 'tpl')\r\n\r\n # Sample from GAN model\r\n batch_loader = BatchLoader()\r\n from model.parametersGAN import Parameters\r\n parameters = Parameters(batch_loader.max_seq_len, batch_loader.vocab_size)\r\n paraphraser = Generator(parameters)\r\n paraphraser.load_state_dict(t.load('saved_models/trained_generator_gan_140k', map_location=t.device('cpu')))\r\n samples_gan, target, source_gan = sample_with_input(batch_loader, paraphraser, args,\r\n decoder_only=False,\r\n 
file_name='datasets/human_test.csv')\r\n gan_items = generate_items(source_gan, samples_gan[0], 'gan')\r\n\r\n # Sample from synonym model\r\n paraphraser = SynonymParaphraser()\r\n samples_synonym = paraphraser.generate_paraphrases('datasets/human_test.csv')\r\n base_items = generate_items(source_data['question1'], samples_synonym, 'base')\r\n\r\n all_items = ref_items + ori_items + tpl_items + gan_items + base_items\r\n\r\n eval_results = {'name' : 'Paraphrase Survey Full Ordered', 'items' : all_items}\r\n res = json.dumps(eval_results, ensure_ascii=False)\r\n with open('datasets/human_test_ordered.json', 'w') as f:\r\n f.write(res)\r\n\r\n random.shuffle(all_items)\r\n\r\n eval_results = {'name' : 'Paraphrase Survey Full Shuffled', 'items' : all_items}\r\n res = json.dumps(eval_results, ensure_ascii=False)\r\n with open('datasets/human_test_shuffled.json', 'w') as f:\r\n f.write(res)\r\n\r\n for i in range(10):\r\n eval_results = {'name' : f'Paraphrase Survey Part {i+1}/{10}', 'items' : all_items[i*50:((i+1)*50)-1]}\r\n res = json.dumps(eval_results, ensure_ascii=False)\r\n with open(f'datasets/human_test_p_{i}_{10}.json', 'w') as f:\r\n f.write(res)\r\n\r\ndef generate_items(original, paraphrase, model):\r\n items = []\r\n for i in range(len(original)):\r\n\r\n questions = 'Fråga 1: ' + original[i] + '?<br>Fråga 2: ' + paraphrase[i] + '?'\r\n item = {\r\n 'question' : questions,\r\n 'required' : True,\r\n 'extra' : {'model' : model},\r\n 'order': -1,\r\n 'answer_sets' : [\r\n {\r\n \"type\": \"radio\",\r\n \"name\": \"Fråga 1 är grammatiskt korrekt: \",\r\n \"choices\": [ \"0\", \"1\", \"2\", \"3\"]\r\n },\r\n {\r\n \"type\": \"radio\",\r\n \"name\": \"Fråga 2 är grammatiskt korrekt: \",\r\n \"choices\": [ \"0\", \"1\", \"2\", \"3\"]\r\n },\r\n {\r\n \"type\": \"radio\",\r\n \"name\": \"Fråga 2 är betyder samma sak som Fråga 1: \",\r\n \"choices\": [ \"0\", \"1\", \"2\", \"3\"]\r\n }]\r\n }\r\n items.append(item)\r\n return items\r\n\r\n\r\n\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n", "import argparse, re\r\nimport os, os.path, subprocess\r\nfrom subprocess import *\r\nfrom nlgeval import NLGEval\r\n\r\nimport numpy as np\r\nimport torch as t\r\nfrom torch.optim import Adam\r\n\r\nfrom sample import sample_with_input, sample_with_beam\r\nfrom utils.batch_loader import BatchLoader\r\nfrom model.paraphraser import Paraphraser\r\nfrom model.generator import Generator\r\n\r\nTEST_DATA_SIZE = 4000\r\n\r\ndef create_files(model_name):\r\n path = f'logs/{model_name}/samples'\r\n # Create locations to store samples\r\n if not os.path.isdir(path + '/tmp/'):\r\n os.mkdir(path + '/tmp/')\r\n\r\n for file_name in os.listdir(path):\r\n if os.path.isdir(path + '/' + file_name):\r\n continue\r\n sentences = list(np.loadtxt(path + '/' + file_name, dtype='U', delimiter='\\n'))\r\n for idx, sentence in enumerate(sentences):\r\n if not os.path.isdir(path + '/tmp/' + str(idx) + '/'):\r\n os.mkdir(path + '/tmp/' + str(idx) + '/')\r\n\r\n if file_name.startswith(\"sampled\"):\r\n sample_nr = re.findall('\\d*\\.?\\d+', file_name)[0]\r\n np.savetxt(f'{path}/tmp/{idx}/sampled_{sample_nr}', np.array([sentence]), delimiter='\\n', fmt='%s')\r\n elif file_name.startswith(\"source\"):\r\n np.savetxt(f'{path}/tmp/{idx}/source', np.array([sentence]), delimiter='\\n', fmt='%s')\r\n\r\ndef avg_meteor(model_name, mode, num_samples):\r\n sampled_file_dst = []\r\n for i in range(num_samples):\r\n sampled_file_dst.append(f'logs/{model_name}/samples/sampled_{mode}_{i}.txt')\r\n\r\n target_file_dst = 
f'logs/{model_name}/samples/target_{mode}.txt'\r\n\r\n scores = []\r\n for i in range(num_samples):\r\n args = ['multeval-0.5.1/lib/meteor-1.4/meteor-1.4.jar', sampled_file_dst[i], target_file_dst, '-l', 'se']\r\n scores.append(jarWrapper(*args))\r\n\r\n avg_score = sum(scores) / len(scores)\r\n\r\n print(f'Model: {model_name}, score: {avg_score}')\r\n\r\n\r\ndef meteor(model_name, mode, num_samples):\r\n sampled_file_dst = []\r\n for i in range(num_samples):\r\n sampled_file_dst.append(f'logs/{model_name}/samples/sampled_{mode}_{i}.txt')\r\n\r\n source_file_dst = f'logs/{model_name}/samples/source_{mode}.txt'\r\n\r\n sampled = [list(np.loadtxt(sampled_file_dst[i], dtype='U', delimiter='\\n')) for i in range(num_samples)]\r\n source = list(np.loadtxt(source_file_dst, dtype='U', delimiter='\\n'))\r\n\r\n if not os.path.isdir(f'logs/{model_name}/samples/tmp/'):\r\n os.mkdir(f'logs/{model_name}/samples/tmp/')\r\n print(len(source), len(sampled), [len(sampled[i]) for i in range(len(sampled))])\r\n\r\n best_meteor = []\r\n for i in range(len(source)):\r\n curr_best = ('', 0.0)\r\n np.savetxt(f'logs/{model_name}/samples/tmp/source', np.array([source[i]]), delimiter='\\n', fmt='%s')\r\n for j in range(num_samples):\r\n np.savetxt(f'logs/{model_name}/samples/tmp/sample', np.array([sampled[j][i]]), delimiter='\\n', fmt='%s')\r\n args = ['multeval-0.5.1/lib/meteor-1.4/meteor-1.4.jar', f'logs/{model_name}/samples/tmp/sample', f'logs/{model_name}/samples/tmp/source', '-l', 'se']\r\n score = jarWrapper(*args)\r\n if score > curr_best[1]:\r\n curr_best = (sampled[j][i], score)\r\n print(f'Sentence pair {i}.{j}:')\r\n print('source : ', source[i])\r\n print('sampled : ', sampled[j][i])\r\n print('\\n')\r\n best_meteor.append(curr_best[0])\r\n np.savetxt(f'logs/{model_name}/samples/best_meteor_{mode}', np.array(best_meteor), delimiter='\\n', fmt='%s')\r\n\r\ndef jarWrapper(*args):\r\n process = Popen(['java', '-jar']+list(args), stdout=PIPE, stderr=PIPE)\r\n ret = []\r\n while process.poll() is None:\r\n line = process.stdout.readline()\r\n if line != '' and line.endswith(b'\\n'):\r\n ret.append(line[:-1])\r\n stdout, stderr = process.communicate()\r\n ret += stdout.split(b'\\n')\r\n ret.remove(b'')\r\n for i in range(len(ret)):\r\n ret[i] = ret[i].decode()\r\n ref = ''\r\n i = -1\r\n while 'Final score' not in ref:\r\n i += 1\r\n ref = ret[i]\r\n score = float(re.findall('\\d*\\.?\\d+', ref)[0])\r\n\r\n return score\r\n\r\ndef bleu(model_name, mode, num_samples):\r\n sampled_file_dst = []\r\n for i in range(num_samples):\r\n sampled_file_dst.append(f'logs/{model_name}/samples/sampled_{mode}_{i}.txt')\r\n\r\n source_file_dst = f'logs/{model_name}/samples/source_{mode}.txt'\r\n\r\n sampled = [list(np.loadtxt(sampled_file_dst[i], dtype='U', delimiter='\\n')) for i in range(num_samples)]\r\n source = list(np.loadtxt(source_file_dst, dtype='U', delimiter='\\n'))\r\n\r\n nlgeval = NLGEval(metrics_to_omit=['METEOR', 'ROUGE_L', 'CIDEr', 'SkipThoughtCS'])\r\n best_bleu = []\r\n print(len(source), len(sampled), [len(sampled[i]) for i in range(len(sampled))])\r\n for i in range(len(source)):\r\n curr_best = ('', 0.0)\r\n for j in range(num_samples):\r\n score = nlgeval.compute_individual_metrics([source[i]], sampled[j][i])['Bleu_4']\r\n if score > curr_best[1]:\r\n curr_best = (sampled[j][i], score)\r\n if i % 100 == 0:\r\n print(f'Sentence pair {i}.{j}:')\r\n print('source : ', source[i])\r\n print('sampled : ', curr_best[0])\r\n print('score : ', curr_best[1])\r\n print('\\n')\r\n 
best_bleu.append(curr_best[0])\r\n np.savetxt(f'logs/{model_name}/samples/best_bleu_{mode}', np.array(best_bleu), delimiter='\\n', fmt='%s')\r\n\r\n\r\n\r\ndef main():\r\n parser = argparse.ArgumentParser(description='Paraphraser')\r\n parser.add_argument('--model-name', default='', metavar='MN', help='name of model to save (default: \"\")')\r\n parser.add_argument('--metric', default='meteor', metavar='M', help='sample by maximum likelihood')\r\n parser.add_argument('--mode', default='ml', metavar='MD', help='sample by maximum likelihood')\r\n parser.add_argument('--avg', type=bool, default=False, metavar='A', help='sample by maximum likelihood')\r\n\r\n args = parser.parse_args()\r\n\r\n if not args.avg:\r\n if args.metric == 'meteor':\r\n meteor(args.model_name, args.mode, (10 if args.mode == 'ml' else 5))\r\n elif args.metric == 'bleu':\r\n bleu(args.model_name, args.mode, (10 if args.mode == 'ml' else 5))\r\n else:\r\n if args.model_name == '':\r\n models = ['ori_32_50k', 'ori_32_100k', 'ori_32', 'tpl_16_32_50k', 'tpl_16_32_100k', 'tpl_16_32', 'gan_50k', 'gan_100k', 'gan_140k']\r\n for model in models:\r\n avg_meteor(model, args.mode, (10 if args.mode == 'ml' else 5))\r\n else:\r\n avg_meteor(args.model_name, args.mode, (10 if args.mode == 'ml' else 5))\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n" ]
[ [ "torch.device", "pandas.read_csv", "pandas.DataFrame" ], [ "numpy.array", "numpy.loadtxt" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
denis19973/Keras-RFCN
[ "5e1fdaf197b3a93c22a82d9476a3f9a1c804e398" ]
[ "Fashion_Test.py" ]
[ "\"\"\"\nKeras RFCN\nCopyright (c) 2018\nLicensed under the MIT License (see LICENSE for details)\nWritten by [email protected]\n\"\"\"\n\n'''\nThis is a demo to Eval a RFCN model with DeepFashion Dataset\nhttp://mmlab.ie.cuhk.edu.hk/projects/DeepFashion.html\n'''\n\nfrom KerasRFCN.Model.Model import RFCN_Model\nfrom KerasRFCN.Config import Config\nimport KerasRFCN.Utils \nimport os\nfrom keras.preprocessing import image\nimport pickle\nimport numpy as np\nimport argparse\nimport matplotlib.pyplot as plt\nimport matplotlib.patches as patches\n\nclass RFCNNConfig(Config):\n \"\"\"Configuration for training on the toy shapes dataset.\n Derives from the base Config class and overrides values specific\n to the toy shapes dataset.\n \"\"\"\n # Give the configuration a recognizable name\n NAME = \"Fashion\"\n\n # Backbone model\n # choose one from ['resnet50', 'resnet101', 'resnet50_dilated', 'resnet101_dilated']\n BACKBONE = \"resnet101\"\n \n # Train on 1 GPU and 8 images per GPU. We can put multiple images on each\n # GPU because the images are small. Batch size is 8 (GPUs * images/GPU).\n GPU_COUNT = 1\n IMAGES_PER_GPU = 1\n\n # Number of classes (including background)\n C = 1 + 46 # background + 2 tags\n NUM_CLASSES = C\n # Use small images for faster training. Set the limits of the small side\n # the large side, and that determines the image shape.\n IMAGE_MIN_DIM = 640\n IMAGE_MAX_DIM = 768\n\n # Use smaller anchors because our image and objects are small\n RPN_ANCHOR_SCALES = (32, 64, 128, 256, 512) # anchor side in pixels\n # Use same strides on stage 4-6 if use dilated resnet of DetNet\n # Like BACKBONE_STRIDES = [4, 8, 16, 16, 16]\n BACKBONE_STRIDES = [4, 8, 16, 32, 64]\n # Reduce training ROIs per image because the images are small and have\n # few objects. 
Aim to allow ROI sampling to pick 33% positive ROIs.\n TRAIN_ROIS_PER_IMAGE = 200\n\n # Use a small epoch since the data is simple\n STEPS_PER_EPOCH = 100\n\n # use small validation steps since the epoch is small\n VALIDATION_STEPS = 5\n\n RPN_NMS_THRESHOLD = 0.7\n\n DETECTION_MIN_CONFIDENCE = 0.4\n POOL_SIZE = 7\n\n\ndef Test(model, loadpath, savepath):\n assert not loadpath == savepath, \"loadpath should'n same with savepath\"\n\n model_path = model.find_last()[1]\n # Load trained weights (fill in path to trained weights here)\n \n model.load_weights(model_path, by_name=True)\n print(\"Loading weights from \", model_path)\n\n if os.path.isdir(loadpath):\n for idx, imgname in enumerate(os.listdir(loadpath)):\n if not imgname.lower().endswith(('.bmp', '.jpeg', '.jpg', '.png', '.tif', '.tiff')):\n continue\n print(imgname)\n imageoriChannel = np.array(plt.imread( os.path.join(loadpath, imgname) )) / 255.0\n img = image.img_to_array( image.load_img(os.path.join(loadpath, imgname)) )\n TestSinglePic(img, imageoriChannel, model, savepath=savepath, imgname=imgname)\n \n elif os.path.isfile(loadpath):\n if not loadpath.lower().endswith(('.bmp', '.jpeg', '.jpg', '.png', '.tif', '.tiff')):\n print(\"not image file!\")\n return\n print(loadpath)\n imageoriChannel = np.array(plt.imread( loadpath )) / 255.0\n img = image.img_to_array( image.load_img(loadpath) )\n (filename,extension) = os.path.splitext(loadpath)\n TestSinglePic(img, imageoriChannel, model, savepath=savepath, imgname=filename)\n \ndef TestSinglePic(image, image_ori, model, savepath, imgname):\n r = model.detect([image], verbose=1)[0]\n print(r)\n def get_ax(rows=1, cols=1, size=8):\n _, ax = plt.subplots(rows, cols, figsize=(size*cols, size*rows))\n return ax\n\n ax = get_ax(1)\n\n assert not savepath == \"\", \"empty save path\"\n assert not imgname == \"\", \"empty image file name\"\n\n for box in r['rois']:\n y1, x1, y2, x2 = box\n p = patches.Rectangle((x1, y1), x2 - x1, y2 - y1, linewidth=2,\n alpha=0.7, linestyle=\"dashed\",\n edgecolor=\"red\", facecolor='none')\n ax.add_patch(p)\n ax.imshow(image_ori)\n\n plt.savefig(os.path.join(savepath, imgname),bbox_inches='tight')\n plt.clf()\n\nif __name__ == '__main__':\n ROOT_DIR = os.getcwd()\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--loadpath', required=False,\n default=\"images/\",\n metavar=\"evaluate images loadpath\",\n help=\"evaluate images loadpath\")\n parser.add_argument('--savepath', required=False,\n default=\"result/\",\n metavar=\"evaluate images savepath\",\n help=\"evaluate images savepath\")\n\n config = RFCNNConfig()\n args = parser.parse_args()\n\n model = RFCN_Model(mode=\"inference\", config=config,\n model_dir=os.path.join(ROOT_DIR, \"logs\") )\n\n Test(model, args.loadpath, args.savepath)" ]
[ [ "matplotlib.pyplot.imread", "matplotlib.patches.Rectangle", "matplotlib.pyplot.clf", "matplotlib.pyplot.subplots" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
KuKuXia/DeepLearningMugenKnock
[ "979cf05e65e352da36453337380a418a2a2fdccb" ]
[ "Question_prepare/answers/answer_rotation.py" ]
[ "import cv2\nimport numpy as np\nfrom glob import glob\nimport matplotlib.pyplot as plt\n\nnp.random.seed(0)\n\nnum_classes = 2\nimg_height, img_width = 64, 64\n\nCLS = ['akahara', 'madara']\n\n# get train data\ndef data_load(path, hf=False, vf=False, rot=None):\n xs = []\n ts = []\n paths = []\n \n for dir_path in glob(path + '/*'):\n for path in glob(dir_path + '/*'):\n x = cv2.imread(path)\n x = cv2.resize(x, (img_width, img_height)).astype(np.float32)\n x /= 255.\n x = x[..., ::-1]\n xs.append(x)\n\n for i, cls in enumerate(CLS):\n if cls in path:\n t = i\n \n ts.append(t)\n\n paths.append(path)\n\n if hf:\n xs.append(x[:, ::-1])\n ts.append(t)\n paths.append(path)\n\n if vf:\n xs.append(x[::-1])\n ts.append(t)\n paths.append(path)\n\n if hf and vf:\n xs.append(x[::-1, ::-1])\n ts.append(t)\n paths.append(path)\n\n if rot is not None:\n angle = rot\n scale = 1\n\n # show\n a_num = 360 // rot\n w_num = np.ceil(np.sqrt(a_num))\n h_num = np.ceil(a_num / w_num)\n count = 1\n plt.subplot(h_num, w_num, count)\n plt.axis('off')\n plt.imshow(x)\n plt.title(\"angle=0\")\n \n while angle < 360:\n _h, _w, _c = x.shape\n max_side = max(_h, _w)\n tmp = np.zeros((max_side, max_side, _c))\n tx = int((max_side - _w) / 2)\n ty = int((max_side - _h) / 2)\n tmp[ty: ty+_h, tx: tx+_w] = x.copy()\n M = cv2.getRotationMatrix2D((max_side/2, max_side/2), angle, scale)\n _x = cv2.warpAffine(tmp, M, (max_side, max_side))\n _x = _x[tx:tx+_w, ty:ty+_h]\n xs.append(x)\n ts.append(t)\n paths.append(path)\n\n # show\n count += 1\n plt.subplot(h_num, w_num, count)\n plt.imshow(_x)\n plt.axis('off')\n plt.title(\"angle={}\".format(angle))\n\n angle += rot\n plt.show()\n\n\n xs = np.array(xs, dtype=np.float32)\n ts = np.array(ts, dtype=np.int)\n \n xs = xs.transpose(0,3,1,2)\n\n return xs, ts, paths\n\n\nxs, ts, paths = data_load(\"../Dataset/train/images/\", hf=True, vf=True, rot=1)\n\nmb = 3\nmbi = 0\ntrain_ind = np.arange(len(xs))\nnp.random.seed(0)\nnp.random.shuffle(train_ind)\n\nfor i in range(10):\n if mbi + mb > len(xs):\n mb_ind = train_ind[mbi:]\n np.random.shuffle(train_ind)\n mb_ind = np.hstack((mb_ind, train_ind[:(mb-(len(xs)-mbi))]))\n mbi = mb - (len(xs) - mbi)\n else:\n mb_ind = train_ind[mbi: mbi+mb]\n mbi += mb\n\n print(mb_ind)\n" ]
[ [ "matplotlib.pyplot.imshow", "numpy.sqrt", "numpy.random.seed", "matplotlib.pyplot.title", "numpy.random.shuffle", "numpy.ceil", "matplotlib.pyplot.subplot", "matplotlib.pyplot.axis", "numpy.array", "numpy.zeros", "matplotlib.pyplot.show" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
mariuslindegaard/6.867_MARL_project
[ "572b88b4d491db8a1673535868f4bf9aff58f73d" ]
[ "src/modules/agents/noisy_agents.py" ]
[ "import torch.nn as nn\nimport torch.nn.functional as F\nfrom utils.noisy_liner import NoisyLinear\nfrom torch.nn import LayerNorm\n\nclass NoisyRNNAgent(nn.Module):\n def __init__(self, input_shape, args):\n super(NoisyRNNAgent, self).__init__()\n self.args = args\n\n self.fc1 = nn.Linear(input_shape, args.rnn_hidden_dim)\n self.rnn = nn.GRUCell(args.rnn_hidden_dim, args.rnn_hidden_dim)\n self.fc2 = NoisyLinear(args.rnn_hidden_dim, args.n_actions, True, args.device)\n\n if getattr(args, \"use_layer_norm\", False):\n self.layer_norm = LayerNorm(args.rnn_hidden_dim)\n\n def init_hidden(self):\n # make hidden states on same device as model\n return self.fc1.weight.new(1, self.args.rnn_hidden_dim).zero_()\n\n def forward(self, inputs, hidden_state):\n b, a, e = inputs.size()\n \n inputs = inputs.view(-1, e)\n x = F.relu(self.fc1(inputs), inplace=True)\n h_in = hidden_state.reshape(-1, self.args.rnn_hidden_dim)\n hh = self.rnn(x, h_in)\n\n if getattr(self.args, \"use_layer_norm\", False):\n q = self.fc2(self.layer_norm(hh))\n else:\n q = self.fc2(hh)\n\n return q.view(b, a, -1), hh.view(b, a, -1)" ]
[ [ "torch.nn.Linear", "torch.nn.GRUCell", "torch.nn.LayerNorm" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
aasensio/bayesDI
[ "4ddad57d89c3512b4c4ee5684ddc5608060ebdec" ]
[ "modules/flow.py" ]
[ "import numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom nflows import transforms, distributions, flows, utils\nimport nflows.nn.nets as nn_\nimport matplotlib.pyplot as pl\nfrom modules import resnet\n\n# https://github.com/stephengreen/lfi-gw/blob/master/lfigw/nde_flows.py\n\ndef create_linear_transform(input_dim):\n \"\"\"Create the composite linear transform PLU.\n Arguments:\n input_dim {int} -- dimension of the space\n Returns:\n Transform -- nde.Transform object\n \"\"\"\n \n permutation = transforms.RandomPermutation(features = input_dim)\n linear = transforms.LULinear(input_dim, identity_init=True)\n\n return transforms.CompositeTransform([permutation, linear])\n\ndef create_base_transform(i, \n input_dim, \n context_dim,\n hidden_dim=512,\n num_transform_blocks=2,\n activation='relu',\n dropout_probability=0.0,\n batch_norm=False,\n num_bins=8,\n tail_bound=1.,\n apply_unconditional_transform=False,\n base_transform_type='rq-coupling',\n transform_net='conv'):\n\n \"\"\"Build a base NSF transform of x, conditioned on y.\n This uses the PiecewiseRationalQuadraticCoupling transform or\n the MaskedPiecewiseRationalQuadraticAutoregressiveTransform, as described\n in the Neural Spline Flow paper (https://arxiv.org/abs/1906.04032).\n Code is adapted from the uci.py example from\n https://github.com/bayesiains/nsf.\n A coupling flow fixes half the components of x, and applies a transform\n to the remaining components, conditioned on the fixed components. This is\n a restricted form of an autoregressive transform, with a single split into\n fixed/transformed components.\n The transform here is a neural spline flow, where the flow is parametrized\n by a residual neural network that depends on x_fixed and y. The residual\n network consists of a sequence of two-layer fully-connected blocks.\n Arguments:\n i {int} -- index of transform in sequence\n param_dim {int} -- dimensionality of x\n Keyword Arguments:\n context_dim {int} -- dimensionality of y (default: {None})\n hidden_dim {int} -- number of hidden units per layer (default: {512})\n num_transform_blocks {int} -- number of transform blocks comprising the\n transform (default: {2})\n activation {str} -- activation function (default: {'relu'})\n dropout_probability {float} -- probability of dropping out a unit\n (default: {0.0})\n batch_norm {bool} -- whether to use batch normalization\n (default: {False})\n num_bins {int} -- number of bins for the spline (default: {8})\n tail_bound {[type]} -- [description] (default: {1.})\n apply_unconditional_transform {bool} -- whether to apply an\n unconditional transform to\n fixed components\n (default: {False})\n base_transform_type {str} -- type of base transform\n ([rq-coupling], rq-autoregressive)\n Returns:\n Transform -- the NSF transform\n \"\"\"\n\n if activation == 'elu':\n activation_fn = F.elu\n elif activation == 'relu':\n activation_fn = F.relu\n elif activation == 'leaky_relu':\n activation_fn = F.leaky_relu\n else:\n activation_fn = F.relu # Default\n print('Invalid activation function specified. 
Using ReLU.')\n\n if base_transform_type == 'rq-coupling':\n\n mask = utils.create_alternating_binary_mask(input_dim, even=(i % 2 == 0))\n\n if (transform_net == 'fc'):\n transform_net = lambda in_features, out_features: nn_.ResidualNet(\n in_features = in_features,\n out_features = out_features,\n hidden_features = hidden_dim,\n context_features = context_dim,\n num_blocks = num_transform_blocks,\n activation = activation_fn,\n dropout_probability = dropout_probability,\n use_batch_norm = batch_norm)\n\n if (transform_net == 'conv'):\n transform_net = lambda in_features, out_features: resnet.ConvResidualNet1d(\n in_channels = 1,\n out_channels = out_features // in_features,\n hidden_channels = hidden_dim,\n context_channels = context_dim,\n num_blocks = num_transform_blocks,\n activation = activation_fn,\n dropout_probability = dropout_probability,\n use_batch_norm = batch_norm)\n\n transform = transforms.PiecewiseRationalQuadraticCouplingTransform(\n mask = mask,\n transform_net_create_fn = transform_net,\n num_bins = num_bins,\n tails = 'linear',\n tail_bound = tail_bound,\n apply_unconditional_transform = apply_unconditional_transform\n )\n\n elif base_transform_type == 'rq-autoregressive':\n transform = transforms.MaskedPiecewiseRationalQuadraticAutoregressiveTransform(\n features=input_dim,\n hidden_features=hidden_dim,\n context_features=context_dim,\n num_bins=num_bins,\n tails='linear',\n tail_bound=tail_bound,\n num_blocks=num_transform_blocks,\n use_residual_blocks=True,\n random_mask=False,\n activation=activation_fn,\n dropout_probability=dropout_probability,\n use_batch_norm=batch_norm\n )\n else:\n raise ValueError\n\n return transform\n\ndef create_transform(input_dim, context_dim, num_flow_steps, base_transform_kwargs):\n \"\"\"Build a sequence of NSF transforms, which maps parameters x into the\n base distribution u (noise). Transforms are conditioned on strain data y.\n Note that the forward map is f^{-1}(x, y).\n Each step in the sequence consists of\n * A linear transform of x, which in particular permutes components\n * A NSF transform of x, conditioned on y.\n There is one final linear transform at the end.\n This function was adapted from the uci.py example in\n https://github.com/bayesiains/nsf\n Arguments:\n num_flow_steps {int} -- number of transforms in sequence\n param_dim {int} -- dimensionality of x\n context_dim {int} -- dimensionality of y\n base_transform_kwargs {dict} -- hyperparameters for NSF step\n Returns:\n Transform -- the constructed transform\n \"\"\"\n\n transform = transforms.CompositeTransform([\n transforms.CompositeTransform([\n create_linear_transform(input_dim),\n create_base_transform(i, input_dim, context_dim=context_dim, **base_transform_kwargs)\n ]) for i in range(num_flow_steps)] + [create_linear_transform(input_dim)])\n\n return transform\n\ndef fun(input_dim):\n \n return fun\n\ndef create_nsf_model(input_dim, context_dim, num_flow_steps, base_transform_kwargs, learn_normal=False):\n\n \"\"\"Build NSF (neural spline flow) model. 
This uses the nsf module\n available at https://github.com/bayesiains/nsf.\n This models the posterior distribution p(x|y).\n The model consists of\n * a base distribution (StandardNormal, dim(x))\n * a sequence of transforms, each conditioned on y\n Arguments:\n input_dim {int} -- dimensionality of x\n context_dim {int} -- dimensionality of y\n num_flow_steps {int} -- number of sequential transforms\n base_transform_kwargs {dict} -- hyperparameters for transform steps\n Returns:\n Flow -- the model\n \"\"\"\n \n # Define a base distribution.\n if (learn_normal):\n base_distribution = distributions.DiagonalNormal(shape=(input_dim,))\n else:\n base_distribution = distributions.StandardNormal(shape=(input_dim,))\n # if (sigma_base != 1):\n # def fun2(x): \n # n_batch, n = x.shape\n # return torch.cat([torch.zeros((n_batch, input_dim), device=x.device), sigma_base * torch.ones((n_batch, input_dim), device=x.device)], dim=1)\n # base_distribution = distributions.ConditionalDiagonalNormal(shape=(input_dim,), context_encoder=fun2)\n \n # Define the neural spline transform\n transform = create_transform(input_dim, context_dim, num_flow_steps, base_transform_kwargs)\n\n # Create the flow\n flow = flows.Flow(transform=transform, distribution=base_distribution)\n\n # Add the hyperparameters for reconstructing the model after loading\n flow.model_hyperparams = {\n 'input_dim': input_dim,\n 'num_flow_steps': num_flow_steps,\n 'context_dim': context_dim,\n 'base_transform_kwargs': base_transform_kwargs\n }\n \n return flow\n\ndef obtain_samples(flow, y, nsamples, device=None, batch_size=512):\n \"\"\"Draw samples from the posterior.\n Arguments:\n flow {Flow} -- NSF model\n y {array} -- strain data\n nsamples {int} -- number of samples desired\n Keyword Arguments:\n device {torch.device} -- model device (CPU or GPU) (default: {None})\n batch_size {int} -- batch size for sampling (default: {512})\n Returns:\n Tensor -- samples\n \"\"\"\n\n with torch.no_grad():\n flow.eval()\n\n y = torch.from_numpy(y).unsqueeze(0).to(device)\n\n num_batches = nsamples // batch_size\n num_leftover = nsamples % batch_size\n\n samples = [flow.sample(batch_size, y) for _ in range(num_batches)]\n if num_leftover > 0:\n samples.append(flow.sample(num_leftover, y))\n\n # The batching in the nsf package seems screwed up, so we had to do it\n # ourselves, as above. They are concatenating on the wrong axis.\n\n # samples = flow.sample(nsamples, context=y, batch_size=batch_size)\n\n return torch.cat(samples, dim=1)[0]\n \n\nif (__name__ == '__main__'):\n \n base_transform_kwargs = {\n 'hidden_dim': 50,\n 'num_transform_blocks': 2,\n 'activation': 'relu',\n 'dropout_probability': 0.0,\n 'batch_norm': False,\n 'num_bins': 10,\n 'tail_bound': 3.0,\n 'apply_unconditional_transform': False\n }\n model = create_nsf_model(20, 1, 3, base_transform_kwargs)\n\n # context = np.array([[2.]])\n # context = torch.tensor(context.astype('float32'))\n\n # samples = model.sample(5000, context).detach().cpu().numpy()\n # pl.plot(samples[0,:,0], samples[0,:,1], '.')\n # pl.show()" ]
[ [ "torch.no_grad", "torch.from_numpy", "torch.cat" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
zhaipro/MySceneDetect
[ "fbbe085b05e916d52253ffddd91848c3e85b2fe9" ]
[ "scenedetect/main.py" ]
[ "import sys\nimport time\n\nimport cv2\nimport numpy as np\n\n\ndef scenedetect(cap, threshold=30, min_scene_len=15):\n w = cap.get(cv2.CAP_PROP_FRAME_WIDTH)\n downscale_factor = int(w / 200)\n last_hsv = None\n first = 0\n curr = 0\n\n while True:\n ret, im = cap.read()\n if not ret:\n break\n\n curr_hsv = im[::downscale_factor, ::downscale_factor]\n curr_hsv = cv2.cvtColor(curr_hsv, cv2.COLOR_BGR2HSV)\n curr_hsv = curr_hsv.astype('int32')\n if last_hsv is not None:\n delta_hsv = np.mean(np.abs(curr_hsv - last_hsv))\n if delta_hsv >= threshold and curr - first >= min_scene_len:\n yield first, curr, delta_hsv\n first = curr\n\n last_hsv = curr_hsv\n curr += 1\n yield first, curr, 0\n\n\nfn = 'video.rmvb'\ncap = cv2.VideoCapture(fn)\nstart = time.time()\nfor first, last, delta_hsv in scenedetect(cap):\n print(first, last, delta_hsv)\nprint(time.time() - start)\ncap.release()\n" ]
[ [ "numpy.abs" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
lightyang/tensorflow
[ "14c58e1d380b2001ffdf7ef782d44ad1a21f763c" ]
[ "tensorflow/python/keras/layers/preprocessing/categorical.py" ]
[ "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Keras categorical preprocessing layers.\"\"\"\n# pylint: disable=g-classes-have-attributes\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\n\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.framework import sparse_tensor\nfrom tensorflow.python.framework import tensor_shape\nfrom tensorflow.python.framework import tensor_spec\nfrom tensorflow.python.keras.engine.base_layer import Layer\nfrom tensorflow.python.ops import lookup_ops\nfrom tensorflow.python.ops import sparse_ops\nfrom tensorflow.python.ops import string_ops\n\n\nclass CategoryLookup(Layer):\n \"\"\"Category lookup layer.\n\n This layer looks up tokens (int or string) in a vocabulary table,\n and return their indices (int). It converts a sequence of int or string to a\n sequence of int.\n\n Attributes:\n max_tokens: The maximum size of the vocabulary for this layer. If None,\n there is no cap on the size of the vocabulary. This is used when `adapt`\n is called.\n num_oov_tokens: Non-negative integer. The number of out-of-vocab tokens. All\n out-of-vocab inputs will be assigned IDs in the range of [0,\n num_oov_tokens) based on a hash.\n vocabulary: The vocabulary to lookup the input. If it is a file, it\n represents the source vocab file; If it is a list/tuple, it represents the\n source vocab list. If it is None, the vocabulary can later be set.\n name: Name to give to the layer.\n **kwargs: Keyword arguments to construct a layer.\n Input shape: A string or int tensor of shape `[batch_size, d1, ..., dm]`\n Output shape: An int tensor of shape `[batch_size, d1, .., dm]`\n Example: Consider a batch of a single input sample, `[[\"a\", \"c\", \"d\", \"a\",\n \"x\"]]`. Let's say the vocabulary is `[\"a\", \"b\", \"c\", \"d\"]` and a single OOV\n token is used (`num_oov_tokens=1`). Then the corresponding output is `[[1,\n 3, 4, 1, 0]]`. 
0 stands for an OOV token.\n \"\"\"\n\n def __init__(self,\n max_tokens=None,\n num_oov_tokens=1,\n vocabulary=None,\n name=None,\n **kwargs):\n if max_tokens is not None:\n raise ValueError('`max_tokens` and `adapt` is not supported yet.')\n if vocabulary is None:\n raise ValueError('for now, you must pass a `vocabulary` argument')\n self.max_tokens = max_tokens\n self.num_oov_tokens = num_oov_tokens\n self.vocabulary = vocabulary\n super(CategoryLookup, self).__init__(name=name, **kwargs)\n\n def __call__(self, inputs, *args, **kwargs):\n if isinstance(inputs, (np.ndarray, float, int)):\n inputs = ops.convert_to_tensor(inputs)\n self._input_dtype = inputs.dtype\n return super(CategoryLookup, self).__call__(inputs, *args, **kwargs)\n\n def build(self, input_shape):\n # categorical with vocabulary list.\n if isinstance(self.vocabulary, (tuple, list, np.ndarray)):\n self.table = lookup_ops.index_table_from_tensor(\n vocabulary_list=self.vocabulary,\n num_oov_buckets=self.num_oov_tokens,\n dtype=self._input_dtype)\n # categorical with vocabulary file.\n elif self.vocabulary:\n self.table = lookup_ops.index_table_from_file(\n vocabulary_file=self.vocabulary,\n num_oov_buckets=self.num_oov_tokens,\n key_dtype=self._input_dtype)\n\n def call(self, inputs):\n return self.table.lookup(inputs)\n\n def compute_output_shape(self, input_shape):\n return input_shape\n\n def compute_output_signature(self, input_spec):\n output_shape = self.compute_output_shape(input_spec.shape.as_list())\n output_dtype = dtypes.int64\n if isinstance(input_spec, sparse_tensor.SparseTensorSpec):\n return sparse_tensor.SparseTensorSpec(\n shape=output_shape, dtype=output_dtype)\n else:\n return tensor_spec.TensorSpec(shape=output_shape, dtype=output_dtype)\n\n def get_config(self):\n config = {\n 'max_tokens': self.max_tokens,\n 'num_oov_tokens': self.num_oov_tokens,\n 'vocabulary': self.vocabulary\n }\n base_config = super(CategoryLookup, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\n\nclass CategoryCrossing(Layer):\n \"\"\"Category crossing layer.\n\n This layer transforms multiple categorical inputs to categorical outputs\n by Cartesian product, and hash the output if necessary. Without hashing\n (`num_bins=None`) the output dtype is string, with hashing the output dtype\n is int64.\n\n Arguments:\n depth: depth of input crossing. By default None, all inputs are crossed into\n one output. It can also be an int or tuple/list of ints. Passing an\n integer will create combinations of crossed outputs with depth up to that\n integer, i.e., [1, 2, ..., `depth`), and passing a tuple of integers will\n create crossed outputs with depth for the specified values in the tuple,\n i.e., `depth`=(N1, N2) will create all possible crossed outputs with depth\n equal to N1 or N2. Passing `None` means a single crossed output with all\n inputs. For example, with inputs `a`, `b` and `c`, `depth=2` means the\n output will be [a;b;c;cross(a, b);cross(bc);cross(ca)].\n num_bins: Number of hash bins. 
By default None, no hashing is performed.\n name: Name to give to the layer.\n **kwargs: Keyword arguments to construct a layer.\n\n Input shape: a list of string or int tensors or sparse tensors of shape\n `[batch_size, d1, ..., dm]`\n\n Output shape: a single string or int tensor or sparse tensor of shape\n `[batch_size, d1, ..., dm]`\n\n Example: (`depth`=None)\n If the layer receives three inputs:\n `a=[[1], [4]]`, `b=[[2], [5]]`, `c=[[3], [6]]`\n the output will be a string tensor if not hashed:\n `[[b'1_X_2_X_3'], [b'4_X_5_X_6']]`\n the output will be an int64 tensor if hashed:\n `[[hash(b'1_X_2_X_3')], [hash(b'4_X_5_X_6')]]`\n\n Example: (`depth` is an integer)\n With the same input above, and if `depth`=2,\n the output will be a list of 6 string tensors if not hashed:\n `[[b'1'], [b'4']]`\n `[[b'2'], [b'5']]`\n `[[b'3'], [b'6']]`\n `[[b'1_X_2'], [b'4_X_5']]`,\n `[[b'2_X_3'], [b'5_X_6']]`,\n `[[b'3_X_1'], [b'6_X_4']]`\n the output will be a list of 6 int64 tensors if hashed:\n `[[hash(b'1')], [hash(b'4')]]`\n `[[hash(b'2')], [hash(b'5')]]`\n `[[hash(b'3')], [hash(b'6')]]`\n `[[hash(b'1_X_2')], [hash(b'4_X_5')]]`,\n `[[hash(b'2_X_3')], [hash(b'5_X_6')]]`,\n `[[hash(b'3_X_1')], [hash(b'6_X_4')]]`\n\n Example: (`depth` is a tuple/list of integers)\n With the same input above, and if `depth`=(2, 3)\n the output will be a list of 4 string tensors if not hashed:\n `[[b'1_X_2'], [b'4_X_5']]`,\n `[[b'2_X_3'], [b'5_X_6']]`,\n `[[b'3_X_1'], [b'6_X_4']]`,\n `[[b'1_X_2_X_3'], [b'4_X_5_X_6']]`\n the output will be a list of 4 int64 tensors if hashed:\n `[[hash(b'1_X_2')], [hash(b'4_X_5')]]`,\n `[[hash(b'2_X_3')], [hash(b'5_X_6')]]`,\n `[[hash(b'3_X_1')], [hash(b'6_X_4')]]`,\n `[[hash(b'1_X_2_X_3')], [hash(b'4_X_5_X_6')]]`\n \"\"\"\n\n def __init__(self, depth=None, num_bins=None, name=None, **kwargs):\n # TODO(tanzheny): Add support for depth.\n # TODO(tanzheny): Consider making seperator configurable.\n if depth is not None:\n raise NotImplementedError('`depth` is not supported yet.')\n self.num_bins = num_bins\n self.depth = depth\n super(CategoryCrossing, self).__init__(name=name, **kwargs)\n\n def call(self, inputs):\n sparse_output = False\n if any([isinstance(inp, sparse_tensor.SparseTensor) for inp in inputs]):\n sparse_output = True\n if self.num_bins is not None:\n output = sparse_ops.sparse_cross_hashed(\n inputs, num_buckets=self.num_bins)\n else:\n output = sparse_ops.sparse_cross(inputs)\n if not sparse_output:\n output = sparse_ops.sparse_tensor_to_dense(output)\n return output\n\n def compute_output_shape(self, input_shape):\n if not isinstance(input_shape, (tuple, list)):\n raise ValueError('A `CategoryCrossing` layer should be called '\n 'on a list of inputs.')\n input_shapes = input_shape\n batch_size = None\n for inp_shape in input_shapes:\n inp_tensor_shape = tensor_shape.TensorShape(inp_shape).as_list()\n if len(inp_tensor_shape) != 2:\n raise ValueError('Inputs must be rank 2, get {}'.format(input_shapes))\n if batch_size is None:\n batch_size = inp_tensor_shape[0]\n # The second dimension is dynamic based on inputs.\n output_shape = [batch_size, None]\n return tensor_shape.TensorShape(output_shape)\n\n def compute_output_signature(self, input_spec):\n input_shapes = [x.shape for x in input_spec]\n output_shape = self.compute_output_shape(input_shapes)\n output_dtype = dtypes.int64 if self.num_bins else dtypes.string\n return sparse_tensor.SparseTensorSpec(\n shape=output_shape, dtype=output_dtype)\n\n def get_config(self):\n config = {'depth': self.depth, 'num_bins': 
self.num_bins}\n base_config = super(CategoryCrossing, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\n\nclass Hashing(Layer):\n \"\"\"Implements categorical feature hashing, also known as \"hashing trick\".\n\n This layer transforms categorical inputs to hashed output. It converts a\n sequence of int or string to a sequence of int. The stable hash function uses\n tensorflow::ops::Fingerprint to produce universal output that is consistent\n across platforms.\n\n Usage:\n ```python\n layer = Hashing(num_bins=3)\n inp = np.asarray([['A', 'B'], ['C', 'A']])\n layer(inputs)\n [[0, 0], [1, 0]]\n ```\n\n Arguments:\n num_bins: Number of hash bins.\n name: Name to give to the layer.\n **kwargs: Keyword arguments to construct a layer.\n\n Input shape: A string, int32 or int64 tensor of shape\n `[batch_size, d1, ..., dm]`\n\n Output shape: An int64 tensor of shape `[batch_size, d1, ..., dm]`\n\n Example:\n If the input is a 5 by 1 string tensor '[['A'], ['B'], ['C'], ['D'], ['E']]'\n with `num_bins=2`, then output is 5 by 1 integer tensor\n [[hash('A')], [hash('B')], [hash('C')], [hash('D')], [hash('E')]].\n \"\"\"\n\n def __init__(self, num_bins, name=None, **kwargs):\n # TODO(tanzheny): consider adding strong hash variant.\n self._num_bins = num_bins\n super(Hashing, self).__init__(name=name, **kwargs)\n\n def call(self, inputs):\n # TODO(tanzheny): Add ragged support.\n # TODO(tanzheny): Add int support.\n if isinstance(inputs, sparse_tensor.SparseTensor):\n sparse_values = inputs.values\n sparse_hashed_values = string_ops.string_to_hash_bucket_fast(\n sparse_values, self._num_bins, name='lookup')\n return sparse_tensor.SparseTensor(\n indices=inputs.indices,\n values=sparse_hashed_values,\n dense_shape=inputs.dense_shape)\n # string_to_hash_bucket_fast uses FarmHash as hash function.\n return string_ops.string_to_hash_bucket_fast(\n inputs, self._num_bins, name='lookup')\n\n def compute_output_shape(self, input_shape):\n return input_shape\n\n def compute_output_signature(self, input_spec):\n output_shape = self.compute_output_shape(input_spec.shape.as_list())\n output_dtype = dtypes.int64\n if isinstance(input_spec, sparse_tensor.SparseTensorSpec):\n return sparse_tensor.SparseTensorSpec(\n shape=output_shape, dtype=output_dtype)\n else:\n return tensor_spec.TensorSpec(shape=output_shape, dtype=output_dtype)\n\n def get_config(self):\n config = {'num_bins': self._num_bins}\n base_config = super(Hashing, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n" ]
[ [ "tensorflow.python.ops.lookup_ops.index_table_from_tensor", "tensorflow.python.framework.tensor_shape.TensorShape", "tensorflow.python.ops.lookup_ops.index_table_from_file", "tensorflow.python.framework.tensor_spec.TensorSpec", "tensorflow.python.ops.sparse_ops.sparse_cross", "tensorflow.python.framework.sparse_tensor.SparseTensorSpec", "tensorflow.python.ops.sparse_ops.sparse_cross_hashed", "tensorflow.python.ops.sparse_ops.sparse_tensor_to_dense", "tensorflow.python.framework.sparse_tensor.SparseTensor", "tensorflow.python.framework.ops.convert_to_tensor", "tensorflow.python.ops.string_ops.string_to_hash_bucket_fast" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "2.7", "2.6", "2.2", "2.3", "2.4", "2.9", "2.5", "2.8", "2.10" ] } ]
stillmatic/pandas
[ "da067b2fe4cdc43eac5349e0648cfbbe4b96dbbd" ]
[ "pandas/tests/categorical/test_algos.py" ]
[ "import pytest\nimport numpy as np\n\nimport pandas as pd\nimport pandas.util.testing as tm\n\n\[email protected]('ordered', [True, False])\[email protected]('categories', [\n ['b', 'a', 'c'],\n ['a', 'b', 'c', 'd'],\n])\ndef test_factorize(categories, ordered):\n cat = pd.Categorical(['b', 'b', 'a', 'c', None],\n categories=categories,\n ordered=ordered)\n labels, uniques = pd.factorize(cat)\n expected_labels = np.array([0, 0, 1, 2, -1], dtype=np.intp)\n expected_uniques = pd.Categorical(['b', 'a', 'c'],\n categories=categories,\n ordered=ordered)\n\n tm.assert_numpy_array_equal(labels, expected_labels)\n tm.assert_categorical_equal(uniques, expected_uniques)\n\n\ndef test_factorized_sort():\n cat = pd.Categorical(['b', 'b', None, 'a'])\n labels, uniques = pd.factorize(cat, sort=True)\n expected_labels = np.array([1, 1, -1, 0], dtype=np.intp)\n expected_uniques = pd.Categorical(['a', 'b'])\n\n tm.assert_numpy_array_equal(labels, expected_labels)\n tm.assert_categorical_equal(uniques, expected_uniques)\n\n\ndef test_factorized_sort_ordered():\n cat = pd.Categorical(['b', 'b', None, 'a'],\n categories=['c', 'b', 'a'],\n ordered=True)\n\n labels, uniques = pd.factorize(cat, sort=True)\n expected_labels = np.array([0, 0, -1, 1], dtype=np.intp)\n expected_uniques = pd.Categorical(['b', 'a'],\n categories=['c', 'b', 'a'],\n ordered=True)\n\n tm.assert_numpy_array_equal(labels, expected_labels)\n tm.assert_categorical_equal(uniques, expected_uniques)\n" ]
[ [ "pandas.util.testing.assert_numpy_array_equal", "pandas.util.testing.assert_categorical_equal", "pandas.Categorical", "pandas.factorize", "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [], "tensorflow": [] } ]
supersamdam/ConversationalAI
[ "bb6013c33f6332aee57abbae310577c056c6fdc1" ]
[ "Prototype.py" ]
[ "import numpy as np\nimport pandas as pd\nimport re\nimport nltk\nnltk.download('stopwords')\nfrom nltk.corpus import stopwords\nfrom nltk.stem.porter import PorterStemmer\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.naive_bayes import GaussianNB\nfrom sklearn.metrics import confusion_matrix, accuracy_score\nimport pickle\nimport joblib\n\n# Class starts from here\nclass CONVAI:\n #this is the empty vocabulary (vectorizer)\n cv = CountVectorizer(max_features = 20000) #change in no of features will result in how many different/unique words it will have\n classifier = GaussianNB() #this is the main algorith which works on probablistic approach\n no = 1000 #change this to change the number of data in terms of line you want to fed in model\n \n def init(self): #basic function \n dataset = pd.read_csv('data.csv') #dataset loaded\n no=self.no\n corpus = [] #corpus will have cleaned data\n for i in range(0, no):\n review = re.sub('[^a-zA-Z]', ' ', dataset['0'][i])\n review = review.lower()\n review = review.split()\n ps = PorterStemmer()\n all_stopwords = stopwords.words('english')\n all_stopwords.remove('not')\n review = [ps.stem(word) for word in review if not word in set(all_stopwords)]\n review = ' '.join(review)\n corpus.append(review)\n \n print(corpus)\n \n \n X = self.cv.fit_transform(corpus).toarray() #divided dataset into 2 parts this will be like questions\n y = dataset.iloc[0:no, 2].values #this will be like answer to the abouve question\n # print(X)\n \n\n \n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.20, random_state = 0) #splitted dataset into train and test\n \n \n \n sav = self.classifier.fit(X_train, y_train) \n \n y_pred = self.classifier.predict(X_test) #all the action is done here\n print(np.concatenate((y_pred.reshape(len(y_pred),1,), y_test.reshape(len(y_test),1)),1),) #printing the current actions\n \n\n cm = confusion_matrix(y_test, y_pred) \n print(cm)\n a = accuracy_score(y_test, y_pred)\n print(a)\n joblib.dump(self.cv, \"vectorizer1.pkl\") #vocabulary is saved here\n joblib.dump(self.classifier, \"classifier1.pkl\") #algorithm is saved here\n\n\n # with open('model.pkl', 'wb') as fout:\n # pickle.dump((cv, classifier), fout)\n\n # filename = 'finalized_model.sav'\n # pickle.dump(cv, open(filename, 'wb'))\n # filename = 'finalized.sav' \n # pickle.dump(cv, open(filename, 'wb'))\n\n\n # saved_model = pickle.dumps(classifier)\n\n \n def Test(self,query): #this is the function for implementation of new inputs\n vectorizer = joblib.load(\"vectorizer.pkl\") #vocabulary is loaded\n classifier = joblib.load(\"classifier.pkl\") #algoritm is loaded\n\n # with open('model.pkl', 'rb') as fin:\n # cv, classifier = pickle.load(fin)\n \n #This is known as preprocessing the data\n cv = self.cv\n classifier = self.classifier\n #query = input()\n new_review = query\n new_review = re.sub('[^a-zA-Z]', ' ', new_review)\n new_review = new_review.lower() \n new_review = new_review.split()\n ps = PorterStemmer()\n all_stopwords = stopwords.words('english')\n all_stopwords.remove('not')\n new_review = [ps.stem(word) for word in new_review if not word in set(all_stopwords)]\n new_review = ' '.join(new_review)\n new_corpus = [new_review]\n new_X_test = cv.transform(new_corpus).toarray() \n new_y_pred = classifier.predict(new_X_test)\n print(new_y_pred) #output from the algorithm is printed\n return new_y_pred #output from the algorithm is returned\n \nif __name__ == \"__main__\": 
#main class\n a=CONVAI() #created instance(object) of the class CONVAI\n a.init() #called the function which will start training\n a.Test(\"hello\") #enter different type of input here to get new output results \n\n" ]
[ [ "pandas.read_csv", "sklearn.naive_bayes.GaussianNB", "sklearn.metrics.confusion_matrix", "sklearn.model_selection.train_test_split", "sklearn.feature_extraction.text.CountVectorizer", "sklearn.metrics.accuracy_score" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.1", "1.5", "1.2", "1.3" ], "scipy": [], "tensorflow": [] } ]
XiaotingChen/tfmodisco
[ "17cbafe806942304a02e8134fe10224bdff38b0c" ]
[ "modisco/value_provider.py" ]
[ "from __future__ import division, print_function, absolute_import\nimport numpy as np\nimport scipy.stats\n\n\nclass AbstractValueProvider(object):\n\n def __call__(self, seqlet):\n raise NotImplementedError()\n\n @classmethod\n def from_hdf5(cls, grp):\n the_class = eval(grp.attrs[\"class\"])\n return the_class.from_hdf5(grp) \n\n\nclass CoorScoreValueProvider(AbstractValueProvider):\n\n def __call__(self, seqlet):\n return seqlet.coor.score \n\n def save_hdf5(self, grp):\n grp.attrs[\"class\"] = type(self).__name__\n\n @classmethod\n def from_hdf5(cls, grp):\n return cls()\n\n\nclass TransformCentralWindowValueProvider(AbstractValueProvider):\n\n def __init__(self, track_name, central_window, val_transformer):\n if isinstance(track_name, str):\n self.track_name = track_name\n else: \n self.track_name = track_name.decode('utf-8')\n self.central_window = central_window\n self.val_transformer = val_transformer\n\n def __call__(self, seqlet):\n val = self.get_val(seqlet=seqlet)\n return self.val_transformer(val=val)\n\n def get_val(self, seqlet):\n flank_to_ignore = int(0.5*(len(seqlet)-self.central_window))\n track_values = seqlet[self.track_name]\\\n .fwd[flank_to_ignore:(len(seqlet)-flank_to_ignore)]\n return np.sum(track_values)\n\n def save_hdf5(self, grp):\n grp.attrs[\"class\"] = type(self).__name__\n grp.attrs[\"track_name\"] = self.track_name\n grp.attrs[\"central_window\"] = self.central_window\n self.val_transformer.save_hdf5(grp.create_group(\"val_transformer\")) \n\n @classmethod\n def from_hdf5(cls, grp):\n if isinstance(grp.attrs[\"track_name\"], str):\n track_name = grp.attrs[\"track_name\"]\n else:\n track_name = grp.attrs[\"track_name\"].decode('utf-8')\n central_window = grp.attrs[\"central_window\"] \n val_transformer = AbstractValTransformer.from_hdf5(\n grp[\"val_transformer\"]) \n return cls(track_name=track_name,\n central_window=central_window,\n val_transformer=val_transformer)\n\n\nclass AbstractValTransformer(object):\n\n def __call__(self, val):\n raise NotImplementedError()\n\n @classmethod\n def from_hdf5(cls, grp):\n the_class = eval(grp.attrs[\"class\"])\n return the_class.from_hdf5(grp) \n\n\nclass AbsPercentileValTransformer(AbstractValTransformer):\n\n def __init__(self, distribution):\n self.distribution = np.array(sorted(np.abs(distribution)))\n\n @classmethod\n def from_hdf5(cls, grp):\n distribution = np.array(grp[\"distribution\"][:])\n return cls(distribution=distribution) \n\n def save_hdf5(self, grp):\n grp.attrs[\"class\"] = type(self).__name__\n grp.create_dataset(\"distribution\", data=self.distribution)\n\n def __call__(self, val):\n return np.sign(val)*np.searchsorted(\n a=self.distribution,\n v=abs(val))/float(len(self.distribution))\n\n\nclass SignedPercentileValTransformer(AbstractValTransformer):\n\n def __init__(self, distribution):\n self.distribution = np.array(distribution)\n self.pos_dist = np.array(sorted(\n [x for x in self.distribution if x > 0]))\n self.abs_neg_dist = np.array(sorted(\n [abs(x) for x in self.distribution if x < 0]))\n\n @classmethod\n def from_hdf5(cls, grp):\n distribution = np.array(grp[\"distribution\"][:])\n return cls(distribution=distribution) \n\n def save_hdf5(self, grp):\n grp.attrs[\"class\"] = type(self).__name__\n grp.create_dataset(\"distribution\", data=self.distribution)\n\n def __call__(self, val):\n if (val == 0):\n return 0\n elif (val > 0):\n #add 1E-7 for complicated numerical stability issues \n # basically need robustness when dealing with ties\n return np.searchsorted(\n a=self.pos_dist, 
v=(val+1E-7))/float(len(self.pos_dist))\n else:\n #add 1E-7 for complicated numerical stability issues \n # basically need robustness when dealing with ties\n return np.searchsorted(\n a=self.abs_neg_dist, v=(abs(val)+1E-7))/float(\n len(self.abs_neg_dist))\n" ]
[ [ "numpy.abs", "numpy.sign", "numpy.searchsorted", "numpy.array", "numpy.sum" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
bgalbraith/macarico
[ "448e3e7f088dde0f4eb016fbdee857221b9523fb", "448e3e7f088dde0f4eb016fbdee857221b9523fb" ]
[ "macarico/actors/bow.py", "macarico/policies/linear.py" ]
[ "from __future__ import division, generators, print_function\n\nimport torch\nimport torch.nn as nn\n\nimport macarico\nimport macarico.util as util\nfrom macarico.util import Var, Varng\n\nclass BOWActor(macarico.Actor):\n def __init__(self, attention, n_actions, act_history_length=1, obs_history_length=0):\n self.att_dim = sum((att.dim for att in attention))\n super().__init__(n_actions,\n self.att_dim + \n act_history_length * n_actions + \\\n obs_history_length * self.att_dim,\n attention)\n self.act_history_length = act_history_length\n self.obs_history_length = obs_history_length\n self._reset()\n\n def _forward(self, state, x):\n feats = x[:]\n if self.act_history_length > 0:\n f = util.zeros(self, 1, self.act_history_length * self.n_actions)\n for i in range(min(self.act_history_length, len(state._trajectory))):\n a = state._trajectory[-i]\n f[0, i * self.n_actions + a] = 1\n feats.append(Varng(f))\n if self.obs_history_length > 0:\n for i in range(self.obs_history_length):\n feats.append(Varng(self.obs_history[(self.obs_history_pos+i) % self.obs_history_length]))\n # update history\n self.obs_history[self.obs_history_pos] = torch.cat(x, dim=1).data\n self.obs_history_pos = (self.obs_history_pos + 1) % self.obs_history_length\n return torch.cat(feats, dim=1)\n\n def _reset(self):\n self.obs_history = []\n for _ in range(self.obs_history_length):\n self.obs_history.append(util.zeros(self, 1, self.att_dim))\n self.obs_history_pos = 0\n \n", "from __future__ import division, generators, print_function\nimport random\n#import torch\n#from torch import nn\n#from torch.autograd import Variable\n#from torch.nn import functional as F\n#from torch.nn.parameter import Parameter\n#import torch.nn.functional as F\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom macarico.util import Var, Varng\nfrom torch.nn.parameter import Parameter\nimport numpy as np\n\nimport macarico\nfrom macarico import util, CostSensitivePolicy\n\nclass SoftmaxPolicy(macarico.StochasticPolicy):\n def __init__(self, features, n_actions, temperature=1.0):\n macarico.StochasticPolicy.__init__(self)\n self.n_actions = n_actions\n self.features = features\n self.mapping = nn.Linear(features.dim, n_actions)\n self.disallow = torch.zeros(n_actions)\n self.temperature = temperature\n\n def forward(self, state):\n fts = self.features(state)\n z = self.mapping(fts).squeeze().data\n #print('pol', z.numpy(), util.argmin(z, state.actions), state.actions)\n return util.argmin(z, state.actions)\n\n def stochastic(self, state):\n z = self.mapping(self.features(state)).squeeze()\n if len(state.actions) != self.n_actions:\n self.disallow.zero_()\n self.disallow += 1e10\n for a in state.actions:\n self.disallow[a] = 0.\n z += Varng(self.disallow)\n p = F.softmax(-z / self.temperature, dim=0)\n return util.sample_from_probs(p)\n\ndef truth_to_vec(truth, tmp_vec):\n if isinstance(truth, torch.FloatTensor):\n return truth\n if isinstance(truth, int) or isinstance(truth, np.int32) or isinstance(truth, np.int64):\n tmp_vec.zero_()\n tmp_vec += 1\n tmp_vec[truth] = 0\n return tmp_vec\n if isinstance(truth, list) or isinstance(truth, set):\n tmp_vec.zero_()\n tmp_vec += 1\n for t in truth:\n tmp_vec[t] = 0\n return tmp_vec\n raise ValueError('invalid argument type for \"truth\", must be in, list or set; got \"%s\"' % type(truth))\n \nclass CSOAAPolicy(SoftmaxPolicy, CostSensitivePolicy):\n def __init__(self, features, n_actions, loss_fn='huber', temperature=1.0):\n SoftmaxPolicy.__init__(self, features, n_actions, 
temperature)\n self.set_loss(loss_fn)\n\n def set_loss(self, loss_fn):\n assert loss_fn in ['squared', 'huber']\n self.loss_fn = nn.MSELoss(size_average=False) if loss_fn == 'squared' else \\\n nn.SmoothL1Loss(size_average=False) if loss_fn == 'huber' else \\\n None\n \n def predict_costs(self, state):\n return self.mapping(self.features(state)).squeeze()\n\n def _compute_loss(self, loss_fn, pred_costs, truth, state_actions):\n if len(state_actions) == self.n_actions:\n return loss_fn(pred_costs, Varng(truth))\n return sum((loss_fn(pred_costs[a], Varng(torch.zeros(1) + truth[a])) \\\n for a in state_actions))\n \n def _update(self, pred_costs, truth, actions=None):\n truth = truth_to_vec(truth, torch.zeros(self.n_actions))\n #print('update', truth.numpy(), pred_costs.data.numpy(), actions)\n return self._compute_loss(self.loss_fn, pred_costs, truth, actions)\n\nclass WMCPolicy(CSOAAPolicy):\n def __init__(self, features, n_actions, loss_fn='hinge', temperature=1.0):\n CSOAAPolicy.__init__(self, features, n_actions, loss_fn, temperature)\n \n def set_loss(self, loss_fn):\n assert loss_fn in ['multinomial', 'hinge', 'squared', 'huber']\n if loss_fn == 'hinge':\n l = nn.MultiMarginLoss(size_average=False)\n self.loss_fn = lambda p, t, _: l(p, Varng(torch.LongTensor([t])))\n elif loss_fn == 'multinomial':\n l = nn.NLLLoss(size_average=False)\n self.loss_fn = lambda p, t, _: l(F.log_softmax(p.unsqueeze(0), dim=1), Varng(torch.LongTensor([t])))\n elif loss_fn in ['squared', 'huber']:\n l = (nn.MSELoss if loss_fn == 'squared' else nn.SmoothL1Loss)(size_average=False)\n self.loss_fn = lambda p, t, sa: self._compute_loss(l, p, 1 - truth_to_vec(t, torch.zeros(self.n_actions)), sa)\n \n def _update(self, pred_costs, truth, actions=None):\n pred_costs = -pred_costs\n if isinstance(truth, int): truth = [truth]\n if isinstance(truth, list) or isinstance(truth, set):\n return sum((self.loss_fn(pred_costs, a, actions) for a in truth))\n\n assert isinstance(truth, torch.FloatTensor)\n \n if len(actions) == 1:\n a = list(actions)[0]\n return self.loss_fn(pred_costs, a, actions)\n\n full_actions = actions is None or len(actions) == self.n_actions\n truth_sum = truth.sum() if full_actions else sum((truth[a] for a in actions))\n w = truth_sum / (len(actions)-1) - truth\n w -= w.min()\n return sum((w[a] * self.loss_fn(pred_costs, a, actions) \\\n for a in actions \\\n if w[a] > 1e-6))\n \n" ]
[ [ "torch.cat" ], [ "torch.nn.SmoothL1Loss", "torch.nn.functional.softmax", "torch.nn.NLLLoss", "torch.LongTensor", "torch.zeros", "torch.nn.Linear", "torch.nn.MSELoss", "torch.nn.MultiMarginLoss" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
FrancisLiang/models-1
[ "e14d5bc1ab36d0dd11977f27cff54605bf99c945", "e14d5bc1ab36d0dd11977f27cff54605bf99c945", "e14d5bc1ab36d0dd11977f27cff54605bf99c945", "e14d5bc1ab36d0dd11977f27cff54605bf99c945", "e14d5bc1ab36d0dd11977f27cff54605bf99c945", "e14d5bc1ab36d0dd11977f27cff54605bf99c945", "e14d5bc1ab36d0dd11977f27cff54605bf99c945" ]
[ "PaddleNLP/emotion_detection/run_classifier.py", "PaddleCV/ocr_recognition/infer.py", "PaddleNLP/unarchived/sequence_tagging_for_ner/utils_extend.py", "PaddleCV/deeplabv3+/reader.py", "PaddleRec/ssr/train.py", "PaddleCV/PaddleDetection/ppdet/data/transform/shared_queue/sharedmemory.py", "PaddleRec/gru4rec/utils.py" ]
[ "\"\"\"\nEmotion Detection Task\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport time\nimport argparse\nimport multiprocessing\nimport sys\nsys.path.append(\"../\")\n\nimport paddle\nimport paddle.fluid as fluid\nimport numpy as np\n\nfrom models.classification import nets\nimport reader\nimport config\nimport utils\n\nparser = argparse.ArgumentParser(__doc__)\nmodel_g = utils.ArgumentGroup(parser, \"model\", \"model configuration and paths.\")\nmodel_g.add_arg(\"config_path\", str, None, \"Path to the json file for EmoTect model config.\")\nmodel_g.add_arg(\"init_checkpoint\", str, None, \"Init checkpoint to resume training from.\")\nmodel_g.add_arg(\"output_dir\", str, None, \"Directory path to save checkpoints\")\n\ntrain_g = utils.ArgumentGroup(parser, \"training\", \"training options.\")\ntrain_g.add_arg(\"epoch\", int, 10, \"Number of epoches for training.\")\ntrain_g.add_arg(\"save_steps\", int, 10000, \"The steps interval to save checkpoints.\")\ntrain_g.add_arg(\"validation_steps\", int, 1000, \"The steps interval to evaluate model performance.\")\ntrain_g.add_arg(\"lr\", float, 0.002, \"The Learning rate value for training.\")\n\nlog_g = utils.ArgumentGroup(parser, \"logging\", \"logging related\")\nlog_g.add_arg(\"skip_steps\", int, 10, \"The steps interval to print loss.\")\nlog_g.add_arg(\"verbose\", bool, False, \"Whether to output verbose log\")\n\ndata_g = utils.ArgumentGroup(parser, \"data\", \"Data paths, vocab paths and data processing options\")\ndata_g.add_arg(\"data_dir\", str, None, \"Directory path to training data.\")\ndata_g.add_arg(\"vocab_path\", str, None, \"Vocabulary path.\")\ndata_g.add_arg(\"batch_size\", int, 256, \"Total examples' number in batch for training.\")\ndata_g.add_arg(\"random_seed\", int, 0, \"Random seed.\")\n\nrun_type_g = utils.ArgumentGroup(parser, \"run_type\", \"running type options.\")\nrun_type_g.add_arg(\"use_cuda\", bool, False, \"If set, use GPU for training.\")\nrun_type_g.add_arg(\"task_name\", str, None, \"The name of task to perform sentiment classification.\")\nrun_type_g.add_arg(\"do_train\", bool, False, \"Whether to perform training.\")\nrun_type_g.add_arg(\"do_val\", bool, False, \"Whether to perform evaluation.\")\nrun_type_g.add_arg(\"do_infer\", bool, False, \"Whether to perform inference.\")\n\nparser.add_argument('--enable_ce', action='store_true', help='If set, run the task with continuous evaluation logs.')\n\nargs = parser.parse_args()\n\ndef create_model(args,\n pyreader_name,\n emotect_config,\n num_labels,\n is_infer=False):\n \"\"\"\n Create Model for sentiment classification\n \"\"\"\n if is_infer:\n pyreader = fluid.layers.py_reader(\n capacity=16,\n shapes=[[-1, 1]],\n dtypes=['int64'],\n lod_levels=[1],\n name=pyreader_name,\n use_double_buffer=False)\n else:\n pyreader = fluid.layers.py_reader(\n capacity=16,\n shapes=([-1, 1], [-1, 1]),\n dtypes=('int64', 'int64'),\n lod_levels=(1, 0),\n name=pyreader_name,\n use_double_buffer=False)\n\n if emotect_config['model_type'] == \"cnn_net\":\n network = nets.cnn_net\n elif emotect_config['model_type'] == \"bow_net\":\n network = nets.bow_net\n elif emotect_config['model_type'] == \"lstm_net\":\n network = nets.lstm_net\n elif emotect_config['model_type'] == \"bilstm_net\":\n network = nets.bilstm_net\n elif emotect_config['model_type'] == \"gru_net\":\n network = nets.gru_net\n elif emotect_config['model_type'] == \"textcnn_net\":\n network = nets.textcnn_net\n else:\n 
raise ValueError(\"Unknown network type!\")\n\n if is_infer:\n data = fluid.layers.read_file(pyreader)\n probs = network(data, None, emotect_config[\"vocab_size\"], class_dim=num_labels, is_infer=True)\n return pyreader, probs\n\n data, label = fluid.layers.read_file(pyreader)\n avg_loss, probs = network(data, label, emotect_config[\"vocab_size\"], class_dim=num_labels)\n num_seqs = fluid.layers.create_tensor(dtype='int64')\n accuracy = fluid.layers.accuracy(input=probs, label=label, total=num_seqs)\n return pyreader, avg_loss, accuracy, num_seqs\n\n\ndef evaluate(exe, test_program, test_pyreader, fetch_list, eval_phase):\n \"\"\"\n Evaluation Function\n \"\"\"\n test_pyreader.start()\n total_cost, total_acc, total_num_seqs = [], [], []\n time_begin = time.time()\n while True:\n try:\n np_loss, np_acc, np_num_seqs = exe.run(program=test_program,\n fetch_list=fetch_list,\n return_numpy=False)\n np_loss = np.array(np_loss)\n np_acc = np.array(np_acc)\n np_num_seqs = np.array(np_num_seqs)\n total_cost.extend(np_loss * np_num_seqs)\n total_acc.extend(np_acc * np_num_seqs)\n total_num_seqs.extend(np_num_seqs)\n except fluid.core.EOFException:\n test_pyreader.reset()\n break\n time_end = time.time()\n print(\"[%s evaluation] avg loss: %f, avg acc: %f, elapsed time: %f s\" %\n (eval_phase, np.sum(total_cost) / np.sum(total_num_seqs),\n np.sum(total_acc) / np.sum(total_num_seqs), time_end - time_begin))\n\n\ndef infer(exe, infer_program, infer_pyreader, fetch_list, infer_phase):\n infer_pyreader.start()\n time_begin = time.time()\n while True:\n try:\n batch_probs = exe.run(program=infer_program,\n fetch_list=fetch_list,\n return_numpy=True)\n for probs in batch_probs[0]:\n print(\"%d\\t%f\\t%f\\t%f\" % (np.argmax(probs), probs[0], probs[1], probs[2]))\n except fluid.core.EOFException as e:\n infer_pyreader.reset()\n break\n time_end = time.time()\n print(\"[%s] elapsed time: %f s\" % (infer_phase, time_end - time_begin))\n\n\ndef main(args):\n \"\"\"\n Main Function\n \"\"\"\n emotect_config = config.EmoTectConfig(args.config_path)\n\n if args.use_cuda:\n place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0')))\n else:\n place = fluid.CPUPlace()\n exe = fluid.Executor(place)\n\n task_name = args.task_name.lower()\n processor = reader.EmoTectProcessor(data_dir=args.data_dir,\n vocab_path=args.vocab_path,\n random_seed=args.random_seed)\n num_labels = len(processor.get_labels())\n\n if not (args.do_train or args.do_val or args.do_infer):\n raise ValueError(\"For args `do_train`, `do_val` and `do_infer`, at \"\n \"least one of them must be True.\")\n\n startup_prog = fluid.Program()\n if args.random_seed is not None:\n startup_prog.random_seed = args.random_seed\n\n if args.do_train:\n train_data_generator = processor.data_generator(\n batch_size=args.batch_size,\n phase='train',\n epoch=args.epoch)\n\n num_train_examples = processor.get_num_examples(phase=\"train\")\n max_train_steps = args.epoch * num_train_examples // args.batch_size + 1\n\n print(\"Num train examples: %d\" % num_train_examples)\n print(\"Max train steps: %d\" % max_train_steps)\n\n train_program = fluid.Program()\n if args.random_seed is not None:\n train_program.random_seed = args.random_seed\n\n with fluid.program_guard(train_program, startup_prog):\n with fluid.unique_name.guard():\n train_pyreader, loss, accuracy, num_seqs = create_model(\n args,\n pyreader_name='train_reader',\n emotect_config=emotect_config,\n num_labels=num_labels,\n is_infer=False)\n\n sgd_optimizer = 
fluid.optimizer.Adagrad(learning_rate=args.lr)\n sgd_optimizer.minimize(loss)\n\n if args.verbose:\n lower_mem, upper_mem, unit = fluid.contrib.memory_usage(\n program=train_program, batch_size=args.batch_size)\n print(\"Theoretical memory usage in training: %.3f - %.3f %s\" %\n (lower_mem, upper_mem, unit))\n\n if args.do_val:\n test_prog = fluid.Program()\n with fluid.program_guard(test_prog, startup_prog):\n with fluid.unique_name.guard():\n test_pyreader, loss, accuracy, num_seqs = create_model(\n args,\n pyreader_name='test_reader',\n emotect_config=emotect_config,\n num_labels=num_labels,\n is_infer=False)\n test_prog = test_prog.clone(for_test=True)\n\n if args.do_infer:\n test_prog = fluid.Program()\n with fluid.program_guard(test_prog, startup_prog):\n with fluid.unique_name.guard():\n infer_pyreader, probs = create_model(\n args,\n pyreader_name='infer_reader',\n emotect_config=emotect_config,\n num_labels=num_labels,\n is_infer=True)\n test_prog = test_prog.clone(for_test=True)\n\n exe.run(startup_prog)\n\n if args.do_train:\n if args.init_checkpoint:\n utils.init_checkpoint(\n exe,\n args.init_checkpoint,\n main_program=startup_prog)\n elif args.do_val or args.do_infer:\n if not args.init_checkpoint:\n raise ValueError(\"args 'init_checkpoint' should be set if\"\n \"only doing validation or infer!\")\n utils.init_checkpoint(\n exe,\n args.init_checkpoint,\n main_program=test_prog)\n\n if args.do_train:\n train_exe = exe\n train_pyreader.decorate_paddle_reader(train_data_generator)\n else:\n train_exe = None\n if args.do_val or args.do_infer:\n test_exe = exe\n\n if args.do_train:\n train_pyreader.start()\n steps = 0\n total_cost, total_acc, total_num_seqs = [], [], []\n time_begin = time.time()\n ce_info = []\n while True:\n try:\n steps += 1\n if steps % args.skip_steps == 0:\n fetch_list = [loss.name, accuracy.name, num_seqs.name]\n else:\n fetch_list = []\n\n outputs = train_exe.run(program=train_program,\n fetch_list=fetch_list,\n return_numpy=False)\n if steps % args.skip_steps == 0:\n np_loss, np_acc, np_num_seqs = outputs\n np_loss = np.array(np_loss)\n np_acc = np.array(np_acc)\n np_num_seqs = np.array(np_num_seqs)\n total_cost.extend(np_loss * np_num_seqs)\n total_acc.extend(np_acc * np_num_seqs)\n total_num_seqs.extend(np_num_seqs)\n\n if args.verbose:\n verbose = \"train pyreader queue size: %d, \" % train_pyreader.queue.size()\n print(verbose)\n\n time_end = time.time()\n used_time = time_end - time_begin\n print(\"step: %d, avg loss: %f, \"\n \"avg acc: %f, speed: %f steps/s\" %\n (steps, np.sum(total_cost) / np.sum(total_num_seqs),\n np.sum(total_acc) / np.sum(total_num_seqs),\n args.skip_steps / used_time))\n ce_info.append([np.sum(total_cost) / np.sum(total_num_seqs), np.sum(total_acc) / np.sum(total_num_seqs), used_time])\n total_cost, total_acc, total_num_seqs = [], [], []\n time_begin = time.time()\n\n if steps % args.save_steps == 0:\n save_path = os.path.join(args.output_dir, \"step_\" + str(steps))\n fluid.io.save_persistables(exe, save_path, train_program)\n\n if steps % args.validation_steps == 0:\n # evaluate on dev set\n if args.do_val:\n test_pyreader.decorate_paddle_reader(\n processor.data_generator(\n batch_size=args.batch_size,\n phase='dev',\n epoch=1))\n evaluate(test_exe, test_prog, test_pyreader,\n [loss.name, accuracy.name, num_seqs.name],\n \"dev\")\n\n except fluid.core.EOFException:\n save_path = os.path.join(args.output_dir, \"step_\" + str(steps))\n fluid.io.save_persistables(exe, save_path, train_program)\n train_pyreader.reset()\n 
break\n\n if args.do_train and args.enable_ce:\n card_num = get_cards()\n ce_loss = 0\n ce_acc = 0\n ce_time = 0\n try:\n ce_loss = ce_info[-2][0]\n ce_acc = ce_info[-2][1]\n ce_time = ce_info[-2][2]\n except:\n print(\"ce info error\")\n print(\"kpis\\teach_step_duration_%s_card%s\\t%s\" %\n (task_name, card_num, ce_time))\n print(\"kpis\\ttrain_loss_%s_card%s\\t%f\" %\n (task_name, card_num, ce_loss))\n print(\"kpis\\ttrain_acc_%s_card%s\\t%f\" %\n (task_name, card_num, ce_acc))\n\n # evaluate on test set\n if not args.do_train and args.do_val:\n test_pyreader.decorate_paddle_reader(\n processor.data_generator(\n batch_size=args.batch_size,\n phase='test',\n epoch=1))\n print(\"Final test result:\")\n evaluate(test_exe, test_prog, test_pyreader,\n [loss.name, accuracy.name, num_seqs.name],\n \"test\")\n\n # infer\n if args.do_infer:\n infer_pyreader.decorate_paddle_reader(\n processor.data_generator(\n batch_size=args.batch_size,\n phase='infer',\n epoch=1))\n infer(test_exe, test_prog, infer_pyreader,\n [probs.name], \"infer\")\n\n\ndef get_cards():\n num = 0\n cards = os.environ.get('CUDA_VISIBLE_DEVICES', '')\n if cards != '':\n num = len(cards.split(\",\"))\n return num\n\n\nif __name__ == \"__main__\":\n utils.print_arguments(args)\n main(args)\n", "from __future__ import print_function\nimport paddle.fluid as fluid\nfrom utility import add_arguments, print_arguments, to_lodtensor, get_ctc_feeder_data, get_attention_feeder_for_infer, get_ctc_feeder_for_infer\nimport paddle.fluid.profiler as profiler\nfrom crnn_ctc_model import ctc_infer\nfrom attention_model import attention_infer\nimport numpy as np\nimport data_reader\nimport argparse\nimport functools\nimport os\nimport time\n\nparser = argparse.ArgumentParser(description=__doc__)\nadd_arg = functools.partial(add_arguments, argparser=parser)\n# yapf: disable\nadd_arg('model', str, \"crnn_ctc\", \"Which type of network to be used. 'crnn_ctc' or 'attention'\")\nadd_arg('model_path', str, None, \"The model path to be used for inference.\")\nadd_arg('input_images_dir', str, None, \"The directory of images.\")\nadd_arg('input_images_list', str, None, \"The list file of images.\")\nadd_arg('dict', str, None, \"The dictionary. The result of inference will be index sequence if the dictionary was None.\")\nadd_arg('use_gpu', bool, True, \"Whether use GPU to infer.\")\nadd_arg('iterations', int, 0, \"The number of iterations. Zero or less means whole test set. 
More than 0 means the test set might be looped until # of iterations is reached.\")\nadd_arg('profile', bool, False, \"Whether to use profiling.\")\nadd_arg('skip_batch_num', int, 0, \"The number of first minibatches to skip as warm-up for better performance test.\")\nadd_arg('batch_size', int, 1, \"The minibatch size.\")\n# yapf: enable\n\n\ndef inference(args):\n \"\"\"OCR inference\"\"\"\n if args.model == \"crnn_ctc\":\n infer = ctc_infer\n get_feeder_data = get_ctc_feeder_for_infer\n else:\n infer = attention_infer\n get_feeder_data = get_attention_feeder_for_infer\n eos = 1\n sos = 0\n num_classes = data_reader.num_classes()\n data_shape = data_reader.data_shape()\n # define network\n images = fluid.layers.data(name='pixel', shape=data_shape, dtype='float32')\n ids = infer(images, num_classes, use_cudnn=True if args.use_gpu else False)\n # data reader\n infer_reader = data_reader.inference(\n batch_size=args.batch_size,\n infer_images_dir=args.input_images_dir,\n infer_list_file=args.input_images_list,\n cycle=True if args.iterations > 0 else False,\n model=args.model)\n # prepare environment\n place = fluid.CPUPlace()\n if args.use_gpu:\n place = fluid.CUDAPlace(0)\n\n exe = fluid.Executor(place)\n exe.run(fluid.default_startup_program())\n\n # load dictionary\n dict_map = None\n if args.dict is not None and os.path.isfile(args.dict):\n dict_map = {}\n with open(args.dict) as dict_file:\n for i, word in enumerate(dict_file):\n dict_map[i] = word.strip()\n print(\"Loaded dict from %s\" % args.dict)\n\n # load init model\n model_dir = args.model_path\n model_file_name = None\n if not os.path.isdir(args.model_path):\n model_dir = os.path.dirname(args.model_path)\n model_file_name = os.path.basename(args.model_path)\n fluid.io.load_params(exe, dirname=model_dir, filename=model_file_name)\n print(\"Init model from: %s.\" % args.model_path)\n\n batch_times = []\n iters = 0\n for data in infer_reader():\n feed_dict = get_feeder_data(data, place)\n if args.iterations > 0 and iters == args.iterations + args.skip_batch_num:\n break\n if iters < args.skip_batch_num:\n print(\"Warm-up itaration\")\n if iters == args.skip_batch_num:\n profiler.reset_profiler()\n\n start = time.time()\n result = exe.run(fluid.default_main_program(),\n feed=feed_dict,\n fetch_list=[ids],\n return_numpy=False)\n indexes = prune(np.array(result[0]).flatten(), 0, 1)\n batch_time = time.time() - start\n fps = args.batch_size / batch_time\n batch_times.append(batch_time)\n if dict_map is not None:\n print(\"Iteration %d, latency: %.5f s, fps: %f, result: %s\" % (\n iters,\n batch_time,\n fps,\n [dict_map[index] for index in indexes], ))\n else:\n print(\"Iteration %d, latency: %.5f s, fps: %f, result: %s\" % (\n iters,\n batch_time,\n fps,\n indexes, ))\n\n iters += 1\n\n latencies = batch_times[args.skip_batch_num:]\n latency_avg = np.average(latencies)\n latency_pc99 = np.percentile(latencies, 99)\n fpses = np.divide(args.batch_size, latencies)\n fps_avg = np.average(fpses)\n fps_pc99 = np.percentile(fpses, 1)\n\n # Benchmark output\n print('\\nTotal examples (incl. 
warm-up): %d' % (iters * args.batch_size))\n print('average latency: %.5f s, 99pc latency: %.5f s' % (latency_avg,\n latency_pc99))\n print('average fps: %.5f, fps for 99pc latency: %.5f' % (fps_avg, fps_pc99))\n\n\ndef prune(words, sos, eos):\n \"\"\"Remove unused tokens in prediction result.\"\"\"\n start_index = 0\n end_index = len(words)\n if sos in words:\n start_index = np.where(words == sos)[0][0] + 1\n if eos in words:\n end_index = np.where(words == eos)[0][0]\n return words[start_index:end_index]\n\n\ndef main():\n args = parser.parse_args()\n print_arguments(args)\n if args.profile:\n if args.use_gpu:\n with profiler.cuda_profiler(\"cuda_profiler.txt\", 'csv') as nvprof:\n inference(args)\n else:\n with profiler.profiler(\"CPU\", sorted_key='total') as cpuprof:\n inference(args)\n else:\n inference(args)\n\n\nif __name__ == \"__main__\":\n main()\n", "import numpy as np\n\nimport paddle.fluid as fluid\n\n\ndef get_embedding(emb_file='data/wordVectors.txt'):\n \"\"\"\n Get the trained word vector.\n \"\"\"\n return np.loadtxt(emb_file, dtype='float32')\n\n\ndef to_lodtensor(data, place):\n \"\"\"\n convert data to lodtensor\n \"\"\"\n seq_lens = [len(seq) for seq in data]\n cur_len = 0\n lod = [cur_len]\n for l in seq_lens:\n cur_len += l\n lod.append(cur_len)\n flattened_data = np.concatenate(data, axis=0).astype(\"int64\")\n flattened_data = flattened_data.reshape([len(flattened_data), 1])\n res = fluid.LoDTensor()\n res.set(flattened_data, place)\n res.set_lod([lod])\n return res\n", "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport cv2\nimport numpy as np\nimport os\nimport six\nimport time\nfrom data_utils import GeneratorEnqueuer\n\ndefault_config = {\n \"shuffle\": True,\n \"min_resize\": 0.5,\n \"max_resize\": 4,\n \"crop_size\": 769,\n}\n\n\ndef slice_with_pad(a, s, value=0):\n pads = []\n slices = []\n for i in range(len(a.shape)):\n if i >= len(s):\n pads.append([0, 0])\n slices.append([0, a.shape[i]])\n else:\n l, r = s[i]\n if l < 0:\n pl = -l\n l = 0\n else:\n pl = 0\n if r > a.shape[i]:\n pr = r - a.shape[i]\n r = a.shape[i]\n else:\n pr = 0\n pads.append([pl, pr])\n slices.append([l, r])\n slices = list(map(lambda x: slice(x[0], x[1], 1), slices))\n a = a[slices]\n a = np.pad(a, pad_width=pads, mode='constant', constant_values=value)\n return a\n\n\nclass CityscapeDataset:\n def __init__(self, dataset_dir, subset='train', config=default_config):\n label_dirname = os.path.join(dataset_dir, 'gtFine/' + subset)\n if six.PY2:\n import commands\n label_files = commands.getoutput(\n \"find %s -type f | grep labelTrainIds | sort\" %\n label_dirname).splitlines()\n else:\n import subprocess\n label_files = subprocess.getstatusoutput(\n \"find %s -type f | grep labelTrainIds | sort\" %\n label_dirname)[-1].splitlines()\n self.label_files = label_files\n self.label_dirname = label_dirname\n self.index = 0\n self.subset = subset\n self.dataset_dir = dataset_dir\n self.config = config\n self.reset()\n print(\"total number\", len(label_files))\n\n def reset(self, shuffle=False):\n self.index = 0\n if self.config[\"shuffle\"]:\n np.random.shuffle(self.label_files)\n\n def next_img(self):\n self.index += 1\n if self.index >= len(self.label_files):\n self.reset()\n\n def get_img(self):\n shape = self.config[\"crop_size\"]\n while True:\n ln = self.label_files[self.index]\n img_name = os.path.join(\n self.dataset_dir,\n 'leftImg8bit/' + self.subset + ln[len(self.label_dirname):])\n img_name = 
img_name.replace('gtFine_labelTrainIds', 'leftImg8bit')\n label = cv2.imread(ln)\n img = cv2.imread(img_name)\n if img is None:\n print(\"load img failed:\", img_name)\n self.next_img()\n else:\n break\n if shape == -1:\n return img, label, ln\n\n if np.random.rand() > 0.5:\n range_l = 1\n range_r = self.config['max_resize']\n else:\n range_l = self.config['min_resize']\n range_r = 1\n\n if np.random.rand() > 0.5:\n assert len(img.shape) == 3 and len(\n label.shape) == 3, \"{} {}\".format(img.shape, label.shape)\n img = img[:, :, ::-1]\n label = label[:, :, ::-1]\n\n random_scale = np.random.rand(1) * (range_r - range_l) + range_l\n crop_size = int(shape / random_scale)\n bb = crop_size // 2\n\n def _randint(low, high):\n return int(np.random.rand(1) * (high - low) + low)\n\n offset_x = np.random.randint(bb, max(bb + 1, img.shape[0] -\n bb)) - crop_size // 2\n offset_y = np.random.randint(bb, max(bb + 1, img.shape[1] -\n bb)) - crop_size // 2\n img_crop = slice_with_pad(img, [[offset_x, offset_x + crop_size],\n [offset_y, offset_y + crop_size]], 128)\n img = cv2.resize(img_crop, (shape, shape))\n label_crop = slice_with_pad(label, [[offset_x, offset_x + crop_size],\n [offset_y, offset_y + crop_size]],\n 255)\n label = cv2.resize(\n label_crop, (shape, shape), interpolation=cv2.INTER_NEAREST)\n return img, label, ln + str(\n (offset_x, offset_y, crop_size, random_scale))\n\n def get_batch(self, batch_size=1):\n imgs = []\n labels = []\n names = []\n while len(imgs) < batch_size:\n img, label, ln = self.get_img()\n imgs.append(img)\n labels.append(label)\n names.append(ln)\n self.next_img()\n return np.array(imgs), np.array(labels), names\n\n def get_batch_generator(self,\n batch_size,\n total_step,\n num_workers=8,\n max_queue=32,\n use_multiprocessing=True):\n def do_get_batch():\n iter_id = 0\n while True:\n imgs, labels, names = self.get_batch(batch_size)\n labels = labels.astype(np.int32)[:, :, :, 0]\n imgs = imgs[:, :, :, ::-1].transpose(\n 0, 3, 1, 2).astype(np.float32) / (255.0 / 2) - 1\n yield imgs, labels, names\n if not use_multiprocessing:\n iter_id += 1\n if iter_id >= total_step:\n break\n\n batches = do_get_batch()\n if not use_multiprocessing:\n try:\n from prefetch_generator import BackgroundGenerator\n batches = BackgroundGenerator(batches, 100)\n except:\n print(\n \"You can install 'prefetch_generator' for acceleration of data reading.\"\n )\n return batches\n\n def reader():\n try:\n enqueuer = GeneratorEnqueuer(\n batches, use_multiprocessing=use_multiprocessing)\n enqueuer.start(max_queue_size=max_queue, workers=num_workers)\n generator_out = None\n for i in range(total_step):\n while enqueuer.is_running():\n if not enqueuer.queue.empty():\n generator_out = enqueuer.queue.get()\n break\n else:\n time.sleep(0.02)\n yield generator_out\n generator_out = None\n enqueuer.stop()\n finally:\n if enqueuer is not None:\n enqueuer.stop()\n\n data_gen = reader()\n return data_gen\n", "#Copyright (c) 2016 PaddlePaddle Authors. 
All Rights Reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport os\nimport sys\nimport time\nimport argparse\nimport logging\nimport paddle.fluid as fluid\nimport paddle\nimport utils\nimport numpy as np\nfrom nets import SequenceSemanticRetrieval\n\nlogging.basicConfig(format=\"%(asctime)s - %(levelname)s - %(message)s\")\nlogger = logging.getLogger(\"fluid\")\nlogger.setLevel(logging.INFO)\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(\"sequence semantic retrieval\")\n parser.add_argument(\n \"--train_dir\", type=str, default='train_data', help=\"Training file\")\n parser.add_argument(\n \"--base_lr\", type=float, default=0.01, help=\"learning rate\")\n parser.add_argument(\n '--vocab_path', type=str, default='vocab.txt', help='vocab file')\n parser.add_argument(\n \"--epochs\", type=int, default=10, help=\"Number of epochs\")\n parser.add_argument(\n '--parallel', type=int, default=0, help='whether parallel')\n parser.add_argument(\n '--use_cuda', type=int, default=0, help='whether use gpu')\n parser.add_argument(\n '--print_batch', type=int, default=10, help='num of print batch')\n parser.add_argument(\n '--model_dir', type=str, default='model_output', help='model dir')\n parser.add_argument(\n \"--hidden_size\", type=int, default=128, help=\"hidden size\")\n parser.add_argument(\n \"--batch_size\", type=int, default=50, help=\"number of batch\")\n parser.add_argument(\n \"--embedding_dim\", type=int, default=128, help=\"embedding dim\")\n parser.add_argument(\n '--num_devices', type=int, default=1, help='Number of GPU devices')\n parser.add_argument(\n '--step_num', type=int, default=1000, help='Number of steps')\n parser.add_argument(\n '--enable_ce',\n action='store_true',\n help='If set, run the task with continuous evaluation logs.')\n return parser.parse_args()\n\n\ndef get_cards(args):\n return args.num_devices\n\n\ndef train(args):\n if args.enable_ce:\n SEED = 102\n fluid.default_startup_program().random_seed = SEED \n fluid.default_main_program().random_seed = SEED \n use_cuda = True if args.use_cuda else False\n parallel = True if args.parallel else False\n print(\"use_cuda:\", use_cuda, \"parallel:\", parallel)\n train_reader, vocab_size = utils.construct_train_data(\n args.train_dir, args.vocab_path, args.batch_size * get_cards(args))\n place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()\n ssr = SequenceSemanticRetrieval(vocab_size, args.embedding_dim,\n args.hidden_size)\n # Train program\n train_input_data, cos_pos, avg_cost, acc = ssr.train()\n\n # Optimization to minimize lost\n optimizer = fluid.optimizer.Adagrad(learning_rate=args.base_lr)\n optimizer.minimize(avg_cost)\n\n data_list = [var.name for var in train_input_data]\n feeder = fluid.DataFeeder(feed_list=data_list, place=place)\n exe = fluid.Executor(place)\n exe.run(fluid.default_startup_program())\n if parallel:\n train_exe = fluid.ParallelExecutor(\n use_cuda=use_cuda, loss_name=avg_cost.name)\n else:\n train_exe = exe\n\n total_time = 0.0\n ce_info = []\n for pass_id in 
range(args.epochs):\n epoch_idx = pass_id + 1\n print(\"epoch_%d start\" % epoch_idx)\n t0 = time.time()\n i = 0\n for batch_id, data in enumerate(train_reader()):\n i += 1\n loss_val, correct_val = train_exe.run(\n feed=feeder.feed(data), fetch_list=[avg_cost.name, acc.name])\n ce_info.append(float(np.mean(correct_val)) / args.batch_size)\n if i % args.print_batch == 0:\n logger.info(\n \"Train --> pass: {} batch_id: {} avg_cost: {}, acc: {}\".\n format(pass_id, batch_id,\n np.mean(loss_val),\n float(np.mean(correct_val)) / args.batch_size))\n if args.enable_ce and i > args.step_num:\n break\n t1 = time.time()\n total_time += t1 - t0\n print(\"epoch:%d num_steps:%d time_cost(s):%f\" %\n (epoch_idx, i, total_time / epoch_idx))\n save_dir = \"%s/epoch_%d\" % (args.model_dir, epoch_idx)\n fluid.io.save_params(executor=exe, dirname=save_dir)\n print(\"model saved in %s\" % save_dir)\n\n # only for ce\n if args.enable_ce:\n ce_acc = 0\n try:\n ce_acc = ce_info[-2]\n except:\n print(\"ce info error\")\n epoch_idx = args.epochs\n device = get_device(args)\n if args.use_cuda:\n gpu_num = device[1]\n print(\"kpis\\teach_pass_duration_gpu%s\\t%s\" %\n (gpu_num, total_time / epoch_idx))\n print(\"kpis\\ttrain_acc_gpu%s\\t%s\" %\n (gpu_num, ce_acc))\n else:\n cpu_num = device[1]\n threads_num = device[2]\n print(\"kpis\\teach_pass_duration_cpu%s_thread%s\\t%s\" %\n (cpu_num, threads_num, total_time / epoch_idx))\n print(\"kpis\\ttrain_acc_cpu%s_thread%s\\t%s\" %\n (cpu_num, threads_num, ce_acc))\n \n\ndef get_device(args):\n if args.use_cuda:\n gpus = os.environ.get(\"CUDA_VISIBLE_DEVICES\", 1)\n gpu_num = len(gpus.split(','))\n return \"gpu\", gpu_num\n else:\n threads_num = os.environ.get('NUM_THREADS', 1)\n cpu_num = os.environ.get('CPU_NUM', 1)\n return \"cpu\", int(cpu_num), int(threads_num)\n \n\ndef main():\n args = parse_args()\n train(args)\n\n\nif __name__ == \"__main__\":\n main()\n", "# Copyright (c) 2019 PaddlePaddle Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# utils for memory management which is allocated on sharedmemory,\n# note that these structures may not be thread-safe\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport os\nimport time\nimport math\nimport struct\nimport sys\nimport six\n\nif six.PY3:\n import pickle\nelse:\n import cPickle as pickle\n\nimport json\nimport uuid\nimport random\nimport numpy as np\nimport weakref\nimport logging\nfrom multiprocessing import Lock\nfrom multiprocessing import RawArray\n\nlogger = logging.getLogger(__name__)\n\n\nclass SharedMemoryError(ValueError):\n \"\"\" SharedMemoryError\n \"\"\"\n pass\n\n\nclass SharedBufferError(SharedMemoryError):\n \"\"\" SharedBufferError\n \"\"\"\n pass\n\n\nclass MemoryFullError(SharedMemoryError):\n \"\"\" MemoryFullError\n \"\"\"\n\n def __init__(self, errmsg=''):\n super(MemoryFullError, self).__init__()\n self.errmsg = errmsg\n\n\ndef memcopy(dst, src, offset=0, length=None):\n \"\"\" copy data from 'src' to 'dst' in bytes\n \"\"\"\n length = length if length is not None else len(src)\n assert type(dst) == np.ndarray, 'invalid type for \"dst\" in memcopy'\n if type(src) is not np.ndarray:\n if type(src) is str and six.PY3:\n src = src.encode()\n src = np.frombuffer(src, dtype='uint8', count=len(src))\n\n dst[:] = src[offset:offset + length]\n\n\nclass SharedBuffer(object):\n \"\"\" Buffer allocated from SharedMemoryMgr, and it stores data on shared memory\n\n note that: \n every instance of this should be freed explicitely by calling 'self.free'\n \"\"\"\n\n def __init__(self, owner, capacity, pos, size=0, alloc_status=''):\n \"\"\" Init\n\n Args:\n owner (str): manager to own this buffer\n capacity (int): capacity in bytes for this buffer\n pos (int): page position in shared memory\n size (int): bytes already used\n alloc_status (str): debug info about allocator when allocate this\n \"\"\"\n self._owner = owner\n self._cap = capacity\n self._pos = pos\n self._size = size\n self._alloc_status = alloc_status\n assert self._pos >= 0 and self._cap > 0, \\\n \"invalid params[%d:%d] to construct SharedBuffer\" \\\n % (self._pos, self._cap)\n\n def owner(self):\n \"\"\" get owner\n \"\"\"\n return SharedMemoryMgr.get_mgr(self._owner)\n\n def put(self, data, override=False):\n \"\"\" put data to this buffer\n\n Args:\n data (str): data to be stored in this buffer\n\n Returns:\n None\n\n Raises:\n SharedMemoryError when not enough space in this buffer\n \"\"\"\n assert type(data) in [str, bytes], \\\n 'invalid type[%s] for SharedBuffer::put' % (str(type(data)))\n if self._size > 0 and not override:\n raise SharedBufferError('already has already been setted before')\n\n if self.capacity() < len(data):\n raise SharedBufferError('data[%d] is larger than size of buffer[%s]'\\\n % (len(data), str(self)))\n\n self.owner().put_data(self, data)\n self._size = len(data)\n\n def get(self, 
offset=0, size=None, no_copy=True):\n \"\"\" get the data stored this buffer\n\n Args:\n offset (int): position for the start point to 'get'\n size (int): size to get\n\n Returns:\n data (np.ndarray('uint8')): user's data in numpy \n which is passed in by 'put'\n None: if no data stored in\n \"\"\"\n offset = offset if offset >= 0 else self._size + offset\n if self._size <= 0:\n return None\n\n size = self._size if size is None else size\n assert offset + size <= self._cap, 'invalid offset[%d] '\\\n 'or size[%d] for capacity[%d]' % (offset, size, self._cap)\n return self.owner().get_data(self, offset, size, no_copy=no_copy)\n\n def size(self):\n \"\"\" bytes of used memory\n \"\"\"\n return self._size\n\n def resize(self, size):\n \"\"\" resize the used memory to 'size', should not be greater than capacity\n \"\"\"\n assert size >= 0 and size <= self._cap, \\\n \"invalid size[%d] for resize\" % (size)\n\n self._size = size\n\n def capacity(self):\n \"\"\" size of allocated memory\n \"\"\"\n return self._cap\n\n def __str__(self):\n \"\"\" human readable format\n \"\"\"\n return \"SharedBuffer(owner:%s, pos:%d, size:%d, \"\\\n \"capacity:%d, alloc_status:[%s], pid:%d)\" \\\n % (str(self._owner), self._pos, self._size, \\\n self._cap, self._alloc_status, os.getpid())\n\n def free(self):\n \"\"\" free this buffer to it's owner\n \"\"\"\n if self._owner is not None:\n self.owner().free(self)\n self._owner = None\n self._cap = 0\n self._pos = -1\n self._size = 0\n return True\n else:\n return False\n\n\nclass PageAllocator(object):\n \"\"\" allocator used to malloc and free shared memory which\n is split into pages\n \"\"\"\n s_allocator_header = 12\n\n def __init__(self, base, total_pages, page_size):\n \"\"\" init\n \"\"\"\n self._magic_num = 1234321000 + random.randint(100, 999)\n self._base = base\n self._total_pages = total_pages\n self._page_size = page_size\n\n header_pages = int(\n math.ceil((total_pages + self.s_allocator_header) / page_size))\n\n self._header_pages = header_pages\n self._free_pages = total_pages - header_pages\n self._header_size = self._header_pages * page_size\n self._reset()\n\n def _dump_alloc_info(self, fname):\n hpages, tpages, pos, used = self.header()\n\n start = self.s_allocator_header\n end = start + self._page_size * hpages\n alloc_flags = self._base[start:end].tostring()\n info = {\n 'magic_num': self._magic_num,\n 'header_pages': hpages,\n 'total_pages': tpages,\n 'pos': pos,\n 'used': used\n }\n info['alloc_flags'] = alloc_flags\n fname = fname + '.' 
+ str(uuid.uuid4())[:6]\n with open(fname, 'wb') as f:\n f.write(pickle.dumps(info, -1))\n logger.warn('dump alloc info to file[%s]' % (fname))\n\n def _reset(self):\n alloc_page_pos = self._header_pages\n used_pages = self._header_pages\n header_info = struct.pack(\n str('III'), self._magic_num, alloc_page_pos, used_pages)\n assert len(header_info) == self.s_allocator_header, \\\n 'invalid size of header_info'\n\n memcopy(self._base[0:self.s_allocator_header], header_info)\n self.set_page_status(0, self._header_pages, '1')\n self.set_page_status(self._header_pages, self._free_pages, '0')\n\n def header(self):\n \"\"\" get header info of this allocator\n \"\"\"\n header_str = self._base[0:self.s_allocator_header].tostring()\n magic, pos, used = struct.unpack(str('III'), header_str)\n\n assert magic == self._magic_num, \\\n 'invalid header magic[%d] in shared memory' % (magic)\n return self._header_pages, self._total_pages, pos, used\n\n def empty(self):\n \"\"\" are all allocatable pages available\n \"\"\"\n header_pages, pages, pos, used = self.header()\n return header_pages == used\n\n def full(self):\n \"\"\" are all allocatable pages used\n \"\"\"\n header_pages, pages, pos, used = self.header()\n return header_pages + used == pages\n\n def __str__(self):\n header_pages, pages, pos, used = self.header()\n desc = '{page_info[magic:%d,total:%d,used:%d,header:%d,alloc_pos:%d,pagesize:%d]}' \\\n % (self._magic_num, pages, used, header_pages, pos, self._page_size)\n return 'PageAllocator:%s' % (desc)\n\n def set_alloc_info(self, alloc_pos, used_pages):\n \"\"\" set allocating position to new value\n \"\"\"\n memcopy(self._base[4:12], struct.pack(str('II'), alloc_pos, used_pages))\n\n def set_page_status(self, start, page_num, status):\n \"\"\" set pages from 'start' to 'end' with new same status 'status'\n \"\"\"\n assert status in ['0', '1'], 'invalid status[%s] for page status '\\\n 'in allocator[%s]' % (status, str(self))\n start += self.s_allocator_header\n end = start + page_num\n assert start >= 0 and end <= self._header_size, 'invalid end[%d] of pages '\\\n 'in allocator[%s]' % (end, str(self))\n memcopy(self._base[start:end], str(status * page_num))\n\n def get_page_status(self, start, page_num, ret_flag=False):\n start += self.s_allocator_header\n end = start + page_num\n assert start >= 0 and end <= self._header_size, 'invalid end[%d] of pages '\\\n 'in allocator[%s]' % (end, str(self))\n status = self._base[start:end].tostring().decode()\n if ret_flag:\n return status\n\n zero_num = status.count('0')\n if zero_num == 0:\n return (page_num, 1)\n else:\n return (zero_num, 0)\n\n def malloc_page(self, page_num):\n header_pages, pages, pos, used = self.header()\n end = pos + page_num\n if end > pages:\n pos = self._header_pages\n end = pos + page_num\n\n start_pos = pos\n flags = ''\n while True:\n # maybe flags already has some '0' pages,\n # so just check 'page_num - len(flags)' pages\n flags += self.get_page_status(\n pos, page_num - len(flags), ret_flag=True)\n\n if flags.count('0') == page_num:\n break\n\n # not found enough pages, so shift to next few pages\n free_pos = flags.rfind('1') + 1\n flags = flags[free_pos:]\n\n pos += free_pos\n end = pos + page_num\n if end > pages:\n pos = self._header_pages\n end = pos + page_num\n flags = ''\n\n # not found available pages after scan all pages\n if pos <= start_pos and end >= start_pos:\n logger.debug('not found available pages after scan all pages')\n break\n\n page_status = (flags.count('0'), 0)\n if page_status != (page_num, 
0):\n free_pages = self._total_pages - used\n if free_pages == 0:\n err_msg = 'all pages have been used:%s' % (str(self))\n else:\n err_msg = 'not found available pages with page_status[%s] '\\\n 'and %d free pages' % (str(page_status), free_pages)\n err_msg = 'failed to malloc %d pages at pos[%d] for reason[%s] and allocator status[%s]' \\\n % (page_num, pos, err_msg, str(self))\n raise MemoryFullError(err_msg)\n\n self.set_page_status(pos, page_num, '1')\n used += page_num\n self.set_alloc_info(end, used)\n\n assert self.get_page_status(pos, page_num) == (page_num, 1), \\\n 'faild to validate the page status'\n return pos\n\n def free_page(self, start, page_num):\n \"\"\" free 'page_num' pages start from 'start'\n \"\"\"\n page_status = self.get_page_status(start, page_num)\n assert page_status == (page_num, 1), \\\n 'invalid status[%s] when free [%d, %d]' \\\n % (str(page_status), start, page_num)\n self.set_page_status(start, page_num, '0')\n _, _, pos, used = self.header()\n used -= page_num\n self.set_alloc_info(pos, used)\n\n\nDEFAULT_SHARED_MEMORY_SIZE = 1024 * 1024 * 1024\n\n\nclass SharedMemoryMgr(object):\n \"\"\" manage a continouse block of memory, provide\n 'malloc' to allocate new buffer, and 'free' to free buffer\n \"\"\"\n s_memory_mgrs = weakref.WeakValueDictionary()\n s_mgr_num = 0\n s_log_statis = False\n\n @classmethod\n def get_mgr(cls, id):\n \"\"\" get a SharedMemoryMgr with size of 'capacity'\n \"\"\"\n assert id in cls.s_memory_mgrs, 'invalid id[%s] for memory managers' % (\n id)\n return cls.s_memory_mgrs[id]\n\n def __init__(self, capacity=None, pagesize=None):\n \"\"\" init\n \"\"\"\n logger.debug('create SharedMemoryMgr')\n\n pagesize = 64 * 1024 if pagesize is None else pagesize\n assert type(pagesize) is int, \"invalid type of pagesize[%s]\" \\\n % (str(pagesize))\n\n capacity = DEFAULT_SHARED_MEMORY_SIZE if capacity is None else capacity\n assert type(capacity) is int, \"invalid type of capacity[%s]\" \\\n % (str(capacity))\n\n assert capacity > 0, '\"size of shared memory should be greater than 0'\n self._released = False\n self._cap = capacity\n self._page_size = pagesize\n\n assert self._cap % self._page_size == 0, \\\n \"capacity[%d] and pagesize[%d] are not consistent\" \\\n % (self._cap, self._page_size)\n self._total_pages = self._cap // self._page_size\n\n self._pid = os.getpid()\n SharedMemoryMgr.s_mgr_num += 1\n self._id = self._pid * 100 + SharedMemoryMgr.s_mgr_num\n SharedMemoryMgr.s_memory_mgrs[self._id] = self\n self._locker = Lock()\n self._setup()\n\n def _setup(self):\n self._shared_mem = RawArray('c', self._cap)\n self._base = np.frombuffer(\n self._shared_mem, dtype='uint8', count=self._cap)\n self._locker.acquire()\n try:\n self._allocator = PageAllocator(self._base, self._total_pages,\n self._page_size)\n finally:\n self._locker.release()\n\n def malloc(self, size, wait=True):\n \"\"\" malloc a new SharedBuffer\n\n Args:\n size (int): buffer size to be malloc\n wait (bool): whether to wait when no enough memory\n\n Returns:\n SharedBuffer\n\n Raises:\n SharedMemoryError when not found available memory\n \"\"\"\n page_num = int(math.ceil(size / self._page_size))\n size = page_num * self._page_size\n\n start = None\n ct = 0\n errmsg = ''\n while True:\n self._locker.acquire()\n try:\n start = self._allocator.malloc_page(page_num)\n alloc_status = str(self._allocator)\n except MemoryFullError as e:\n start = None\n errmsg = e.errmsg\n if not wait:\n raise e\n finally:\n self._locker.release()\n\n if start is None:\n time.sleep(0.1)\n if ct 
% 100 == 0:\n logger.warn('not enough space for reason[%s]' % (errmsg))\n\n ct += 1\n else:\n break\n\n return SharedBuffer(self._id, size, start, alloc_status=alloc_status)\n\n def free(self, shared_buf):\n \"\"\" free a SharedBuffer\n\n Args:\n shared_buf (SharedBuffer): buffer to be freed\n\n Returns:\n None\n\n Raises:\n SharedMemoryError when failed to release this buffer\n \"\"\"\n assert shared_buf._owner == self._id, \"invalid shared_buf[%s] \"\\\n \"for it's not allocated from me[%s]\" % (str(shared_buf), str(self))\n cap = shared_buf.capacity()\n start_page = shared_buf._pos\n page_num = cap // self._page_size\n\n #maybe we don't need this lock here\n self._locker.acquire()\n try:\n self._allocator.free_page(start_page, page_num)\n finally:\n self._locker.release()\n\n def put_data(self, shared_buf, data):\n \"\"\" fill 'data' into 'shared_buf'\n \"\"\"\n assert len(data) <= shared_buf.capacity(), 'too large data[%d] '\\\n 'for this buffer[%s]' % (len(data), str(shared_buf))\n start = shared_buf._pos * self._page_size\n end = start + len(data)\n assert start >= 0 and end <= self._cap, \"invalid start \"\\\n \"position[%d] when put data to buff:%s\" % (start, str(shared_buf))\n self._base[start:end] = np.frombuffer(data, 'uint8', len(data))\n\n def get_data(self, shared_buf, offset, size, no_copy=True):\n \"\"\" extract 'data' from 'shared_buf' in range [offset, offset + size)\n \"\"\"\n start = shared_buf._pos * self._page_size\n start += offset\n if no_copy:\n return self._base[start:start + size]\n else:\n return self._base[start:start + size].tostring()\n\n def __str__(self):\n return 'SharedMemoryMgr:{id:%d, %s}' % (self._id, str(self._allocator))\n\n def __del__(self):\n if SharedMemoryMgr.s_log_statis:\n logger.info('destroy [%s]' % (self))\n\n if not self._released and not self._allocator.empty():\n logger.warn('not empty when delete this SharedMemoryMgr[%s]' %\n (self))\n else:\n self._released = True\n\n if self._id in SharedMemoryMgr.s_memory_mgrs:\n del SharedMemoryMgr.s_memory_mgrs[self._id]\n SharedMemoryMgr.s_mgr_num -= 1\n", "import sys\nimport collections\nimport six\nimport time\nimport numpy as np\nimport paddle.fluid as fluid\nimport paddle\nimport os\n\n\ndef to_lodtensor(data, place):\n \"\"\" convert to LODtensor \"\"\"\n seq_lens = [len(seq) for seq in data]\n cur_len = 0\n lod = [cur_len]\n for l in seq_lens:\n cur_len += l\n lod.append(cur_len)\n flattened_data = np.concatenate(data, axis=0).astype(\"int64\")\n flattened_data = flattened_data.reshape([len(flattened_data), 1])\n res = fluid.LoDTensor()\n res.set(flattened_data, place)\n res.set_lod([lod])\n return res\n\n\ndef to_lodtensor_bpr(raw_data, neg_size, vocab_size, place):\n \"\"\" convert to LODtensor \"\"\"\n data = [dat[0] for dat in raw_data]\n seq_lens = [len(seq) for seq in data]\n cur_len = 0\n lod = [cur_len]\n for l in seq_lens:\n cur_len += l\n lod.append(cur_len)\n flattened_data = np.concatenate(data, axis=0).astype(\"int64\")\n flattened_data = flattened_data.reshape([len(flattened_data), 1])\n res = fluid.LoDTensor()\n res.set(flattened_data, place)\n res.set_lod([lod])\n\n data = [dat[1] for dat in raw_data]\n pos_data = np.concatenate(data, axis=0).astype(\"int64\")\n length = np.size(pos_data)\n neg_data = np.tile(pos_data, neg_size)\n np.random.shuffle(neg_data)\n for ii in range(length * neg_size):\n if neg_data[ii] == pos_data[ii // neg_size]:\n neg_data[ii] = pos_data[length - 1 - ii // neg_size]\n\n label_data = np.column_stack(\n (pos_data.reshape(length, 1), 
neg_data.reshape(length, neg_size)))\n res_label = fluid.LoDTensor()\n res_label.set(label_data, place)\n res_label.set_lod([lod])\n\n res_pos = fluid.LoDTensor()\n res_pos.set(np.zeros([len(flattened_data), 1]).astype(\"int64\"), place)\n res_pos.set_lod([lod])\n\n return res, res_pos, res_label\n\n\ndef to_lodtensor_bpr_test(raw_data, vocab_size, place):\n \"\"\" convert to LODtensor \"\"\"\n data = [dat[0] for dat in raw_data]\n seq_lens = [len(seq) for seq in data]\n cur_len = 0\n lod = [cur_len]\n for l in seq_lens:\n cur_len += l\n lod.append(cur_len)\n flattened_data = np.concatenate(data, axis=0).astype(\"int64\")\n flattened_data = flattened_data.reshape([len(flattened_data), 1])\n res = fluid.LoDTensor()\n res.set(flattened_data, place)\n res.set_lod([lod])\n\n data = [dat[1] for dat in raw_data]\n flattened_data = np.concatenate(data, axis=0).astype(\"int64\")\n flattened_data = flattened_data.reshape([len(flattened_data), 1])\n res_pos = fluid.LoDTensor()\n res_pos.set(flattened_data, place)\n res_pos.set_lod([lod])\n return res, res_pos\n\n\ndef get_vocab_size(vocab_path):\n with open(vocab_path, \"r\") as rf:\n line = rf.readline()\n return int(line.strip())\n\n\ndef prepare_data(file_dir,\n vocab_path,\n batch_size,\n buffer_size=1000,\n word_freq_threshold=0,\n is_train=True):\n \"\"\" prepare the English Pann Treebank (PTB) data \"\"\"\n print(\"start constuct word dict\")\n if is_train:\n vocab_size = get_vocab_size(vocab_path)\n reader = sort_batch(\n paddle.reader.shuffle(\n train(\n file_dir, buffer_size, data_type=DataType.SEQ),\n buf_size=buffer_size),\n batch_size,\n batch_size * 20)\n else:\n vocab_size = get_vocab_size(vocab_path)\n reader = paddle.batch(\n test(\n file_dir, buffer_size, data_type=DataType.SEQ), batch_size)\n return vocab_size, reader\n\n\ndef sort_batch(reader, batch_size, sort_group_size, drop_last=False):\n \"\"\"\n Create a batched reader.\n :param reader: the data reader to read from.\n :type reader: callable\n :param batch_size: size of each mini-batch\n :type batch_size: int\n :param sort_group_size: size of partial sorted batch\n :type sort_group_size: int\n :param drop_last: drop the last batch, if the size of last batch is not equal to batch_size.\n :type drop_last: bool\n :return: the batched reader.\n :rtype: callable\n \"\"\"\n\n def batch_reader():\n r = reader()\n b = []\n for instance in r:\n b.append(instance)\n if len(b) == sort_group_size:\n sortl = sorted(b, key=lambda x: len(x[0]), reverse=True)\n b = []\n c = []\n for sort_i in sortl:\n c.append(sort_i)\n if (len(c) == batch_size):\n yield c\n c = []\n if drop_last == False and len(b) != 0:\n sortl = sorted(b, key=lambda x: len(x[0]), reverse=True)\n c = []\n for sort_i in sortl:\n c.append(sort_i)\n if (len(c) == batch_size):\n yield c\n c = []\n\n # Batch size check\n batch_size = int(batch_size)\n if batch_size <= 0:\n raise ValueError(\"batch_size should be a positive integeral value, \"\n \"but got batch_size={}\".format(batch_size))\n return batch_reader\n\n\nclass DataType(object):\n SEQ = 2\n\n\ndef reader_creator(file_dir, n, data_type):\n def reader():\n files = os.listdir(file_dir)\n for fi in files:\n with open(file_dir + '/' + fi, \"r\") as f:\n for l in f:\n if DataType.SEQ == data_type:\n l = l.strip().split()\n l = [w for w in l]\n src_seq = l[:len(l) - 1]\n trg_seq = l[1:]\n if n > 0 and len(src_seq) > n: continue\n yield src_seq, trg_seq\n else:\n assert False, 'error data type'\n\n return reader\n\n\ndef train(train_dir, n, data_type=DataType.SEQ):\n return 
reader_creator(train_dir, n, data_type)\n\n\ndef test(test_dir, n, data_type=DataType.SEQ):\n return reader_creator(test_dir, n, data_type)\n" ]
[ [ "numpy.array", "numpy.argmax", "numpy.sum" ], [ "numpy.percentile", "numpy.average", "numpy.array", "numpy.where", "numpy.divide" ], [ "numpy.concatenate", "numpy.loadtxt" ], [ "numpy.random.rand", "numpy.array", "numpy.pad", "numpy.random.shuffle" ], [ "numpy.mean" ], [ "numpy.frombuffer" ], [ "numpy.concatenate", "numpy.size", "numpy.tile", "numpy.random.shuffle" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
ice-blaze/simple-captcha-deeplearning
[ "16960249bf316bef8fe6b9d86113c902309b36c5" ]
[ "deep_learning.py" ]
[ "from generate_captchas import CHAR_POSSIBILITIES\nfrom generate_captchas import generate_captcha\nfrom generate_captchas import get_random_captcha_names_and_lines\nfrom digital_processing_image_approach import clean_image_kernel4\nimport keras\nfrom keras.models import Sequential, load_model\nfrom keras.layers import Dense, Conv2D, MaxPooling2D, Flatten, Dropout\nimport os\nimport imageio\nimport random\nimport numpy as np\nnp.random.seed(123) # for reproducibility\n\n\ndef add_dict(a, b):\n \"\"\"\n :param a dict: Dictionary we will merge with b\n :param b dict: Dictionary that will be merged into a\n :return a dict: Merged dictionary of a and b\n \"\"\"\n for key in b:\n a[key] = a.get(key, 0) + b[key]\n\n return a\n\n\ndef similar(real, predicted):\n \"\"\"\n Compare if the captcha code predicted is close to the real one\n :param real string: Real captcha string\n :param predicted string: Predicted captcha string\n :return\n wrong_letter_count float: Percentage of wrong letter\n wrong_letter_dict dict: Dict of all wrong letters as key and a counter\n of failed as value\n \"\"\"\n wrong_letter_count = 0\n\n wrong_letter_dict = {}\n for real_letter, preddicted_letter in zip(real, predicted):\n if real_letter != preddicted_letter:\n wrong_letter_dict[real_letter] = \\\n wrong_letter_dict.get(real_letter, 0) + 1\n wrong_letter_count += 1\n\n wrong_letter_count /= len(real)\n wrong_letter_count = 1.0 - wrong_letter_count\n\n return wrong_letter_count, wrong_letter_dict\n\n\ndef create_model(input_shape, number_of_classes):\n \"\"\"\n :param input_shape numpy1d: Shape of the image\n :param number_of_classes int: Class number the model should handle\n :return model Model: Keras model\n \"\"\"\n model = Sequential()\n model.add(Conv2D(\n 20,\n kernel_size=(5, 5),\n padding=\"same\",\n strides=(1, 1),\n activation='relu',\n input_shape=(input_shape)\n ))\n\n model.add(Conv2D(32, (3, 3), padding=\"same\", activation='relu'))\n model.add(Conv2D(32, (3, 3), activation='relu'))\n model.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))\n model.add(Dropout(0.25))\n\n model.add(Conv2D(64, (3, 3), padding=\"same\", activation='relu'))\n model.add(Conv2D(64, (3, 3), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))\n model.add(Dropout(0.25))\n model.add(Conv2D(128, (3, 3), padding=\"same\", activation='relu'))\n model.add(Conv2D(128, (3, 3), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))\n model.add(Dropout(0.25))\n\n model.add(Flatten())\n model.add(Dense(64*8*8, activation='relu'))\n model.add(Dropout(0.5))\n model.add(Dense(number_of_classes, activation='softmax'))\n\n model.compile(\n loss=keras.losses.categorical_crossentropy,\n optimizer=\"Adamax\",\n metrics=['accuracy']\n )\n\n return model\n\n\ndef chunks(array, chunk_size):\n \"\"\"\n Convert a 1D list into a 2D list with length of the array of array equal\n to chunk_size\n :param array list: list of object\n :param chunk_size int: length of the chunks\n :return 2d list:\n \"\"\"\n for i in range(0, len(array), chunk_size):\n yield array[i:i + chunk_size]\n\n\ndef one_label(char):\n \"\"\"\n Convert one char into a binarized label\n :param char string: one character\n :return zeros list int: binarized label\n \"\"\"\n zeros = [0.0] * len(CHAR_POSSIBILITIES)\n char_index = CHAR_POSSIBILITIES.index(char)\n zeros[char_index] = 1.0\n return zeros\n\n\ndef char_to_num(captcha_name):\n \"\"\"\n Convert catpcha character to binarized labels\n :param captcha_name string: code of the 
captcha\n :return all_labels list int: name transformed into binarized labels\n \"\"\"\n all_labels = []\n for char in captcha_name:\n all_labels += one_label(char)\n return all_labels\n\n\ndef num_to_char(captcha_binarized_label, char_count):\n \"\"\"\n Convert captcha binarized labels to chars\n :param captcha_binarized_label list int: captcha binarized\n :param char_count int: length of the original captcha name\n :return captcha_name string: captcha code\n \"\"\"\n captcha_name = \"\"\n\n for x in range(char_count):\n length = len(CHAR_POSSIBILITIES)\n char_range = captcha_binarized_label[x * length:(x + 1) * length]\n char_index = np.argmax(char_range)\n captcha_name += CHAR_POSSIBILITIES[char_index]\n\n return captcha_name\n\n\ndef load_data_no_generator(generated_captcha_path, captchas, char_count):\n \"\"\"\n :param generated_captcha_path string: folder containing captchas\n :param captchas list string: All captcha names\n :param char_count int: Length of the captcha name\n \"\"\"\n x = np.array([\n clean_image_kernel4(imageio.imread(generated_captcha_path + captcha))\n for captcha in captchas\n ])\n\n # Binarize the labels (multi class)\n label_in_list = [\n list(captcha[:char_count])\n for captcha in captchas\n ]\n label_in_numlist = [\n char_to_num(label)\n for label in label_in_list\n ]\n # labels need to be a list [0,1,0,0,1,...]\n y = np.array(label_in_numlist)\n\n # 5. Preprocess input data\n x = x.astype(float)\n x /= np.max(x) # normalize\n\n return x, y\n\n\ndef load_data(captchas):\n \"\"\"\n :param captchas list string: Captcha names\n :return list tuple numpy2d,labels: Tuple of image and labels binarized\n \"\"\"\n while True:\n for captcha_chunk in captchas:\n x = np.array([\n # TODO opti possible\n clean_image_kernel4(generate_captcha(\n captcha.split(\"-\")[0], captcha.split(\"-\")[1])\n )\n for captcha in captcha_chunk\n ])\n\n # Binarize the labels (multi class)\n label_in_list = [\n list(captcha.split(\"-\")[0])\n for captcha in captcha_chunk\n ]\n label_in_numlist = [\n char_to_num(label)\n for label in label_in_list\n ]\n # labels need to be a list [0,1,0,0,1,...]\n y = np.array(label_in_numlist)\n\n # 5. 
Preprocess input data\n x = x.astype(float)\n x /= np.max(x) # normalize\n\n yield x, y\n\n\ndef train_and_test_model(number_of_captchas=10, model_path=None):\n \"\"\"\n :param number_of_captchas int: Number of captchas to use for training\n :param model_path string: Path of the model if it exists\n :return None: Print test result\n \"\"\"\n number_of_classes = len(CHAR_POSSIBILITIES)\n captchas = list(get_random_captcha_names_and_lines(number_of_captchas))\n random.shuffle(captchas)\n char_count = len(captchas[0].split(\"-\")[0])\n batch_size = 250\n\n pivot = int(len(captchas) / 10)\n x_five, y_five = next(load_data([captchas[:1]]))\n\n captchas_train = list(chunks(captchas[pivot:], batch_size))\n captchas_test = list(chunks(captchas[:pivot], batch_size))\n\n if os.path.exists(model_path):\n model = load_model(model_path)\n else:\n model = create_model(x_five[0].shape, number_of_classes * char_count)\n\n epochs = 1\n model.fit_generator(\n load_data(captchas_train),\n steps_per_epoch=len(captchas_train),\n epochs=epochs,\n verbose=1,\n )\n\n # Save model\n model.save(model_path)\n\n score = model.evaluate_generator(\n load_data(captchas_test),\n steps=batch_size,\n )\n\n print(score)\n print('Test loss:', score[0])\n print('Test accuracy:', score[1])\n\n # Test with real captchas\n path = \"./real-captchas/\"\n real_captchas = os.listdir(path)\n print_test(model, path, real_captchas, char_count, 100)\n\n\ndef print_test(model, path, captchas, char_count, max_size=100):\n \"\"\"\n :param model Model: Keras model to read captchas\n :param path string: Path where the real captchas are stored\n :param captchas list string: All captcha names\n :param char_count int: Length of the captcha name\n :param max_size int: Number of captchas we want to test\n :return None: Print captcha test results\n \"\"\"\n print(\"Real captcha test\")\n data = load_data_no_generator(path, captchas, char_count)\n x = data[0]\n y = data[1]\n allx = model.predict(x)\n\n predicted = [\n num_to_char(predict, char_count) for predict in allx[:max_size]\n ]\n real = [num_to_char(real_label, char_count) for real_label in y[:max_size]]\n ziper = zip(real, predicted)\n correct = 0\n mean_similar = 0\n error_dict = {}\n for z in ziper:\n sim, sim_dict = similar(z[0], z[1])\n mean_similar += sim\n error_dict = add_dict(error_dict, sim_dict)\n if z[0] == z[1]:\n correct += 1\n print(str(z[0] == z[1]) + \" \" + str(z) + \" similarity: \" + str(sim))\n print(\"overall: \" + str(correct/len(predicted)))\n print(\"overall similarity: \" + str(mean_similar / len(predicted)))\n print(error_dict)\n print(sorted(error_dict.keys()))\n\n\nif __name__ == \"__main__\":\n model_path = \"model.h5\"\n # train_and_test_model(1600000, model_path)\n train_and_test_model(800000, model_path)\n" ]
[ [ "numpy.max", "numpy.array", "numpy.argmax", "numpy.random.seed" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
Samteymoori/pepper
[ "734d226de47a855952e3b58145c1fcfbe221d3b4" ]
[ "pepper_variant/modules/python/models/predict_distributed_cpu.py" ]
[ "import sys\nimport os\nimport torch\nimport torch.onnx\nimport torch.distributed as dist\nimport torch.nn as nn\nimport onnxruntime\nfrom datetime import datetime\nfrom torch.utils.data import DataLoader\nimport torch.multiprocessing as mp\n\nfrom pepper_variant.modules.python.models.dataloader_predict import SequenceDataset\nfrom pepper_variant.modules.python.models.ModelHander import ModelHandler\nfrom pepper_variant.modules.python.Options import ImageSizeOptions, TrainOptions\nfrom pepper_variant.modules.python.DataStorePredict import DataStore\n\n\ndef predict(input_filepath, file_chunks, output_filepath, model_path, batch_size, num_workers, threads, thread_id):\n # session options\n sess_options = onnxruntime.SessionOptions()\n sess_options.intra_op_num_threads = threads\n sess_options.execution_mode = onnxruntime.ExecutionMode.ORT_SEQUENTIAL\n sess_options.graph_optimization_level = onnxruntime.GraphOptimizationLevel.ORT_ENABLE_ALL\n\n ort_session = onnxruntime.InferenceSession(model_path + \".onnx\", sess_options=sess_options)\n torch.set_num_threads(threads)\n\n # create output file\n output_filename = output_filepath + \"pepper_prediction_\" + str(thread_id) + \".hdf\"\n prediction_data_file = DataStore(output_filename, mode='w')\n\n # data loader\n input_data = SequenceDataset(input_filepath, file_chunks)\n\n data_loader = DataLoader(input_data,\n batch_size=batch_size,\n shuffle=False,\n num_workers=num_workers)\n\n batch_completed = 0\n total_batches = len(data_loader)\n with torch.no_grad():\n for contig, contig_start, contig_end, chunk_id, images, position, index in data_loader:\n images = images.type(torch.FloatTensor)\n hidden = torch.zeros(images.size(0), 2 * TrainOptions.GRU_LAYERS, TrainOptions.HIDDEN_SIZE)\n\n prediction_base_tensor = torch.zeros((images.size(0), images.size(1), ImageSizeOptions.TOTAL_LABELS))\n\n for i in range(0, ImageSizeOptions.SEQ_LENGTH, TrainOptions.WINDOW_JUMP):\n if i + TrainOptions.TRAIN_WINDOW > ImageSizeOptions.SEQ_LENGTH:\n break\n chunk_start = i\n chunk_end = i + TrainOptions.TRAIN_WINDOW\n # chunk all the data\n image_chunk = images[:, chunk_start:chunk_end]\n\n # run inference on onnx mode, which takes numpy inputs\n ort_inputs = {ort_session.get_inputs()[0].name: image_chunk.cpu().numpy(),\n ort_session.get_inputs()[1].name: hidden.cpu().numpy()}\n output_base, hidden = ort_session.run(None, ort_inputs)\n output_base = torch.from_numpy(output_base)\n hidden = torch.from_numpy(hidden)\n\n # now calculate how much padding is on the top and bottom of this chunk so we can do a simple\n # add operation\n top_zeros = chunk_start\n bottom_zeros = ImageSizeOptions.SEQ_LENGTH - chunk_end\n\n # do softmax and get prediction\n # we run a softmax a padding to make the output tensor compatible for adding\n inference_layers = nn.Sequential(\n nn.Softmax(dim=2),\n nn.ZeroPad2d((0, 0, top_zeros, bottom_zeros))\n )\n\n # run the softmax and padding layers\n base_prediction = (inference_layers(output_base) * 10).type(torch.IntTensor)\n\n # now simply add the tensor to the global counter\n prediction_base_tensor = torch.add(prediction_base_tensor, base_prediction)\n\n # base_values, base_labels = torch.max(prediction_base_tensor, 2)\n #\n # predicted_base_labels = base_labels.cpu().numpy()\n prediction_base_tensor = prediction_base_tensor.cpu().numpy().astype(int)\n\n for i in range(images.size(0)):\n prediction_data_file.write_prediction(contig[i],\n contig_start[i],\n contig_end[i],\n chunk_id[i],\n position[i],\n index[i],\n 
prediction_base_tensor[i])\n batch_completed += 1\n\n if thread_id == 0 and batch_completed % 5 == 0:\n sys.stderr.write(\"[\" + str(datetime.now().strftime('%m-%d-%Y %H:%M:%S')) + \"] \" +\n \"INFO: BATCHES PROCESSED \" + str(batch_completed) + \"/\" + str(total_batches) + \".\\n\")\n sys.stderr.flush()\n\n\ndef cleanup():\n dist.destroy_process_group()\n\n\ndef setup(rank, total_callers, args, all_input_files):\n os.environ['MASTER_ADDR'] = 'localhost'\n os.environ['MASTER_PORT'] = '12355'\n\n # initialize the process group\n dist.init_process_group(\"gloo\", rank=rank, world_size=total_callers)\n\n filepath, output_filepath, model_path, batch_size, threads, num_workers = args\n\n # run this caller's share of the prediction work, then tear down the\n # process group.\n predict(filepath, all_input_files[rank], output_filepath, model_path, batch_size, num_workers, threads, rank)\n cleanup()\n\n\ndef predict_distributed_cpu(filepath, file_chunks, output_filepath, model_path, batch_size, callers, threads, num_workers):\n \"\"\"\n Create a prediction table/dictionary of a set of images using a trained model.\n :param filepath: Path to image files to predict on\n :param file_chunks: Path to chunked files\n :param batch_size: Batch size used for prediction\n :param model_path: Path to a trained model\n :param output_filepath: Path to output directory\n :param callers: Number of callers to start\n :param threads: Number of threads per caller.\n :param num_workers: Number of workers to be used by the dataloader\n :return: Prediction dictionary\n \"\"\"\n transducer_model, hidden_size, gru_layers, prev_ite = \\\n ModelHandler.load_simple_model_for_training(model_path,\n input_channels=ImageSizeOptions.IMAGE_CHANNELS,\n image_features=ImageSizeOptions.IMAGE_HEIGHT,\n seq_len=ImageSizeOptions.SEQ_LENGTH,\n num_classes=ImageSizeOptions.TOTAL_LABELS)\n\n transducer_model.eval()\n\n sys.stderr.write(\"[\" + str(datetime.now().strftime('%m-%d-%Y %H:%M:%S')) + \"] INFO: MODEL LOADING TO ONNX\\n\")\n x = torch.zeros(1, TrainOptions.TRAIN_WINDOW, ImageSizeOptions.IMAGE_HEIGHT)\n h = torch.zeros(1, 2 * TrainOptions.GRU_LAYERS, TrainOptions.HIDDEN_SIZE)\n\n if not os.path.isfile(model_path + \".onnx\"):\n sys.stderr.write(\"[\" + str(datetime.now().strftime('%m-%d-%Y %H:%M:%S')) + \"] INFO: SAVING MODEL TO ONNX\\n\")\n torch.onnx.export(transducer_model, (x, h),\n model_path + \".onnx\",\n training=False,\n opset_version=10,\n do_constant_folding=True,\n input_names=['input_image', 'input_hidden'],\n output_names=['output_pred', 'output_hidden'],\n dynamic_axes={'input_image': {0: 'batch_size'},\n 'input_hidden': {0: 'batch_size'},\n 'output_pred': {0: 'batch_size'},\n 'output_hidden': {0: 'batch_size'}})\n\n transducer_model.eval()\n args = (filepath, output_filepath, model_path, batch_size, threads, num_workers)\n\n mp.spawn(setup,\n args=(callers, args, file_chunks),\n nprocs=callers,\n join=True)\n" ]
[ [ "torch.nn.Softmax", "torch.onnx.export", "torch.distributed.init_process_group", "torch.multiprocessing.spawn", "torch.zeros", "torch.add", "torch.utils.data.DataLoader", "torch.from_numpy", "torch.set_num_threads", "torch.no_grad", "torch.distributed.destroy_process_group", "torch.nn.ZeroPad2d" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
thanever/SOC
[ "9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4", "9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4", "9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4", "9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4", "9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4", "9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4" ]
[ "Data/scigrid-de/pypower/scigrid_2011_01_07_01.py", "Data/scigrid-de/pypower/scigrid_2011_01_07_22.py", "Data/scigrid-de/pypower/scigrid_2011_01_08_02.py", "Uncertainty/data/case-ln/case_ln_101.py", "Data/scigrid-de/pypower/scigrid_2011_01_06_19.py", "Uncertainty/data/case-de/case_de_103.py" ]
[ "from numpy import array\ndef scigrid_2011_01_07_01():\n\tppc = {\"version\": '2'}\n\tppc[\"baseMVA\"] = 100.0\n\tppc[\"bus\"] = array([\n\t\t[586,\t\t3,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[589,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[590,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[593,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[595,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[598,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[599,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[602,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[603,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[607,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[608,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[609,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[612,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[614,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[616,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[617,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[618,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[619,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[624,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[629,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[632,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[637,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[638,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[640,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[641,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[642,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[643,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[647,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[652,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[655,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[663,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[666,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[670,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[672,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[676,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,
\t\t0.9\t\t],\n\t\t[681,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[683,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[687,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[694,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[695,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[697,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[698,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[702,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[705,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[707,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[714,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[716,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[717,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[722,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[724,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[730,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[732,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[735,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[741,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[742,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[743,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[747,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[749,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[750,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[753,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[761,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[762,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[765,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[767,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[772,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[774,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[777,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[778,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[781,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[784,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[785,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[788,\t\
t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[789,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[791,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[792,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[795,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[800,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[801,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[802,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[805,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[806,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[808,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[809,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[811,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[814,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[816,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[817,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[821,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[826,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[834,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[835,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[836,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[837,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[839,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[841,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[843,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[844,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[850,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[851,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[853,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[856,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[857,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[858,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[860,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[865,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[867,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[869,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[870,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,
\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[872,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[874,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[875,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[882,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[883,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[885,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[886,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[889,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[890,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[893,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[894,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[895,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[896,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[898,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[902,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[903,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[905,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[906,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[907,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[909,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[917,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[918,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[920,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[921,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[922,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[923,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[925,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[931,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[936,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[937,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[939,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[940,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[944,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[950,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[952,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[958,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380
.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[959,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[960,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[963,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[965,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[967,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[969,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999644,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[971,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[978,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[982,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[983,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[984,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[985,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[986,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[987,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[988,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[993,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[994,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[995,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[997,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[999,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1002,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1007,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1010,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1011,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1012,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1014,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1027,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1028,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1029,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1030,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1031,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1032,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1033,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1034,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1035,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1036,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\
t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1037,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1038,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1039,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1040,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1041,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1042,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1043,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1044,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1045,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1046,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1047,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1048,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1049,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1050,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1051,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1052,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1053,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1054,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1055,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1056,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1057,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1058,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1059,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1060,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1061,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1062,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1063,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1064,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1065,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1066,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1067,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1068,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1069,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1070,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1071,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1072,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t
380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1073,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1074,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1075,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1076,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1077,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1078,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1079,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1080,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1081,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1082,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1083,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1084,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1085,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1086,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1087,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1088,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1089,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1090,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1091,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1092,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1093,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1096,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1097,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1098,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1099,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1100,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1101,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1102,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1103,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1105,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1106,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1107,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1108,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1109,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1110,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1111,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\
t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1113,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1114,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1115,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1116,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1117,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1118,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1119,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1120,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1121,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1122,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1123,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1124,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1125,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1126,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1127,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1128,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1129,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1130,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1131,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1133,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1134,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1135,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1136,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1137,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1138,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1139,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1140,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1142,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1143,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1144,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1145,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1146,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1147,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1148,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1149,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1150,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,
\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1151,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1152,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1155,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1157,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1160,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1161,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1162,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1163,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1164,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1165,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1166,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1168,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1169,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1171,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1172,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1173,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1175,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1176,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1177,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1178,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1179,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1181,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1182,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1183,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1184,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1186,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1187,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1188,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1189,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1190,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1191,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1192,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1193,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1194,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1195,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1196,\t\t2,\t\t0,\t\t0,\t\t0,\t\
t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1197,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1198,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1199,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1200,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1201,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1202,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1203,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1204,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1205,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1206,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1207,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1208,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1209,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1210,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1211,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1212,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1213,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1214,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1215,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1216,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1217,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1218,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1219,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1220,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1221,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1222,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1223,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1224,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1225,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1226,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1227,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1228,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1229,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1230,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1231,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1232,\t\t2,\t\t0,\t\t0,
\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1233,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1235,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1236,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1237,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1238,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1239,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1240,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1241,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1242,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1243,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1244,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1245,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1246,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1247,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1248,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1249,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1250,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1251,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1252,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1253,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1254,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1255,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1256,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1257,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1258,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1259,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1260,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1261,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1262,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1263,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1264,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1265,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1266,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1267,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1268,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1269,\t\t2,\t\
t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1270,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1271,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1272,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1273,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1274,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1275,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1276,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1277,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1278,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1279,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1280,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1281,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1282,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1283,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1284,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1285,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1286,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1287,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1288,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1289,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1290,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1291,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1292,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1293,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1294,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1295,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1296,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1297,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1298,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1299,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1300,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1301,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1302,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1303,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1304,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1305,
\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1306,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1307,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1308,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1309,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1310,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1311,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1312,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1313,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1314,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1315,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1316,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1317,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1318,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1319,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1320,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1321,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1322,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1323,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1324,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1325,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1326,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1327,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1328,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1329,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1330,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1332,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1333,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1334,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1335,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1336,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1337,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1338,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1339,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1340,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1341,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[1342,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1343,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1344,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1345,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1346,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1347,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1348,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1349,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1350,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1351,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1352,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1355,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1356,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1357,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1358,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1359,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1363,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1364,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1365,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1366,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1367,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1368,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1369,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1370,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1371,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1372,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1373,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1374,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1375,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1376,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1377,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1378,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1379,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1381,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1382,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1383,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9
\t\t],\n\t\t[1387,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1390,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1391,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1393,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1394,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1395,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1396,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1397,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1398,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1399,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1400,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1401,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1402,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1403,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1404,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1405,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1406,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1407,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1408,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1409,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1410,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1411,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1412,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1413,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1414,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1415,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1416,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1417,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1418,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1419,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1420,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1421,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999644,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1422,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1423,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1424,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1425,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\
t\t1.1,\t\t0.9\t\t],\n\t\t[1426,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1427,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1428,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1429,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1430,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1431,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1432,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1433,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1434,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1435,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1436,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1437,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1438,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1439,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1440,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1441,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1442,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1443,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1444,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1445,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1446,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1447,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1448,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1449,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1450,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1451,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1452,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1453,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1454,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1455,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1456,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1459,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1460,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1461,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1463,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1464,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.
0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1466,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1467,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1468,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1469,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1470,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1471,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1472,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1473,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1474,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1475,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1476,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1477,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1479,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1480,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1481,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1482,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1483,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1484,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1485,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1486,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1487,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1488,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1489,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1490,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1491,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1492,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1493,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1494,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1495,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1496,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1497,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1498,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1499,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1500,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1501,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1502,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0
,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1503,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1504,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1505,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1506,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1507,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1508,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1510,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1511,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1512,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1513,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1514,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1516,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1517,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1518,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1519,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1,\t\t1,\t\t231.535683,\t\t46.307137,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[2,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000015,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[3,\t\t1,\t\t40.581977,\t\t8.116395,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[4,\t\t1,\t\t66.738408,\t\t13.347682,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[5,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998829,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[6,\t\t1,\t\t195.97163,\t\t39.194326,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[7,\t\t1,\t\t147.688993,\t\t29.537799,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[8,\t\t1,\t\t123.575597,\t\t24.715119,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[9,\t\t1,\t\t83.572245,\t\t16.714449,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[10,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001864,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[11,\t\t1,\t\t73.223533,\t\t14.644707,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[12,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[13,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000519,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[14,\t\t1,\t\t175.12383,\t\t35.024766,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[15,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000477,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[16,\t\t1,\t\t298.667302,\t\t59.73346,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[17,\t\t1,\t\t70.343995,\t\t14.068799,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[18,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002785,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[19,\t\t1,\t\t173.793495,\t\t34.758699,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t2
20.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[20,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998624,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[21,\t\t1,\t\t747.338688,\t\t149.467738,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[22,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000541,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[23,\t\t1,\t\t97.851973,\t\t19.570395,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[24,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999995,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[25,\t\t1,\t\t46.803281,\t\t9.360656,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[26,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000745,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[27,\t\t1,\t\t57.452323,\t\t11.490465,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[28,\t\t1,\t\t169.754403,\t\t33.950881,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[29,\t\t1,\t\t62.354326,\t\t12.470865,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[30,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999264,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[31,\t\t1,\t\t122.711704,\t\t24.542341,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[32,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.995193,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[33,\t\t1,\t\t153.857417,\t\t30.771483,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[34,\t\t1,\t\t30.52459,\t\t6.104918,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[35,\t\t1,\t\t2.020889,\t\t0.404178,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[36,\t\t1,\t\t6.690873,\t\t1.338175,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[37,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002691,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[38,\t\t1,\t\t161.19808,\t\t32.239616,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[39,\t\t1,\t\t52.784066,\t\t10.556813,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[40,\t\t1,\t\t55.134608,\t\t11.026922,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[41,\t\t1,\t\t59.257208,\t\t11.851442,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[42,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001586,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[43,\t\t1,\t\t90.873598,\t\t18.17472,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[44,\t\t1,\t\t116.259296,\t\t23.251859,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[45,\t\t1,\t\t61.713034,\t\t12.342607,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[46,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000336,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[47,\t\t1,\t\t268.333226,\t\t53.666645,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[48,\t\t1,\t\t184.443359,\t\t36.888672,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[49,\t\t1,\t\t46.654864,\t\t9.330973,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[50,\t\t1,\t\t67.93578,\t\t13.587156,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[51,\t\t1,\t\t88.040336,\t\t17.608067,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[5
2,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0001,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[53,\t\t1,\t\t133.58711,\t\t26.717422,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[54,\t\t1,\t\t67.87003,\t\t13.574006,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[55,\t\t1,\t\t66.560665,\t\t13.312133,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[56,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999841,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[57,\t\t1,\t\t79.452642,\t\t15.890528,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[58,\t\t1,\t\t181.99836,\t\t36.399672,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[59,\t\t1,\t\t51.979844,\t\t10.395969,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[60,\t\t1,\t\t27.405216,\t\t5.481043,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[61,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999477,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[62,\t\t1,\t\t208.931319,\t\t41.786264,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[63,\t\t1,\t\t123.330369,\t\t24.666074,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[64,\t\t1,\t\t1308.785147,\t\t261.757029,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[65,\t\t1,\t\t4.360894,\t\t0.872179,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[66,\t\t1,\t\t138.366196,\t\t27.673239,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[67,\t\t1,\t\t296.818798,\t\t59.36376,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[68,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998332,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[69,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00075,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[70,\t\t1,\t\t561.513466,\t\t112.302693,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[71,\t\t1,\t\t130.488497,\t\t26.097699,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[72,\t\t1,\t\t213.722252,\t\t42.74445,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[73,\t\t1,\t\t68.420546,\t\t13.684109,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[74,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.003789,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[75,\t\t1,\t\t85.276082,\t\t17.055216,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[76,\t\t1,\t\t82.310129,\t\t16.462026,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[77,\t\t1,\t\t79.722985,\t\t15.944597,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[78,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.995035,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[79,\t\t1,\t\t82.320126,\t\t16.464025,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[80,\t\t1,\t\t87.436676,\t\t17.487335,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[81,\t\t1,\t\t98.704099,\t\t19.74082,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[82,\t\t1,\t\t3.28493,\t\t0.656986,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[83,\t\t1,\t\t219.786066,\t\t43.957213,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[84,\t\t1,\t\t21.63
6582,\t\t4.327316,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[85,\t\t1,\t\t75.031466,\t\t15.006293,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[86,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999969,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[87,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999273,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[88,\t\t1,\t\t60.560337,\t\t12.112067,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[89,\t\t1,\t\t75.134368,\t\t15.026874,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[90,\t\t1,\t\t86.776878,\t\t17.355376,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[91,\t\t1,\t\t30.141967,\t\t6.028393,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[92,\t\t1,\t\t32.89546,\t\t6.579092,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[93,\t\t1,\t\t32.263856,\t\t6.452771,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[94,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999174,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[95,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000263,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[96,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[97,\t\t1,\t\t4.53767,\t\t0.907534,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[98,\t\t1,\t\t83.429506,\t\t16.685901,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[99,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001151,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[100,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001527,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[101,\t\t1,\t\t59.076598,\t\t11.81532,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[102,\t\t1,\t\t114.34551,\t\t22.869102,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[103,\t\t1,\t\t133.692027,\t\t26.738405,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[104,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999922,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[105,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999928,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[106,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99986,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[107,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999995,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[108,\t\t1,\t\t94.303426,\t\t18.860685,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[109,\t\t1,\t\t38.181848,\t\t7.63637,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[110,\t\t1,\t\t49.561569,\t\t9.912314,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[111,\t\t1,\t\t87.340876,\t\t17.468175,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[112,\t\t1,\t\t44.205493,\t\t8.841099,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[113,\t\t1,\t\t69.683871,\t\t13.936774,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[114,\t\t1,\t\t102.627302,\t\t20.52546,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[115,\t\t1,\t\t66.157788,\t\t13.231558,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[116,\t\t1,\t\t110.70596,\t\t22.141192,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t2
20.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[117,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000816,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[118,\t\t1,\t\t171.412339,\t\t34.282468,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[119,\t\t1,\t\t33.22675,\t\t6.64535,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[120,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001279,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[121,\t\t1,\t\t45.121942,\t\t9.024388,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[122,\t\t1,\t\t39.503802,\t\t7.90076,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[123,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000268,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[124,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000006,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[125,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999914,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[126,\t\t1,\t\t207.119414,\t\t41.423883,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[127,\t\t1,\t\t160.125097,\t\t32.025019,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[128,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001323,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[129,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[130,\t\t1,\t\t220.78338,\t\t44.156676,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[131,\t\t1,\t\t48.748779,\t\t9.749756,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[132,\t\t1,\t\t126.934451,\t\t25.38689,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[133,\t\t1,\t\t42.518068,\t\t8.503614,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[134,\t\t1,\t\t42.343957,\t\t8.468791,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[135,\t\t1,\t\t42.400098,\t\t8.48002,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[136,\t\t1,\t\t41.074226,\t\t8.214845,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[137,\t\t1,\t\t32.8556,\t\t6.57112,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[138,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999263,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[139,\t\t1,\t\t64.360791,\t\t12.872158,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[140,\t\t1,\t\t44.508243,\t\t8.901649,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[141,\t\t1,\t\t52.734412,\t\t10.546882,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[142,\t\t1,\t\t58.026678,\t\t11.605336,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[143,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[144,\t\t1,\t\t52.856304,\t\t10.571261,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[145,\t\t1,\t\t153.760388,\t\t30.752078,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[146,\t\t1,\t\t198.226065,\t\t39.645213,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[147,\t\t1,\t\t121.500905,\t\t24.300181,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[148,\t\t1,\t\t171.460082,\t\t34.292016,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.
1,\t\t0.9\t\t],\n\t\t[149,\t\t1,\t\t110.539074,\t\t22.107815,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[150,\t\t1,\t\t144.320239,\t\t28.864048,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[151,\t\t1,\t\t34.008844,\t\t6.801769,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[152,\t\t1,\t\t70.598833,\t\t14.119767,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[153,\t\t1,\t\t125.9598,\t\t25.19196,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[154,\t\t1,\t\t129.385711,\t\t25.877142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[155,\t\t1,\t\t134.766653,\t\t26.953331,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[156,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999992,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[157,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000087,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[158,\t\t1,\t\t35.506525,\t\t7.101305,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[159,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001066,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[160,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[161,\t\t1,\t\t110.227427,\t\t22.045485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[162,\t\t1,\t\t164.757336,\t\t32.951467,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[163,\t\t1,\t\t32.949911,\t\t6.589982,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[164,\t\t1,\t\t33.082423,\t\t6.616485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[165,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[166,\t\t1,\t\t38.678704,\t\t7.735741,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[167,\t\t1,\t\t54.411201,\t\t10.88224,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[168,\t\t1,\t\t37.13495,\t\t7.42699,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[169,\t\t1,\t\t127.123641,\t\t25.424728,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[170,\t\t1,\t\t95.522697,\t\t19.104539,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[171,\t\t1,\t\t81.528586,\t\t16.305717,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[172,\t\t1,\t\t40.012009,\t\t8.002402,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[173,\t\t1,\t\t38.223311,\t\t7.644662,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[174,\t\t1,\t\t57.359494,\t\t11.471899,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[175,\t\t1,\t\t38.198259,\t\t7.639652,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[176,\t\t1,\t\t133.106751,\t\t26.62135,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[177,\t\t1,\t\t21.704995,\t\t4.340999,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[178,\t\t1,\t\t114.954978,\t\t22.990996,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[179,\t\t1,\t\t42.356942,\t\t8.471388,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[180,\t\t1,\t\t37.232836,\t\t7.446567,\t\t0,\t\t0,\t\t0,\t\t1.0
,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[181,\t\t1,\t\t28.102272,\t\t5.620454,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[182,\t\t1,\t\t1.273046,\t\t0.254609,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[183,\t\t1,\t\t381.062729,\t\t76.212546,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[184,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999954,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[185,\t\t1,\t\t81.488061,\t\t16.297612,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[186,\t\t1,\t\t43.880897,\t\t8.776179,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[187,\t\t1,\t\t25.665856,\t\t5.133171,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[188,\t\t1,\t\t38.198259,\t\t7.639652,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[189,\t\t1,\t\t140.163669,\t\t28.032734,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[190,\t\t1,\t\t185.392677,\t\t37.078535,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[191,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[192,\t\t1,\t\t44.648172,\t\t8.929634,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[193,\t\t1,\t\t38.136642,\t\t7.627328,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[194,\t\t1,\t\t26.326335,\t\t5.265267,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[195,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[196,\t\t1,\t\t36.934313,\t\t7.386863,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[197,\t\t1,\t\t58.517517,\t\t11.703503,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[198,\t\t1,\t\t34.627533,\t\t6.925507,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[199,\t\t1,\t\t44.581796,\t\t8.916359,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[200,\t\t1,\t\t38.199146,\t\t7.639829,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[201,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.997871,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[202,\t\t1,\t\t39.143281,\t\t7.828656,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[203,\t\t1,\t\t5.157478,\t\t1.031496,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[204,\t\t1,\t\t151.164654,\t\t30.232931,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[205,\t\t1,\t\t75.589132,\t\t15.117826,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[206,\t\t1,\t\t36.277501,\t\t7.2555,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[207,\t\t1,\t\t107.873663,\t\t21.574733,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[208,\t\t1,\t\t31.76454,\t\t6.352908,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[209,\t\t1,\t\t44.14161,\t\t8.828322,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[210,\t\t1,\t\t50.710449,\t\t10.14209,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[211,\t\t1,\t\t178.207882,\t\t35.641576,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[212,\t\t1,\t\t44.665292,\t\t8.933058,
\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[213,\t\t1,\t\t209.380904,\t\t41.876181,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[214,\t\t1,\t\t140.886808,\t\t28.177362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[215,\t\t1,\t\t297.912187,\t\t59.582437,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[216,\t\t1,\t\t100.452037,\t\t20.090407,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[217,\t\t1,\t\t32.1884,\t\t6.43768,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[218,\t\t1,\t\t98.063081,\t\t19.612616,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[219,\t\t1,\t\t157.599323,\t\t31.519865,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[220,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999672,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[221,\t\t1,\t\t89.903024,\t\t17.980605,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[222,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[223,\t\t1,\t\t89.099462,\t\t17.819892,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[224,\t\t1,\t\t103.6104,\t\t20.72208,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[225,\t\t1,\t\t186.038417,\t\t37.207683,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[226,\t\t1,\t\t64.988967,\t\t12.997793,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[227,\t\t1,\t\t80.963073,\t\t16.192615,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[228,\t\t1,\t\t79.38182,\t\t15.876364,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[229,\t\t1,\t\t175.658429,\t\t35.131686,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[230,\t\t1,\t\t42.132923,\t\t8.426585,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[231,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000936,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[232,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999991,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[233,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999606,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[234,\t\t1,\t\t150.082157,\t\t30.016431,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[235,\t\t1,\t\t48.804717,\t\t9.760943,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[236,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999981,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[237,\t\t1,\t\t0.403914,\t\t0.080783,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[238,\t\t1,\t\t55.223425,\t\t11.044685,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[239,\t\t1,\t\t76.298087,\t\t15.259617,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[240,\t\t1,\t\t481.273697,\t\t96.254739,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[241,\t\t1,\t\t356.125818,\t\t71.225164,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[242,\t\t1,\t\t129.671855,\t\t25.934371,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[243,\t\t1,\t\t104.619329,\t\t20.923866,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[244,\t\t1,\
t\t124.646159,\t\t24.929232,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[245,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001786,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[246,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999913,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[247,\t\t1,\t\t24.735326,\t\t4.947065,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[248,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[249,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[250,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999995,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[251,\t\t1,\t\t61.387468,\t\t12.277494,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[252,\t\t1,\t\t157.430773,\t\t31.486155,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[253,\t\t1,\t\t69.118117,\t\t13.823623,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[254,\t\t1,\t\t22.068268,\t\t4.413654,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[255,\t\t1,\t\t108.529902,\t\t21.70598,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[256,\t\t1,\t\t124.464912,\t\t24.892982,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[257,\t\t1,\t\t60.06952,\t\t12.013904,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[258,\t\t1,\t\t195.759311,\t\t39.151862,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[259,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999581,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[260,\t\t1,\t\t121.832905,\t\t24.366581,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[261,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002014,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[262,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99968,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[263,\t\t1,\t\t174.769144,\t\t34.953829,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[264,\t\t1,\t\t226.248083,\t\t45.249617,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[265,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000009,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[266,\t\t1,\t\t109.036505,\t\t21.807301,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[267,\t\t1,\t\t137.907521,\t\t27.581504,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[268,\t\t1,\t\t47.956289,\t\t9.591258,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[269,\t\t1,\t\t38.510698,\t\t7.70214,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[270,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[271,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[272,\t\t1,\t\t0.78576,\t\t0.157152,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[273,\t\t1,\t\t107.453062,\t\t21.490612,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[274,\t\t1,\t\t208.874596,\t\t41.774919,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[275,\t\t1,\t\t39.102465,\t\t7.820493,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[276,\t\t1,\t\t152.431348,\t\t30.48627,\t\t0,\
t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[277,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998577,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[278,\t\t1,\t\t118.997587,\t\t23.799517,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[279,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998164,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[280,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999529,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[281,\t\t1,\t\t157.181561,\t\t31.436312,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[282,\t\t1,\t\t222.279069,\t\t44.455814,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[283,\t\t1,\t\t89.099103,\t\t17.819821,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[284,\t\t1,\t\t135.167465,\t\t27.033493,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[285,\t\t1,\t\t60.279948,\t\t12.05599,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[286,\t\t1,\t\t126.337034,\t\t25.267407,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[287,\t\t1,\t\t77.649516,\t\t15.529903,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[288,\t\t1,\t\t49.943628,\t\t9.988726,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[289,\t\t1,\t\t78.546842,\t\t15.709368,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[290,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.004907,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[291,\t\t1,\t\t51.690749,\t\t10.33815,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[292,\t\t1,\t\t101.905943,\t\t20.381189,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[293,\t\t1,\t\t89.813561,\t\t17.962712,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[294,\t\t1,\t\t23.933957,\t\t4.786791,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[295,\t\t1,\t\t50.078174,\t\t10.015635,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[296,\t\t1,\t\t142.172054,\t\t28.434411,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[297,\t\t1,\t\t149.424424,\t\t29.884885,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[298,\t\t1,\t\t78.899066,\t\t15.779813,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[299,\t\t1,\t\t76.413221,\t\t15.282644,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[300,\t\t1,\t\t208.170304,\t\t41.634061,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[301,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999525,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[302,\t\t1,\t\t175.358016,\t\t35.071603,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[303,\t\t1,\t\t90.068963,\t\t18.013793,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[304,\t\t1,\t\t77.342281,\t\t15.468456,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[305,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99979,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[306,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999891,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[307,\t\t1,\t\t91.735133,\t\t18.347027,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[308,\t\t1,\t\t113.097197
,\t\t22.619439,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[309,\t\t1,\t\t185.042919,\t\t37.008584,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[310,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000041,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[311,\t\t1,\t\t157.177116,\t\t31.435423,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[312,\t\t1,\t\t70.686923,\t\t14.137385,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[313,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001149,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[314,\t\t1,\t\t218.943091,\t\t43.788618,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[315,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001529,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[316,\t\t1,\t\t85.78475,\t\t17.15695,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[317,\t\t1,\t\t115.506023,\t\t23.101205,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[318,\t\t1,\t\t189.819037,\t\t37.963807,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[319,\t\t1,\t\t6.800077,\t\t1.360015,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[320,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999995,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[321,\t\t1,\t\t160.858437,\t\t32.171687,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[322,\t\t1,\t\t20.478315,\t\t4.095663,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[323,\t\t1,\t\t2.130594,\t\t0.426119,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[324,\t\t1,\t\t376.637527,\t\t75.327505,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[325,\t\t1,\t\t122.691298,\t\t24.53826,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[326,\t\t1,\t\t9.94743,\t\t1.989486,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[327,\t\t1,\t\t85.604424,\t\t17.120885,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[328,\t\t1,\t\t145.883095,\t\t29.176619,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[329,\t\t1,\t\t219.42118,\t\t43.884236,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[330,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001641,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[331,\t\t1,\t\t17.421295,\t\t3.484259,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[332,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.994883,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[333,\t\t1,\t\t183.050164,\t\t36.610033,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[334,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99946,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[335,\t\t1,\t\t186.816503,\t\t37.363301,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[336,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998019,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[337,\t\t1,\t\t74.310127,\t\t14.862025,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[338,\t\t1,\t\t201.688244,\t\t40.337649,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[339,\t\t1,\t\t124.74139,\t\t24.948278,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[340,\t\t1,\t\t105.4
66324,\t\t21.093265,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[341,\t\t1,\t\t95.343664,\t\t19.068733,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[342,\t\t1,\t\t165.389884,\t\t33.077977,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[343,\t\t1,\t\t90.735302,\t\t18.14706,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[344,\t\t1,\t\t227.495134,\t\t45.499027,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[345,\t\t1,\t\t248.756971,\t\t49.751394,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[346,\t\t1,\t\t246.952253,\t\t49.390451,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[347,\t\t1,\t\t86.363489,\t\t17.272698,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[348,\t\t1,\t\t225.759849,\t\t45.15197,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[349,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001361,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[350,\t\t1,\t\t118.436912,\t\t23.687382,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[351,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001141,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[352,\t\t1,\t\t783.968775,\t\t156.793755,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[353,\t\t1,\t\t2.356872,\t\t0.471374,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[354,\t\t1,\t\t16.012385,\t\t3.202477,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[355,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[356,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[357,\t\t1,\t\t0.040138,\t\t0.008028,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[358,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00082,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[359,\t\t1,\t\t2.343515,\t\t0.468703,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[360,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000685,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[361,\t\t1,\t\t59.980163,\t\t11.996033,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[362,\t\t1,\t\t170.974507,\t\t34.194901,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[363,\t\t1,\t\t251.729885,\t\t50.345977,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[364,\t\t1,\t\t59.3922,\t\t11.87844,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[365,\t\t1,\t\t53.307654,\t\t10.661531,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[366,\t\t1,\t\t105.6556,\t\t21.13112,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[367,\t\t1,\t\t51.069528,\t\t10.213906,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[368,\t\t1,\t\t25.147475,\t\t5.029495,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[369,\t\t1,\t\t20.664524,\t\t4.132905,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[370,\t\t1,\t\t60.836949,\t\t12.16739,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[371,\t\t1,\t\t306.104743,\t\t61.220949,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[
372,\t\t1,\t\t177.514538,\t\t35.502908,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[373,\t\t1,\t\t119.786939,\t\t23.957388,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[374,\t\t1,\t\t61.424714,\t\t12.284943,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[375,\t\t1,\t\t201.49439,\t\t40.298878,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[376,\t\t1,\t\t221.001397,\t\t44.200279,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[377,\t\t1,\t\t158.145186,\t\t31.629037,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[378,\t\t1,\t\t157.840789,\t\t31.568158,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[379,\t\t1,\t\t54.400959,\t\t10.880192,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[380,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999989,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[381,\t\t1,\t\t181.920125,\t\t36.384025,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[382,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000287,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[383,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999356,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[384,\t\t1,\t\t64.195093,\t\t12.839019,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[385,\t\t1,\t\t81.026806,\t\t16.205361,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[386,\t\t1,\t\t65.10261,\t\t13.020522,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[387,\t\t1,\t\t132.584124,\t\t26.516825,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[388,\t\t1,\t\t711.974806,\t\t142.394961,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[389,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999953,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[390,\t\t1,\t\t58.786094,\t\t11.757219,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[391,\t\t1,\t\t66.962375,\t\t13.392475,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[392,\t\t1,\t\t128.500124,\t\t25.700025,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[393,\t\t1,\t\t160.472614,\t\t32.094523,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[394,\t\t1,\t\t57.717386,\t\t11.543477,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[395,\t\t1,\t\t79.99273,\t\t15.998546,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[396,\t\t1,\t\t56.658032,\t\t11.331606,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[397,\t\t1,\t\t454.335008,\t\t90.867002,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[398,\t\t1,\t\t196.782306,\t\t39.356461,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[399,\t\t1,\t\t83.843594,\t\t16.768719,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[400,\t\t1,\t\t44.670462,\t\t8.934092,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[401,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000557,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[402,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000356,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[403,\t\t1,\t\t22.179923,\t\t4.435985,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.
0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[404,\t\t1,\t\t78.141243,\t\t15.628249,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[405,\t\t1,\t\t589.107715,\t\t117.821543,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[406,\t\t1,\t\t44.635096,\t\t8.927019,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[407,\t\t1,\t\t88.356151,\t\t17.67123,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[408,\t\t1,\t\t255.47644,\t\t51.095288,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[409,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999926,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[410,\t\t1,\t\t33.07651,\t\t6.615302,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[411,\t\t1,\t\t31.275194,\t\t6.255039,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[412,\t\t1,\t\t2.19674,\t\t0.439348,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[413,\t\t1,\t\t109.665229,\t\t21.933046,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[414,\t\t1,\t\t9.311764,\t\t1.862353,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[415,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999523,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[416,\t\t1,\t\t132.609322,\t\t26.521864,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[417,\t\t1,\t\t5.18875,\t\t1.03775,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[418,\t\t1,\t\t108.130419,\t\t21.626084,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[419,\t\t1,\t\t57.79494,\t\t11.558988,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[420,\t\t1,\t\t58.18776,\t\t11.637552,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[421,\t\t1,\t\t83.817984,\t\t16.763597,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[422,\t\t1,\t\t61.407864,\t\t12.281573,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[423,\t\t1,\t\t128.970085,\t\t25.794017,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[424,\t\t1,\t\t9.298411,\t\t1.859682,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[425,\t\t1,\t\t76.363415,\t\t15.272683,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[426,\t\t1,\t\t6.326944,\t\t1.265389,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[427,\t\t1,\t\t53.17174,\t\t10.634348,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[428,\t\t1,\t\t23.840558,\t\t4.768112,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[429,\t\t1,\t\t269.035043,\t\t53.807009,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[430,\t\t1,\t\t143.305714,\t\t28.661143,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[431,\t\t1,\t\t95.830732,\t\t19.166146,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[432,\t\t1,\t\t112.020247,\t\t22.404049,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[433,\t\t1,\t\t57.261764,\t\t11.452353,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[434,\t\t1,\t\t29.801811,\t\t5.960362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[435,\t\t1,\t\t119.188
482,\t\t23.837696,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[436,\t\t1,\t\t63.632731,\t\t12.726546,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[437,\t\t1,\t\t14.491687,\t\t2.898337,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[438,\t\t1,\t\t38.891719,\t\t7.778344,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[439,\t\t1,\t\t72.411353,\t\t14.482271,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[440,\t\t1,\t\t61.194993,\t\t12.238999,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[441,\t\t1,\t\t46.914161,\t\t9.382832,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[442,\t\t1,\t\t62.083316,\t\t12.416663,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[443,\t\t1,\t\t134.602474,\t\t26.920495,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[444,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[445,\t\t1,\t\t61.161808,\t\t12.232362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[446,\t\t1,\t\t28.360182,\t\t5.672036,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[447,\t\t1,\t\t53.918247,\t\t10.783649,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[448,\t\t1,\t\t39.624436,\t\t7.924887,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[449,\t\t1,\t\t199.799824,\t\t39.959965,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[450,\t\t1,\t\t122.267959,\t\t24.453592,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[451,\t\t1,\t\t52.245702,\t\t10.44914,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[452,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[453,\t\t1,\t\t35.014757,\t\t7.002951,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[454,\t\t1,\t\t24.428604,\t\t4.885721,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[455,\t\t1,\t\t39.828783,\t\t7.965757,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[456,\t\t1,\t\t39.828783,\t\t7.965757,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[457,\t\t1,\t\t122.144889,\t\t24.428978,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[458,\t\t1,\t\t116.175191,\t\t23.235038,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[459,\t\t1,\t\t141.38953,\t\t28.277906,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[460,\t\t1,\t\t185.814973,\t\t37.162995,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[461,\t\t1,\t\t193.287865,\t\t38.657573,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[462,\t\t1,\t\t59.12776,\t\t11.825552,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[463,\t\t1,\t\t30.297434,\t\t6.059487,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[464,\t\t1,\t\t30.334057,\t\t6.066811,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[465,\t\t1,\t\t48.997793,\t\t9.799559,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[466,\t\t1,\t\t39.780009,\t\t7.956002,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\
t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[467,\t\t1,\t\t36.710361,\t\t7.342072,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[468,\t\t1,\t\t60.190482,\t\t12.038096,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[469,\t\t1,\t\t37.298836,\t\t7.459767,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[470,\t\t1,\t\t94.98582,\t\t18.997164,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[471,\t\t1,\t\t93.522105,\t\t18.704421,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[472,\t\t1,\t\t32.711213,\t\t6.542243,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[473,\t\t1,\t\t60.065587,\t\t12.013117,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[474,\t\t1,\t\t31.023248,\t\t6.20465,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[475,\t\t1,\t\t30.444615,\t\t6.088923,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[476,\t\t1,\t\t34.407424,\t\t6.881485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[477,\t\t1,\t\t55.52614,\t\t11.105228,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[478,\t\t1,\t\t69.750952,\t\t13.95019,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[479,\t\t1,\t\t126.404216,\t\t25.280843,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[480,\t\t1,\t\t55.405258,\t\t11.081052,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[481,\t\t1,\t\t48.116491,\t\t9.623298,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[482,\t\t1,\t\t54.634205,\t\t10.926841,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[483,\t\t1,\t\t46.462388,\t\t9.292478,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[484,\t\t1,\t\t36.424252,\t\t7.28485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[485,\t\t1,\t\t54.408192,\t\t10.881638,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[486,\t\t1,\t\t500.528791,\t\t100.105758,\t\t0,\t\t0,\t\t0,\t\t0.999644,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[487,\t\t1,\t\t126.831682,\t\t25.366336,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[488,\t\t1,\t\t365.459497,\t\t73.091899,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[489,\t\t1,\t\t96.1879,\t\t19.23758,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[490,\t\t1,\t\t29.930087,\t\t5.986017,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[491,\t\t1,\t\t41.154254,\t\t8.230851,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[492,\t\t1,\t\t64.176373,\t\t12.835275,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[493,\t\t1,\t\t82.715663,\t\t16.543133,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[494,\t\t1,\t\t113.049619,\t\t22.609924,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[495,\t\t1,\t\t88.990255,\t\t17.798051,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[496,\t\t1,\t\t6.303328,\t\t1.260666,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[497,\t\t1,\t\t788.229231,\t\t157.645846,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.
9\t\t],\n\t\t[498,\t\t1,\t\t36.96724,\t\t7.393448,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[499,\t\t1,\t\t51.600211,\t\t10.320042,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[500,\t\t1,\t\t28.250508,\t\t5.650102,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[501,\t\t1,\t\t47.794989,\t\t9.558998,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[502,\t\t1,\t\t188.636924,\t\t37.727385,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[503,\t\t1,\t\t57.772131,\t\t11.554426,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[504,\t\t1,\t\t37.831905,\t\t7.566381,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[505,\t\t1,\t\t268.333226,\t\t53.666645,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[506,\t\t1,\t\t84.226497,\t\t16.845299,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[507,\t\t1,\t\t80.117224,\t\t16.023445,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[508,\t\t1,\t\t116.472908,\t\t23.294582,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[509,\t\t1,\t\t153.488191,\t\t30.697638,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[510,\t\t1,\t\t96.96766,\t\t19.393532,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[511,\t\t1,\t\t84.585425,\t\t16.917085,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[512,\t\t1,\t\t55.873895,\t\t11.174779,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[513,\t\t1,\t\t30.780554,\t\t6.156111,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[514,\t\t1,\t\t76.60982,\t\t15.321964,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[515,\t\t1,\t\t68.340511,\t\t13.668102,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[516,\t\t1,\t\t76.45695,\t\t15.29139,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[517,\t\t1,\t\t35.91366,\t\t7.182732,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[518,\t\t1,\t\t202.268006,\t\t40.453601,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[519,\t\t1,\t\t19.906875,\t\t3.981375,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[520,\t\t1,\t\t80.37176,\t\t16.074352,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[521,\t\t1,\t\t72.602992,\t\t14.520598,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[522,\t\t1,\t\t62.16327,\t\t12.432654,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[523,\t\t1,\t\t33.461781,\t\t6.692356,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[524,\t\t1,\t\t97.122526,\t\t19.424505,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[525,\t\t1,\t\t115.705825,\t\t23.141165,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[526,\t\t1,\t\t35.07983,\t\t7.015966,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[527,\t\t1,\t\t38.515188,\t\t7.703038,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[528,\t\t1,\t\t84.063,\t\t16.8126,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[529,\t\t1,\t\t107.756318,
\t\t21.551264,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[530,\t\t1,\t\t45.662726,\t\t9.132545,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[531,\t\t1,\t\t46.426928,\t\t9.285386,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[532,\t\t1,\t\t44.561758,\t\t8.912352,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[533,\t\t1,\t\t39.932712,\t\t7.986542,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[534,\t\t1,\t\t110.156768,\t\t22.031354,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[535,\t\t1,\t\t137.909203,\t\t27.581841,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[536,\t\t1,\t\t108.702172,\t\t21.740434,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[537,\t\t1,\t\t36.160733,\t\t7.232147,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[538,\t\t1,\t\t27.031297,\t\t5.406259,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[539,\t\t1,\t\t28.681868,\t\t5.736374,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[540,\t\t1,\t\t25.826762,\t\t5.165352,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[541,\t\t1,\t\t66.712756,\t\t13.342551,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[542,\t\t1,\t\t91.642706,\t\t18.328541,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[543,\t\t1,\t\t50.054795,\t\t10.010959,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[544,\t\t1,\t\t93.227759,\t\t18.645552,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[545,\t\t1,\t\t200.734654,\t\t40.146931,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[546,\t\t1,\t\t100.61124,\t\t20.122248,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[547,\t\t1,\t\t130.046639,\t\t26.009328,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[548,\t\t1,\t\t42.096635,\t\t8.419327,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[549,\t\t1,\t\t35.996222,\t\t7.199244,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[550,\t\t1,\t\t29.703005,\t\t5.940601,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[551,\t\t1,\t\t28.63298,\t\t5.726596,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[552,\t\t1,\t\t142.188155,\t\t28.437631,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[553,\t\t1,\t\t0.983722,\t\t0.196744,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[554,\t\t1,\t\t144.051445,\t\t28.810289,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[555,\t\t1,\t\t54.885195,\t\t10.977039,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[556,\t\t1,\t\t84.909223,\t\t16.981845,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[557,\t\t1,\t\t180.401553,\t\t36.080311,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[558,\t\t1,\t\t106.375344,\t\t21.275069,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[559,\t\t1,\t\t56.93106,\t\t11.386212,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[560,\t\t1,\t\t88.939784,\t\t17.787957,\t\t0,\t\t0,\t
\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[561,\t\t1,\t\t48.771981,\t\t9.754396,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[562,\t\t1,\t\t133.241398,\t\t26.64828,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[563,\t\t1,\t\t93.679562,\t\t18.735912,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[564,\t\t1,\t\t184.970556,\t\t36.994111,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[565,\t\t1,\t\t139.56945,\t\t27.91389,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[566,\t\t1,\t\t0.224178,\t\t0.044836,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[567,\t\t1,\t\t226.8764,\t\t45.37528,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[568,\t\t1,\t\t209.805777,\t\t41.961155,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[569,\t\t1,\t\t147.620818,\t\t29.524164,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[570,\t\t1,\t\t230.46268,\t\t46.092536,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[571,\t\t1,\t\t169.684163,\t\t33.936833,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[572,\t\t1,\t\t299.294532,\t\t59.858906,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[573,\t\t1,\t\t87.120714,\t\t17.424143,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[574,\t\t1,\t\t165.99823,\t\t33.199646,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[575,\t\t1,\t\t3.119404,\t\t0.623881,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[576,\t\t1,\t\t201.852734,\t\t40.370547,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[577,\t\t1,\t\t222.521596,\t\t44.504319,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[578,\t\t1,\t\t212.456169,\t\t42.491234,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[579,\t\t1,\t\t77.509809,\t\t15.501962,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[580,\t\t1,\t\t16.136389,\t\t3.227278,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[581,\t\t1,\t\t0.092721,\t\t0.018544,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[582,\t\t1,\t\t58.381537,\t\t11.676307,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[583,\t\t1,\t\t66.961478,\t\t13.392296,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[584,\t\t1,\t\t38.419289,\t\t7.683858,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[585,\t\t1,\t\t66.700613,\t\t13.340123,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t]\n\t])\n\tppc[\"gen\"] = array([\n\t\t[586,\t\t0.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t272.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[589,\t\t63.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[590,\t\t38.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[593,\t\t11.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[595,\t\t1466.614612,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4730.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[598,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[599,\t\t9.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[602,\t\t24.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[603,\t\t1363.789945,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3455.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[607,\t\t1800.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1800.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[608,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[609,\t\t36.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[612,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[614,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[616,\t\t29.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[617,\t\t137.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[618,\t\t33.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[619,\t\t118.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t118.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[624,\t\t27.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[629,\t\t75.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[632,\t\t45.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[637,\t\t53.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[638,\t\t128.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t128.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[640,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[641,\t\t12.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[642,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[643,\t\t857.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t857.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[647,\t\t14.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[652,\t\t46.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t46.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[655,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[663,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[666,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[670,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[672,\t\t33.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[676,\t\t370.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t370.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[681,\t\t40.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[683,\t\t27.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[687,\t\t1329.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1329.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[694,\t\t16.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[695,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[697,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[698,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[702,\t\t73.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[705,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[707,\t\t34.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[714,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[716,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[717,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[722,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[724,\t\t12.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[730,\t\t633.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[732,\t\t14.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[735,\t\t84.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[741,\t\t214.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[742,\t\t9.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[743,\t\t1227.688539,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[747,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[749,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[750,\t\t90.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[753,\t\t311.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t311.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[761,\t\t15.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[762,\t\t1076.088882,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1105.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[765,\t\t59.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[767,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[772,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[774,\t\t33.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[777,\t\t79.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[778,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[781,\t\t945.392426,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1310.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[784,\t\t1059.960906,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1275.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[785,\t\t3.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[788,\t\t700.494671,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[789,\t\t77.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[791,\t\t10.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[792,\t\t62.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t62.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[795,\t\t13.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[800,\t\t36.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[801,\t\t50.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[802,\t\t500.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t500.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[805,\t\t693.813273,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[806,\t\t35.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[808,\t\t217.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[809,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[811,\t\t25.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[814,\t\t89.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[816,\t\t80.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t80.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[817,\t\t54.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[821,\t\t82.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[826,\t\t58.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[834,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[835,\t\t63.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[836,\t\t25.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[837,\t\t472.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t472.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[839,\t\t73.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[841,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[843,\t\t333.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t333.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[844,\t\t40.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[850,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[851,\t\t79.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[853,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[856,\t\t36.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[857,\t\t1402.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1402.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[858,\t\t56.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[860,\t\t25.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[865,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[867,\t\t264.697826,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t769.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[869,\t\t1360.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1360.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[870,\t\t58.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[872,\t\t22.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[874,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[875,\t\t24.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[882,\t\t17.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[883,\t\t18.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[885,\t\t34.740146,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t490.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[886,\t\t2572.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2572.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[889,\t\t9.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[890,\t\t48.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[893,\t\t60.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[894,\t\t158.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t158.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[895,\t\t19.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[896,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[898,\t\t84.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[902,\t\t19.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[903,\t\t20.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[905,\t\t137.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[906,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[907,\t\t67.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[909,\t\t36.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[917,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[918,\t\t38.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[920,\t\t12.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[921,\t\t124.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t124.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[922,\t\t164.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[923,\t\t146.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t146.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[925,\t\t26.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[931,\t\t217.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[936,\t\t104.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t104.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[937,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[939,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[940,\t\t29.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[944,\t\t25.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[950,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[952,\t\t31.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t31.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[958,\t\t66.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[959,\t\t45.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[960,\t\t26.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[963,\t\t687.931579,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[965,\t\t352.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t352.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[967,\t\t37.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[969,\t\t56.9,\t\t0,\t\t9999,\t\t-9999,\t\t0.999644,\t\t100,\t\t1,\t\t56.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[971,\t\t20.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[978,\t\t4.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[982,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[983,\t\t44.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t44.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[984,\t\t465.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t465.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[985,\t\t22.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[986,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[987,\t\t164.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[988,\t\t5.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[993,\t\t392.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[994,\t\t33.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[995,\t\t4.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[997,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[999,\t\t15.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1002,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1007,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1010,\t\t750.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1011,\t\t18.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1012,\t\t810.029779,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2835.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1014,\t\t599.602726,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1027,\t\t10.460207,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1028,\t\t292.918282,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t400.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1029,\t\t27.465302,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1030,\t\t533.877229,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1018.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1031,\t\t1002.917112,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1447.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1032,\t\t79.932691,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.510391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1033,\t\t20.55676,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.164506,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1034,\t\t36.699953,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.262779,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1035,\t\t35.271451,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.886469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1036,\t\t46.753001,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.223077,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1037,\t\t40.25786,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t94.684044,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1038,\t\t37.755525,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.798525,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1039,\t\t101.893155,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.724114,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1040,\t\t0.018424,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.064179,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1041,\t\t153.223357,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t204.187624,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1042,\t\t40.87186,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.70053,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1043,\t\t1.823835,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.035538,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1044,\t\t11.076386,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.163532,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1045,\t\t12.693234,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.836204,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1046,\t\t18.636555,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t106.787063,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1047,\t\t2.990521,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.029581,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1048,\t\t13.95159,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.656883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1049,\t\t198.425639,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t293.755375,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1050,\t\t39.486108,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.781606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1051,\t\t285.38149,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t304.42978,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1052,\t\t5.143615,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.66869,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1053,\t\t4.192271,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.368087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1054,\t\t65.843261,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t273.855776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1055,\t\t2.569306,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.856069,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1056,\t\t432.936564,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t603.943953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1057,\t\t130.808026,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t426.979979,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1058,\t\t549.489833,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1055.735174,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1059,\t\t360.823263,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.871332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1060,\t\t9.16295,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.351632,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1061,\t\t154.755519,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t161.862597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0],\n\t\t[1062,\t\t2.358253,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.878561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1063,\t\t6.654734,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.670916,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1064,\t\t154.89402,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t209.786524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1065,\t\t250.621857,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.421643,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1066,\t\t68.904322,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.399019,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1067,\t\t6.260048,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.653526,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1068,\t\t2.977816,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.009022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1069,\t\t1.620267,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.190759,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1070,\t\t0.473903,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.788599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1071,\t\t2.394921,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.328696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1072,\t\t36.154158,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.606433,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1073,\t\t20.275153,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.81765,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1074,\t\t48.536291,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.592986,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1075,\t\t8.668695,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.783448,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1076,\t\t0.719805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.29551,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1077,\t\t18.059078,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.120041,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1078,\t\t14.921952,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.413246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1079,\t\t22.955211,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t72.327992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1080,\t\t44.741318,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.149983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1081,\t\t388.316194,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t405.642115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1082,\t\t485.516098,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t510.054159,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1083,\t\t613.766095,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.681488,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1084,\t\t522.770891,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t602.719371,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1085,\t\t37.272877,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t113.714399,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1086,\t\t69.300753,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t225.59917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1087,\t\t107.585832,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t116.66597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1088,\t\t35.327353,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.782492,\t\t0.0,\t\t0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1089,\t\t297.558685,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t384.449592,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1090,\t\t23.576709,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.140897,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1091,\t\t7.850455,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.7939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1092,\t\t5.88887,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.002032,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1093,\t\t10.655098,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.605298,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1096,\t\t7.860251,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.50612,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1097,\t\t0.394111,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.601122,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1098,\t\t9.296361,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.025499,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1099,\t\t54.610258,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t290.937198,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1100,\t\t0.003509,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1101,\t\t24.535269,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.930665,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1102,\t\t117.607859,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t350.979988,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1103,\t\t93.242905,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t245.381701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1105,\t\t0.002734,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.178593,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1106,\t\t0.001842,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.289793,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1107,\t\t7.627584,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.221615,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1108,\t\t84.395325,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t320.422751,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1109,\t\t0.005786,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.77821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1110,\t\t0.001346,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.654557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1111,\t\t11.638705,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.637993,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1113,\t\t0.000435,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.536361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1114,\t\t2.594751,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.446889,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1115,\t\t0.024181,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.575278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1116,\t\t0.003557,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.601142,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1117,\t\t1.0211,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.792541,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1118,\t\t0.126568,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.725012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1119,\t\t0.06487,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.254023,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0],\n\t\t[1120,\t\t0.003805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.416001,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1121,\t\t0.000463,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.540589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1122,\t\t0.001107,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.462883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1123,\t\t0.000619,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.464336,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1124,\t\t0.001002,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.288283,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1125,\t\t0.961999,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.818899,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1126,\t\t1.24405,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.154893,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1127,\t\t0.204465,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.296621,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1128,\t\t0.003399,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.06139,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1129,\t\t0.004899,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.738747,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1130,\t\t0.00018,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.025754,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1131,\t\t0.002931,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.897078,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1133,\t\t0.000617,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.719597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1134,\t\t0.000436,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.508453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1135,\t\t0.027822,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.117819,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1136,\t\t0.000284,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.4027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1137,\t\t0.098149,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.669012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1138,\t\t0.002053,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.254278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1139,\t\t0.000241,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.822769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1140,\t\t4.49627,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.389457,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1142,\t\t0.00236,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1143,\t\t0.502306,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.239356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1144,\t\t0.030776,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.527382,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1145,\t\t40.324835,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t175.889627,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1146,\t\t0.000738,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.861317,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1147,\t\t0.011916,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.703707,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1148,\t\t0.651323,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.645529,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1149,\t\t0.135893,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.556784,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1150,\t\t0.021433,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.62256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1151,\t\t0.019427,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.036113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1152,\t\t0.00013,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.116518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1155,\t\t0.000865,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.609451,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1157,\t\t0.006459,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.354147,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1160,\t\t61.129181,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.377761,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1161,\t\t2.896951,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.263391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1162,\t\t273.439171,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t502.409178,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1163,\t\t206.24686,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t330.03194,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1164,\t\t143.533861,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t285.625412,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1165,\t\t29.685091,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.188579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1166,\t\t32.175395,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.277163,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1168,\t\t0.000743,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.345774,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1169,\t\t0.003458,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.721845,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1171,\t\t1.967453,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.029885,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1172,\t\t0.631482,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.584043,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1173,\t\t82.749143,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t254.253327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1175,\t\t0.000868,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.855454,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1176,\t\t0.000324,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.23222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1177,\t\t0.126674,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.87401,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1178,\t\t0.165025,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.167999,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1179,\t\t0.011629,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.306293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1181,\t\t13.535858,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.739557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1182,\t\t8.79188,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.319579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1183,\t\t0.981738,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.222575,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1184,\t\t0.008347,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.219005,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1186,\t\t3.535988,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.916368,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1187,\t\t0.27759,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.814574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1188,\t\t56.68999,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t179.712741,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1189,\t\t8.957888,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.261805,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1190,\t\t210.457608,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t220.533673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1191,\t\t70.653669,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.079413,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1192,\t\t8.195868,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.454569,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1193,\t\t0.865781,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.399953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1194,\t\t3.340189,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.986036,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1195,\t\t0.071729,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.202359,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1196,\t\t49.815385,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.697956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1197,\t\t26.370587,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.592266,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1198,\t\t8.079646,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.819157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1199,\t\t43.056892,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.421956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1200,\t\t11.02043,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.012408,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1201,\t\t17.382661,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.166667,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1202,\t\t20.92899,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.89238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1203,\t\t143.537583,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t182.623256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1204,\t\t23.95278,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.541821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1205,\t\t0.219444,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.548843,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1206,\t\t1.467907,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1207,\t\t1.289842,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.575453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1208,\t\t1.785392,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.242031,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1209,\t\t0.039688,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.268261,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1210,\t\t0.579627,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.02599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1211,\t\t13.976304,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.005229,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1212,\t\t74.870478,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.171888,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1213,\t\t46.121501,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.342704,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1214,\t\t2.447531,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.505907,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1215,\t\t0.708893,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.252965,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1216,\t\t16.428571,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.754469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1217,\t\t32.069234,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.871617,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1218,\t\t0.793403,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.980482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1219,\t\t0.548688,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.33953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1220,\t\t2.817267,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.597849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1221,\t\t292.553779,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t593.230436,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1222,\t\t166.288529,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t211.057769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1223,\t\t3.615447,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806101,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1224,\t\t54.949188,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.523778,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1225,\t\t21.684328,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.931481,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1226,\t\t1.849094,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.982858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1227,\t\t9.902281,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.482807,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1228,\t\t0.730895,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.021367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1229,\t\t9.347805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t51.244222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1230,\t\t0.238773,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.681276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1231,\t\t4.366652,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.55478,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1232,\t\t11.333033,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.075088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1233,\t\t150.178408,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t575.36828,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1235,\t\t2.638187,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.03734,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1236,\t\t22.763423,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.225035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1237,\t\t2.778775,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.605409,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1238,\t\t72.798024,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.691049,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1239,\t\t0.564342,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.267706,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1240,\t\t241.248703,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.51051,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1241,\t\t364.295435,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t385.361595,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1242,\t\t4.834243,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.074038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1243,\t\t37.302558,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.079842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1244,\t\t79.039372,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t323.472536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1245,\t\t2.700683,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.080896,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1246,\t\t11.614519,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.127825,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1247,\t\t4.672328,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.833396,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1248,\t\t11.023432,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.958275,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1249,\t\t65.703041,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.135177,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1250,\t\t28.580821,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.830519,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1251,\t\t21.224131,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.404345,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1252,\t\t14.138152,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.887727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1253,\t\t50.455721,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.502694,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1254,\t\t28.780508,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.278695,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1255,\t\t3.003121,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.818419,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1256,\t\t12.275602,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.091842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1257,\t\t65.168323,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.95288,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1258,\t\t68.145193,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t235.487329,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1259,\t\t85.172922,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.288719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1260,\t\t6.875991,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.168717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1261,\t\t173.495737,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.699555,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1262,\t\t0.309635,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.524108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1263,\t\t0.24441,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.352421,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1264,\t\t64.013359,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.035361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1265,\t\t4.784966,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.654727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1266,\t\t103.17248,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.710849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1267,\t\t38.430186,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.469006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0],\n\t\t[1268,\t\t2.034979,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.4295,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1269,\t\t2.322702,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.105829,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1270,\t\t25.03907,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.950511,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1271,\t\t23.845798,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.371792,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1272,\t\t0.422069,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.23166,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1273,\t\t0.244404,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.169201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1274,\t\t50.377516,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.095629,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1275,\t\t87.392367,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.0753,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1276,\t\t24.185119,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.655641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1277,\t\t52.100619,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.611252,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1278,\t\t146.059023,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t170.437781,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1279,\t\t0.000154,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004344,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1280,\t\t0.06616,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.626494,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1281,\t\t0.401488,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.51246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1282,\t\t0.613544,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.363037,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1283,\t\t402.284475,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1297.764428,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1284,\t\t16.498159,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.426322,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1285,\t\t0.402632,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.937048,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1286,\t\t9.779237,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.872201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1287,\t\t90.378036,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t93.199628,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1288,\t\t144.534188,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t148.402692,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1289,\t\t165.62078,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t184.149235,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1290,\t\t3.310598,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.901974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1291,\t\t91.035472,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.293351,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1292,\t\t31.980176,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.682074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1293,\t\t2.251511,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402107,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1294,\t\t4.500984,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.39743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1295,\t\t5.035929,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.873666,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1296,\t\t6.542922,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.356489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1297,\t\t69.476429,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t177.778742,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1298,\t\t0.892933,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.014603,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1299,\t\t0.650887,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.158207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1300,\t\t10.924264,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.74405,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1301,\t\t29.938353,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.863304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1302,\t\t3.756946,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.877299,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1303,\t\t3.548349,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.335516,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1304,\t\t6.98833,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.594319,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1305,\t\t0.004134,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004567,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1306,\t\t0.013051,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.827014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1307,\t\t0.000269,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.29894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1308,\t\t3.092704,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.278321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1309,\t\t1.952844,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.34909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1310,\t\t0.96121,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.64589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1311,\t\t0.915033,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.854004,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1312,\t\t61.692105,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t262.264924,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1313,\t\t23.4633,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.836748,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1314,\t\t9.723847,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.003987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1315,\t\t7.484353,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.879027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1316,\t\t0.342208,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.757497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1317,\t\t2.443039,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.958574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1318,\t\t1.145435,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.956332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1319,\t\t5.754202,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.708276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1320,\t\t10.408423,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.75859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1321,\t\t0.058081,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.161123,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1322,\t\t0.553533,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.929763,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1323,\t\t111.607065,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t199.111909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1324,\t\t7.765494,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.063258,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1325,\t\t55.916254,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.497559,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1326,\t\t15.960037,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.928865,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1327,\t\t14.515435,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.796895,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1328,\t\t4.734532,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.063343,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1329,\t\t189.523369,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.675424,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1330,\t\t19.894995,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.131028,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1332,\t\t16.042068,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.293088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1333,\t\t36.231617,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.650254,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1334,\t\t0.134934,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215341,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1335,\t\t2.182146,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.306939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1336,\t\t25.507951,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.773035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1337,\t\t17.701639,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t121.31241,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1338,\t\t0.295098,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.832524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1339,\t\t2.095095,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.086482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1340,\t\t9.678742,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.098327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1341,\t\t32.05516,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t205.513321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1342,\t\t0.019287,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.734589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1343,\t\t0.027263,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.102108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1344,\t\t0.080101,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.226057,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1345,\t\t2.810638,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.971188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1346,\t\t78.824561,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.719215,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1347,\t\t115.323366,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.115976,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1348,\t\t4.836222,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.707927,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1349,\t\t8.174869,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t42.352342,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1350,\t\t0.009739,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.094971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1351,\t\t0.000376,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.015958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1352,\t\t0.034671,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.83726,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1355,\t\t0.989078,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.688324,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1356,\t\t57.296262,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.486231,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1357,\t\t39.855184,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.459913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1358,\t\t0.144939,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.247293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1359,\t\t64.298242,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.633589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1363,\t\t0.004007,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.036158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1364,\t\t0.008183,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.061068,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1365,\t\t6.2e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1366,\t\t1.0371,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.229992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1367,\t\t8.708215,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.863891,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1368,\t\t0.162393,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.298243,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1369,\t\t4.676329,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.968859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1370,\t\t0.206453,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.343308,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1371,\t\t15.125702,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t81.767208,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1372,\t\t187.012409,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t192.966588,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1373,\t\t34.669274,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.200257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1374,\t\t22.833303,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t108.220146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1375,\t\t13.048539,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.223816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1376,\t\t16.260883,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t176.213655,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1377,\t\t91.898696,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t234.376272,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1378,\t\t100.492585,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t246.029906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1379,\t\t0.001688,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.805984,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1381,\t\t0.003024,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.01257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1382,\t\t60.392773,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.839906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1383,\t\t21.830255,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.821439,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1387,\t\t0.003612,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.493561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1390,\t\t0.003859,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.732816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1391,\t\t0.003146,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.521719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1393,\t\t0.000486,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.376509,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1394,\t\t0.000481,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.077886,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1395,\t\t0.000515,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.073776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1396,\t\t0.000342,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026112,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1397,\t\t0.017188,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.084545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1398,\t\t0.002611,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.779641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1399,\t\t0.888247,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.868157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1400,\t\t0.000449,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.297197,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1401,\t\t9.673835,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.339497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1402,\t\t1.995463,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.328902,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1403,\t\t53.765488,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.651672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1404,\t\t51.552063,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.800518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1405,\t\t3.911245,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.550802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1406,\t\t1.823208,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.763987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1407,\t\t0.020768,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.211614,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1408,\t\t27.750555,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.078698,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1409,\t\t6.125989,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.019786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1410,\t\t16.580102,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.466518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1411,\t\t29.991893,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.395367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1412,\t\t1.247754,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.987601,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1413,\t\t1.161805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.679791,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1414,\t\t7.260981,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.992489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1415,\t\t1.902862,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.454501,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1416,\t\t1.697076,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.958002,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1417,\t\t0.000225,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.001311,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1418,\t\t31.771568,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.264613,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1419,\t\t13.601182,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.260903,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1420,\t\t1.057952,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.399757,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1421,\t\t4.889225,\t\t0,\t\t9999,\t\t-9999,\t\t0.999644,\t\t100,\t\t1,\t\t6.972369,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1422,\t\t3.591055,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.730495,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1423,\t\t1.379632,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.931017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1424,\t\t52.568259,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t219.092115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1425,\t\t7.570898,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.366402,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1426,\t\t53.646053,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.762602,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1427,\t\t426.696884,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t480.698671,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1428,\t\t229.292533,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t334.885743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1429,\t\t4.000522,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.279826,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1430,\t\t0.00361,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.034248,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1431,\t\t82.661441,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t227.662022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1432,\t\t3.068396,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.058931,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1433,\t\t353.343587,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1289.241188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1434,\t\t12.901546,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.440014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1435,\t\t16.366899,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t86.713217,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1436,\t\t25.427054,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.434116,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1437,\t\t233.567574,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.321958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1438,\t\t303.313525,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.815158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1439,\t\t27.439294,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.103164,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1440,\t\t0.682349,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.833609,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1441,\t\t0.102576,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.171578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1442,\t\t0.287662,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.715522,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0],\n\t\t[1443,\t\t24.01603,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t103.005076,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1444,\t\t5.78705,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.981696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1445,\t\t8.939839,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.036799,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1446,\t\t665.560328,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t758.547933,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1447,\t\t71.232954,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.477411,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1448,\t\t0.635617,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.523578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1449,\t\t4.007945,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t95.437673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1450,\t\t11.695201,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.256809,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1451,\t\t11.056834,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.198838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1452,\t\t2.209088,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.068921,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1453,\t\t62.829218,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.93775,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1454,\t\t103.053623,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.126607,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1455,\t\t0.000929,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.654438,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1456,\t\t0.807723,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.054822,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1459,\t\t0.001899,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.309059,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1460,\t\t8.582365,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t101.498473,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1461,\t\t0.00048,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.951737,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1463,\t\t0.000661,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.711207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1464,\t\t103.699065,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.884211,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1466,\t\t0.008472,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.685017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1467,\t\t0.01035,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.096155,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1468,\t\t0.015871,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.789171,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1469,\t\t9.519658,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.007467,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1470,\t\t18.665435,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t78.965265,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1471,\t\t37.296985,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t159.165074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1472,\t\t0.506929,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.980182,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1473,\t\t5.4e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.362608,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1474,\t\t0.001949,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.398948,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1475,\t\t0.000397,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.39088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1476,\t\t101.327203,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t250.480113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1477,\t\t2.856374,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.122974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1479,\t\t3.294063,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.592606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1480,\t\t10.202484,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.681964,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1481,\t\t0.018812,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.053146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1482,\t\t4.22506,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.51083,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1483,\t\t0.032046,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.599649,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1484,\t\t0.00133,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02991,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1485,\t\t0.025059,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.563547,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1486,\t\t0.128922,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.89934,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1487,\t\t0.399374,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.142917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1488,\t\t0.557496,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.569856,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1489,\t\t0.000102,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.118938,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1490,\t\t153.87342,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t782.463701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1491,\t\t79.356319,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.622838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1492,\t\t222.647124,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t229.927503,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1493,\t\t81.369208,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.557175,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1494,\t\t322.728735,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t404.486733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1495,\t\t25.969556,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.920717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1496,\t\t5.1e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000282,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1497,\t\t71.545947,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.070006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1498,\t\t92.120695,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.800802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1499,\t\t0.748238,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.286676,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1500,\t\t0.028955,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.154817,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1501,\t\t1.053275,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.165333,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1502,\t\t0.10328,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.938928,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1503,\t\t29.240906,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.972187,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1504,\t\t122.968061,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.822836,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1505,\t\t7.645825,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.765913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1506,\t\t21.720319,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.406717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1507,\t\t3.842405,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.438042,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1508,\t\t0.06199,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.065259,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1510,\t\t80.0538,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t107.008141,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1511,\t\t112.671979,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.22192,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1512,\t\t52.731338,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.130052,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1513,\t\t20.534213,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.051786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1514,\t\t0.001102,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.027711,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1516,\t\t0.010731,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02881,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1517,\t\t0.893235,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.286804,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1518,\t\t0.001327,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.670542,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1519,\t\t9.2e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.04654,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n\t])\n\tppc[\"branch\"] = 
array([\n\t\t[586,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[589,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[590,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[593,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[595,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[598,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[599,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[602,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[603,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[607,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[608,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[609,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[612,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[614,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[616,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[617,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[618,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[619,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[624,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[629,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[632,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[637,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[638,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[640,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[641,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[642,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[643,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[647,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[652,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[655,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[663,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[666,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[670,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,
\t\t360\t\t],\n\t\t[672,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[676,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[681,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[683,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[687,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[694,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[695,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[697,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[698,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[702,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[705,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[707,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[714,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[716,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[717,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[722,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[724,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[730,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[732,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[735,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[741,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[742,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[743,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[747,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[749,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[750,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[753,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[761,\t\t288,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[762,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[765,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[767,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[772,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[774,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-
360,\t\t360\t\t],\n\t\t[777,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[778,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[781,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[784,\t\t563,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[785,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[788,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[789,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[791,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[792,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[795,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[800,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[801,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[802,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[805,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[806,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[808,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[809,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[811,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[814,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[816,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[817,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[821,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[826,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[834,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[835,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[836,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[837,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[839,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[841,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[843,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[844,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[850,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[851,\t\t575,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t
\t1,\t\t-360,\t\t360\t\t],\n\t\t[853,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[856,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[857,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[858,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[860,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[865,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[867,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[869,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[870,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[872,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[874,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[875,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[882,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[883,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[885,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[886,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[889,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[890,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[893,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[894,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[895,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[896,\t\t581,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[898,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[902,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[903,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[905,\t\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[906,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[907,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[909,\t\t417,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[917,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[918,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[920,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[921,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\
t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[922,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[923,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[925,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[931,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[936,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[937,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[939,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[940,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[944,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[950,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[952,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[958,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[959,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[960,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[963,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[965,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[967,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[969,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[971,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[978,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[982,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[983,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[984,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[985,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[986,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[987,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[988,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[993,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[994,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[995,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[997,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[999,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1002,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\
t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1007,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1010,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1011,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1012,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1014,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1027,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1028,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1029,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1030,\t\t269,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1031,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1032,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1033,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1034,\t\t4,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1035,\t\t6,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1036,\t\t7,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1037,\t\t8,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1038,\t\t9,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1039,\t\t11,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1040,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1041,\t\t16,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1042,\t\t17,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1043,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1044,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1045,\t\t23,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1046,\t\t25,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1047,\t\t27,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1048,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1049,\t\t29,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1050,\t\t31,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1051,\t\t33,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1052,\t\t34,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1053,\t\t35,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1054,\t\t36,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0
,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1055,\t\t38,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1056,\t\t39,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1057,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1058,\t\t41,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1059,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1060,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1061,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1062,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1063,\t\t48,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1064,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1065,\t\t50,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1066,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1067,\t\t53,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1068,\t\t54,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1069,\t\t55,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1070,\t\t57,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1071,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1072,\t\t59,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1073,\t\t60,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1074,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1075,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1076,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1077,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1078,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1079,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1080,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1081,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1082,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1083,\t\t73,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1084,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1085,\t\t76,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1086,\t\t77,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1087,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9
999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1088,\t\t80,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1089,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1090,\t\t82,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1091,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1092,\t\t84,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1093,\t\t85,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1096,\t\t90,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1097,\t\t91,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1098,\t\t92,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1099,\t\t93,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1100,\t\t97,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1101,\t\t98,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1102,\t\t101,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1103,\t\t102,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1105,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1106,\t\t109,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1107,\t\t110,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1108,\t\t111,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1109,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1110,\t\t113,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1111,\t\t114,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1113,\t\t116,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1114,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1115,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1116,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1117,\t\t122,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1118,\t\t126,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1119,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1120,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1121,\t\t131,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1122,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1123,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1124,\t\t134,\t\t0,\t\t1e-05
,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1125,\t\t135,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1126,\t\t136,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1127,\t\t137,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1128,\t\t139,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1129,\t\t140,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1130,\t\t141,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1131,\t\t142,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1133,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1134,\t\t146,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1135,\t\t147,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1136,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1137,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1138,\t\t150,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1139,\t\t151,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1140,\t\t152,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1142,\t\t154,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1143,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1144,\t\t158,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1145,\t\t161,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1146,\t\t162,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1147,\t\t163,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1148,\t\t164,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1149,\t\t166,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1150,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1151,\t\t168,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1152,\t\t169,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1155,\t\t172,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1157,\t\t174,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1160,\t\t177,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1161,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1162,\t\t179,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1163,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t36
0\t\t],\n\t\t[1164,\t\t181,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1165,\t\t182,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1166,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1168,\t\t186,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1169,\t\t187,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1171,\t\t189,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1172,\t\t190,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1173,\t\t192,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1175,\t\t194,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1176,\t\t196,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1177,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1178,\t\t198,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1179,\t\t199,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1181,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1182,\t\t203,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1183,\t\t204,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1184,\t\t205,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1186,\t\t207,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1187,\t\t208,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1188,\t\t209,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1189,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1190,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1191,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1192,\t\t213,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1193,\t\t214,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1194,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1195,\t\t216,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1196,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1197,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1198,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1199,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1200,\t\t222,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1201,\t\t223,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,
\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1202,\t\t224,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1203,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1204,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1205,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1206,\t\t228,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1207,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1208,\t\t230,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1209,\t\t234,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1210,\t\t235,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1211,\t\t237,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1212,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1213,\t\t239,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1214,\t\t240,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1215,\t\t241,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1216,\t\t242,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1217,\t\t243,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1218,\t\t244,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1219,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1220,\t\t251,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1221,\t\t252,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1222,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1223,\t\t254,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1224,\t\t255,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1225,\t\t256,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1226,\t\t257,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1227,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1228,\t\t260,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1229,\t\t263,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1230,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1231,\t\t266,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1232,\t\t267,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1233,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1235,\t\t27
1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1236,\t\t272,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1237,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1238,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1239,\t\t275,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1240,\t\t276,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1241,\t\t278,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1242,\t\t281,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1243,\t\t282,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1244,\t\t283,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1245,\t\t284,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1246,\t\t285,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1247,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1248,\t\t287,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1249,\t\t288,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1250,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1251,\t\t291,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1252,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1253,\t\t293,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1254,\t\t294,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1255,\t\t295,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1256,\t\t296,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1257,\t\t297,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1258,\t\t298,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1259,\t\t299,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1260,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1261,\t\t302,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1262,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1263,\t\t304,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1264,\t\t307,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1265,\t\t308,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1266,\t\t309,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1267,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t
1,\t\t-360,\t\t360\t\t],\n\t\t[1268,\t\t312,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1269,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1270,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1271,\t\t317,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1272,\t\t318,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1273,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1274,\t\t321,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1275,\t\t322,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1276,\t\t323,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1277,\t\t324,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1278,\t\t325,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1279,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1280,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1281,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1282,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1283,\t\t331,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1284,\t\t333,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1285,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1286,\t\t337,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1287,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1288,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1289,\t\t340,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1290,\t\t341,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1291,\t\t342,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1292,\t\t343,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1293,\t\t344,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1294,\t\t345,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1295,\t\t346,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1296,\t\t347,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1297,\t\t348,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1298,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1299,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1300,\t\t353,\t\t0,\t\t1e-05,\t\t0,\
t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1301,\t\t354,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1302,\t\t355,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1303,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1304,\t\t357,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1305,\t\t359,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1306,\t\t361,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1307,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1308,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1309,\t\t364,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1310,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1311,\t\t366,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1312,\t\t367,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1313,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1314,\t\t369,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1315,\t\t370,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1316,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1317,\t\t372,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1318,\t\t373,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1319,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1320,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1321,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1322,\t\t377,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1323,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1324,\t\t379,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1325,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1326,\t\t384,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1327,\t\t385,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1328,\t\t386,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1329,\t\t387,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1330,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1332,\t\t391,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1333,\t\t392,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\
n\t\t[1334,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1335,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1336,\t\t395,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1337,\t\t396,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1338,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1339,\t\t398,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1340,\t\t399,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1341,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1342,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1343,\t\t404,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1344,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1345,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1346,\t\t407,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1347,\t\t408,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1348,\t\t410,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1349,\t\t411,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1350,\t\t412,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1351,\t\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1352,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1355,\t\t418,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1356,\t\t419,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1357,\t\t420,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1358,\t\t421,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1359,\t\t422,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1363,\t\t426,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1364,\t\t427,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1365,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1366,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1367,\t\t430,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1368,\t\t431,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1369,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1370,\t\t433,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1371,\t\t434,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999
,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1372,\t\t435,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1373,\t\t436,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1374,\t\t437,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1375,\t\t438,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1376,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1377,\t\t440,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1378,\t\t441,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1379,\t\t442,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1381,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1382,\t\t446,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1383,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1387,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1390,\t\t455,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1391,\t\t456,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1393,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1394,\t\t459,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1395,\t\t460,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1396,\t\t461,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1397,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1398,\t\t463,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1399,\t\t464,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1400,\t\t465,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1401,\t\t466,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1402,\t\t467,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1403,\t\t468,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1404,\t\t469,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1405,\t\t470,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1406,\t\t471,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1407,\t\t472,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1408,\t\t473,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1409,\t\t474,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1410,\t\t475,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1411,\t\t476,\t\t0,
\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1412,\t\t477,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1413,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1414,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1415,\t\t480,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1416,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1417,\t\t482,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1418,\t\t483,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1419,\t\t484,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1420,\t\t485,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1421,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1422,\t\t487,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1423,\t\t488,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1424,\t\t489,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1425,\t\t490,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1426,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1427,\t\t492,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1428,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1429,\t\t494,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1430,\t\t495,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1431,\t\t496,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1432,\t\t497,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1433,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1434,\t\t499,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1435,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1436,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1437,\t\t502,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1438,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1439,\t\t504,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1440,\t\t505,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1441,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1442,\t\t507,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1443,\t\t508,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-3
60,\t\t360\t\t],\n\t\t[1444,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1445,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1446,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1447,\t\t512,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1448,\t\t513,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1449,\t\t514,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1450,\t\t515,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1451,\t\t516,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1452,\t\t517,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1453,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1454,\t\t519,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1455,\t\t520,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1456,\t\t521,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1459,\t\t524,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1460,\t\t525,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1461,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1463,\t\t528,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1464,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1466,\t\t531,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1467,\t\t532,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1468,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1469,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1470,\t\t535,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1471,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1472,\t\t537,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1473,\t\t538,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1474,\t\t539,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1475,\t\t540,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1476,\t\t541,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1477,\t\t542,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1479,\t\t544,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1480,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1481,\t\t546,\t\t0,\t\t1e-05,\t\t0,\t\t9999,
\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1482,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1483,\t\t548,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1484,\t\t549,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1485,\t\t550,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1486,\t\t551,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1487,\t\t552,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1488,\t\t554,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1489,\t\t555,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1490,\t\t556,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1491,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1492,\t\t558,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1493,\t\t559,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1494,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1495,\t\t561,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1496,\t\t562,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1497,\t\t563,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1498,\t\t564,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1499,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1500,\t\t566,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1501,\t\t567,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1502,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1503,\t\t569,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1504,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1505,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1506,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1507,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1508,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1510,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1511,\t\t577,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1512,\t\t578,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1513,\t\t579,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1514,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[15
16,\t\t582,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1517,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1518,\t\t584,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1519,\t\t585,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1,\t\t490,\t\t0,\t\t0.01433884297520661,\t\t0.151691958358336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.375\t\t],\n\t\t[3,\t\t4,\t\t0,\t\t0.006291637811634348,\t\t0.903417549506624,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t72.681\t\t],\n\t\t[491,\t\t6,\t\t0,\t\t0.011200661157024791,\t\t0.118492839955776,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.882\t\t],\n\t\t[7,\t\t5,\t\t0,\t\t0.005794840720221606,\t\t0.20802058859584005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.471\t\t],\n\t\t[8,\t\t9,\t\t0,\t\t0.0024379328254847646,\t\t0.350063268897336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.163\t\t],\n\t\t[492,\t\t11,\t\t0,\t\t0.018224793388429753,\t\t0.0482004476327704,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.565\t\t],\n\t\t[11,\t\t493,\t\t0,\t\t0.030286942148760328,\t\t0.08010209706571599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.809\t\t],\n\t\t[492,\t\t493,\t\t0,\t\t0.04521652892561983,\t\t0.11958747011094399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t68.39\t\t],\n\t\t[494,\t\t14,\t\t0,\t\t0.012990743801652892,\t\t0.137430291356512,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.297\t\t],\n\t\t[13,\t\t15,\t\t0,\t\t0.007681959833795014,\t\t0.27576354266704156,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.371\t\t],\n\t\t[16,\t\t5,\t\t0,\t\t0.006275623268698061,\t\t0.22527950450957998,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.248000000000005\t\t],\n\t\t[17,\t\t18,\t\t0,\t\t0.04623522622347646,\t\t0.9335989000302801,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t200.291\t\t],\n\t\t[17,\t\t12,\t\t0,\t\t0.0056020313942728535,\t\t0.113118303398186,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.268\t\t],\n\t\t[14,\t\t495,\t\t0,\t\t0.0017957024793388433,\t\t0.018996904156819597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.432\t\t],\n\t\t[494,\t\t19,\t\t0,\t\t0.010246611570247935,\t\t0.10839986031771602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.996\t\t],\n\t\t[20,\t\t21,\t\t0,\t\t0.005415685595567867,\t\t0.19440984828307922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t31.281\t\t],\n\t\t[20,\t\t22,\t\t0,\t\t0.0049706544321329645,\t\t0.713737278110032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.42100000000001\t\t],\n\t\t[497,\t\t23,\t\t0,\t\t0.002190413223140496,\t\t0.005793146490362,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.313\t\t],\n\t\t[23,\t\t499,\t\t0,\t\t0.020799669421487598,\t\t0.22004164444829602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.919\t\t],\n\t\t[25,\t\t26,\t\t0,\t\t0.00141845567867036,\t\t0.050919084651523595,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.193\t\t],\n\t\t[25,\t\t22,\t\t0,\t\t0.0035578254847645433,\t\t0.0319293051869808,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.275\t\t],\n\t\t[23,\t\t27,\t\t0,\t\t0.027738181818181818
,\t\t0.073361203699828,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.95399999999999\t\t],\n\t\t[28,\t\t23,\t\t0,\t\t0.012841652892561981,\t\t0.0339632611780132,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.423\t\t],\n\t\t[8,\t\t21,\t\t0,\t\t0.004948753462603878,\t\t0.17764812836304802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.584\t\t],\n\t\t[9,\t\t29,\t\t0,\t\t0.002212863573407202,\t\t0.31774552934092004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.563000000000002\t\t],\n\t\t[30,\t\t25,\t\t0,\t\t0.019958795013850415,\t\t0.17911796401827998,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.641000000000005\t\t],\n\t\t[31,\t\t32,\t\t0,\t\t0.0299776084949446,\t\t0.605319030583196,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t129.863\t\t],\n\t\t[32,\t\t33,\t\t0,\t\t0.016762234533725762,\t\t0.33846927983213604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.61399999999999\t\t],\n\t\t[34,\t\t35,\t\t0,\t\t0.001931900826446281,\t\t0.020437759184893597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.843999999999999\t\t],\n\t\t[35,\t\t36,\t\t0,\t\t0.0008730578512396695,\t\t0.0092361605077588,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.641\t\t],\n\t\t[490,\t\t6,\t\t0,\t\t0.049352066115702475,\t\t0.130525028606764,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.645\t\t],\n\t\t[37,\t\t10,\t\t0,\t\t0.02404639889196676,\t\t0.485553838251812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.169\t\t],\n\t\t[10,\t\t38,\t\t0,\t\t0.006848799630657894,\t\t0.13829351176534158,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.669\t\t],\n\t\t[37,\t\t38,\t\t0,\t\t0.01437834718372576,\t\t1.1613317560186958,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t124.574\t\t],\n\t\t[39,\t\t40,\t\t0,\t\t0.04521629732222991,\t\t0.913024308337812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t195.877\t\t],\n\t\t[39,\t\t41,\t\t0,\t\t0.017466989843005543,\t\t0.35269996139852006,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.667\t\t],\n\t\t[42,\t\t41,\t\t0,\t\t0.031145429362880884,\t\t0.6289001042979919,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t134.922\t\t],\n\t\t[18,\t\t42,\t\t0,\t\t0.03439750692520776,\t\t0.6945672650962679,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t149.01\t\t],\n\t\t[492,\t\t43,\t\t0,\t\t0.01819173553719008,\t\t0.192452068436848,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.03\t\t],\n\t\t[44,\t\t45,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t505,\t\t0,\t\t0.006061487603305785,\t\t0.0160312607980052,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[46,\t\t12,\t\t0,\t\t0.0014741170360110802,\t\t0.2116687641962416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.029\t\t],\n\t\t[47,\t\t48,\t\t0,\t\t0.005344182825484765,\t\t0.01199019212302604,\t\t428.0,\t\t428.0,\t\t428.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.7170000000000005\t\t],\n\t\t[49,\t\t50,\t\t0,\t\t0.0019151662049861494,\t\t0.0171874439892256,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.531000000000001\t\t],\n\t\t[31,\t\t33,\t\t0,\t\t0.013475992613088641,\t\t0.27211225959163604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.378\t\t],\n\t\t[31,\t\t51,\t\t0,\t\t
0.003518611495844875,\t\t0.5052381383693519,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.647\t\t],\n\t\t[52,\t\t53,\t\t0,\t\t0.010464421745152355,\t\t1.5025884408875438,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t120.885\t\t],\n\t\t[52,\t\t54,\t\t0,\t\t0.0076126500461911354,\t\t0.1537174637168,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.978\t\t],\n\t\t[506,\t\t55,\t\t0,\t\t0.012634380165289257,\t\t0.133660287181212,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.219\t\t],\n\t\t[506,\t\t507,\t\t0,\t\t0.044157355371900825,\t\t0.11678619613628,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.788\t\t],\n\t\t[57,\t\t506,\t\t0,\t\t0.004687272727272727,\t\t0.049587095736244,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.179\t\t],\n\t\t[57,\t\t58,\t\t0,\t\t0.014436363636363634,\t\t0.0381809096340232,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.835\t\t],\n\t\t[58,\t\t506,\t\t0,\t\t0.019797685950413223,\t\t0.052360391943288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.944000000000003\t\t],\n\t\t[59,\t\t60,\t\t0,\t\t0.019407548476454296,\t\t0.174170863885556,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.049\t\t],\n\t\t[508,\t\t62,\t\t0,\t\t0.051111404958677685,\t\t0.03379452026753001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.653\t\t],\n\t\t[30,\t\t61,\t\t0,\t\t0.03143698060941828,\t\t0.28212765137935203,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.79\t\t],\n\t\t[63,\t\t506,\t\t0,\t\t0.027457190082644623,\t\t0.072618044249872,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.528999999999996\t\t],\n\t\t[13,\t\t64,\t\t0,\t\t0.0014816481994459833,\t\t0.2127501654814608,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.116\t\t],\n\t\t[65,\t\t66,\t\t0,\t\t0.03778185595567867,\t\t0.7629053006222161,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t163.671\t\t],\n\t\t[59,\t\t67,\t\t0,\t\t0.0051880193905817175,\t\t0.046559297286324804,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.982999999999999\t\t],\n\t\t[61,\t\t67,\t\t0,\t\t0.012931440443213295,\t\t0.1160517597580644,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.346\t\t],\n\t\t[68,\t\t69,\t\t0,\t\t0.011149584487534626,\t\t0.4002427745096039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.4\t\t],\n\t\t[70,\t\t69,\t\t0,\t\t0.009625346260387812,\t\t0.345526355460808,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.596000000000004\t\t],\n\t\t[71,\t\t72,\t\t0,\t\t0.008878635734072021,\t\t0.318721276477736,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.283\t\t],\n\t\t[73,\t\t74,\t\t0,\t\t0.012529547553116345,\t\t0.253001288604392,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t54.278\t\t],\n\t\t[37,\t\t75,\t\t0,\t\t0.027459141274238225,\t\t0.5544652029066119,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t118.95299999999999\t\t],\n\t\t[72,\t\t75,\t\t0,\t\t0.006688711911357341,\t\t0.240108375006292,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.634\t\t],\n\t\t[37,\t\t72,\t\t0,\t\t0.036222068328739615,\t\t0.7314094881920841,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t156.914\t\t],\n\t\t[76,\t\t77,\t\t0,\t\t0.004683777700831025,\t\t0.6725445900750401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t54.107\t\t],\n\t\t[77,\t\t51,\t\t0,\t\t0.
00363183864265928,\t\t0.5214964473447999,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.955\t\t],\n\t\t[73,\t\t72,\t\t0,\t\t0.025475069252077563,\t\t0.514402082018968,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.35799999999999\t\t],\n\t\t[18,\t\t40,\t\t0,\t\t0.01302770083102493,\t\t0.26306018504072,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.43600000000001\t\t],\n\t\t[492,\t\t45,\t\t0,\t\t0.0308703030303719,\t\t0.18370114733484796,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.03699999999999\t\t],\n\t\t[10,\t\t74,\t\t0,\t\t0.030167359187465374,\t\t0.609150547206812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t130.685\t\t],\n\t\t[45,\t\t511,\t\t0,\t\t0.08203371900826446,\t\t0.05424014819960001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.038000000000004\t\t],\n\t\t[78,\t\t32,\t\t0,\t\t0.013458795013850415,\t\t0.48313777647302397,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.738\t\t],\n\t\t[79,\t\t80,\t\t0,\t\t0.0038086911357340715,\t\t0.1367226831743568,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.999000000000002\t\t],\n\t\t[81,\t\t79,\t\t0,\t\t0.010767832409972299,\t\t0.3865388099484561,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t62.195\t\t],\n\t\t[34,\t\t82,\t\t0,\t\t0.0015497520661157025,\t\t0.00409874294399768,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.344\t\t],\n\t\t[83,\t\t84,\t\t0,\t\t0.00902611570247934,\t\t0.0238720301499152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.652000000000001\t\t],\n\t\t[83,\t\t499,\t\t0,\t\t0.04179570247933885,\t\t0.0276350398834796,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.608\t\t],\n\t\t[85,\t\t86,\t\t0,\t\t0.00802354570637119,\t\t0.28802563884886,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.343999999999994\t\t],\n\t\t[87,\t\t86,\t\t0,\t\t0.01904968836565097,\t\t0.683837154069184,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.031\t\t],\n\t\t[88,\t\t89,\t\t0,\t\t0.00380297520661157,\t\t0.010058007429140002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.752000000000001\t\t],\n\t\t[90,\t\t86,\t\t0,\t\t0.012097818559556786,\t\t0.434282055192244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t69.877\t\t],\n\t\t[91,\t\t86,\t\t0,\t\t9.26246537396122e-05,\t\t0.013299992817559201,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t92,\t\t0,\t\t0.0001852493074792244,\t\t0.0066499964087796005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t93,\t\t0,\t\t0.008152181440443215,\t\t0.292643346635492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.086999999999996\t\t],\n\t\t[94,\t\t86,\t\t0,\t\t0.012883829639889197,\t\t0.46249792780547194,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.417\t\t],\n\t\t[86,\t\t95,\t\t0,\t\t0.010421052631578947,\t\t0.37409026526870803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t60.192\t\t],\n\t\t[513,\t\t517,\t\t0,\t\t0.0008733884297520661,\t\t0.0023099144321748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.321\t\t],\n\t\t[97,\t\t66,\t\t0,\t\t0.03812777008310249,\t\t0.34217338998058805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.113\t\t],\n\t\t[42,\t\t98,\t\t0,\t\t0.003091759002770083,\t\t0.44394630230884,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t
\t-360,\t\t35.716\t\t],\n\t\t[99,\t\t100,\t\t0,\t\t0.016371537396121884,\t\t0.587698093837988,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t94.56200000000001\t\t],\n\t\t[42,\t\t101,\t\t0,\t\t0.008165339335180054,\t\t0.29311568282888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.163000000000004\t\t],\n\t\t[102,\t\t42,\t\t0,\t\t0.012403047091412742,\t\t0.44523901189173193,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t71.64\t\t],\n\t\t[103,\t\t87,\t\t0,\t\t0.007073060941828254,\t\t0.25390556381756,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.854\t\t],\n\t\t[104,\t\t103,\t\t0,\t\t0.0028852146814404432,\t\t0.1035721403291428,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.665\t\t],\n\t\t[105,\t\t87,\t\t0,\t\t0.006406682825484765,\t\t0.22998422159488002,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.005\t\t],\n\t\t[106,\t\t107,\t\t0,\t\t0.005714219759923823,\t\t0.11538365264216799,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.754\t\t],\n\t\t[108,\t\t107,\t\t0,\t\t0.0025427631578947367,\t\t0.09127896939786201,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.687000000000001\t\t],\n\t\t[109,\t\t106,\t\t0,\t\t0.003030470914127424,\t\t0.10878648330773438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.504\t\t],\n\t\t[110,\t\t111,\t\t0,\t\t0.019821849030470913,\t\t0.7115558306889919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.491\t\t],\n\t\t[87,\t\t112,\t\t0,\t\t0.006135907202216068,\t\t0.220264039928212,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.441\t\t],\n\t\t[113,\t\t87,\t\t0,\t\t0.003981648199445983,\t\t0.14293141813921081,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.998\t\t],\n\t\t[87,\t\t85,\t\t0,\t\t0.011046225761772853,\t\t0.3965324494097,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.803000000000004\t\t],\n\t\t[110,\t\t114,\t\t0,\t\t0.011665339335180056,\t\t0.418757110306188,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.37899999999999\t\t],\n\t\t[115,\t\t116,\t\t0,\t\t0.007048925619834712,\t\t0.07457124214588401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.323\t\t],\n\t\t[117,\t\t118,\t\t0,\t\t0.005987534626038782,\t\t0.21493782785077598,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.584\t\t],\n\t\t[117,\t\t119,\t\t0,\t\t0.0038738746537396117,\t\t0.5562504472696961,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.751000000000005\t\t],\n\t\t[117,\t\t120,\t\t0,\t\t0.005886686288088643,\t\t0.8452704781039522,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t68.003\t\t],\n\t\t[121,\t\t122,\t\t0,\t\t0.0021170360110803325,\t\t0.0759964075574972,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.228\t\t],\n\t\t[123,\t\t124,\t\t0,\t\t0.0018386426592797783,\t\t0.0660027680945204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.62\t\t],\n\t\t[125,\t\t126,\t\t0,\t\t0.004941135734072022,\t\t0.17737467056702802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.54\t\t],\n\t\t[127,\t\t119,\t\t0,\t\t0.0029027008310249305,\t\t0.1041998502705648,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.766\t\t],\n\t\t[118,\t\t128,\t\t0,\t\t0.007397160664819945,\t\t0.265539950057812,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.726000000000006\t\t],\n\t\t[121,\t\t119,\t\t0,\t\t0.00
2552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[530,\t\t527,\t\t0,\t\t0.022726611570247933,\t\t0.060106736329903994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.374\t\t],\n\t\t[125,\t\t130,\t\t0,\t\t0.002931440443213297,\t\t0.105231531956442,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.932000000000002\t\t],\n\t\t[125,\t\t123,\t\t0,\t\t0.0019078081717451524,\t\t0.2739425623421336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.039\t\t],\n\t\t[131,\t\t132,\t\t0,\t\t0.0035744459833795014,\t\t0.12831385593973843,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.646\t\t],\n\t\t[133,\t\t123,\t\t0,\t\t0.003864439058171745,\t\t0.13872389704704202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.320999999999998\t\t],\n\t\t[524,\t\t134,\t\t0,\t\t0.008092231404958678,\t\t0.08560847143881999,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.479\t\t],\n\t\t[135,\t\t136,\t\t0,\t\t0.005242901662049862,\t\t0.1882073282678,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.283\t\t],\n\t\t[123,\t\t131,\t\t0,\t\t0.003138331024930748,\t\t0.1126583971045252,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.127\t\t],\n\t\t[117,\t\t128,\t\t0,\t\t0.010800034626038782,\t\t0.38769479063117196,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.381\t\t],\n\t\t[137,\t\t521,\t\t0,\t\t0.013832396694214875,\t\t0.14633421587532003,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.843\t\t],\n\t\t[531,\t\t514,\t\t0,\t\t0.0059504132231404955,\t\t0.035409362037522,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.5\t\t],\n\t\t[139,\t\t521,\t\t0,\t\t0.021257520661157023,\t\t0.05622132386323199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.152\t\t],\n\t\t[140,\t\t514,\t\t0,\t\t0.018527603305785127,\t\t0.04900131122836401,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.023000000000003\t\t],\n\t\t[522,\t\t141,\t\t0,\t\t0.012168595041322314,\t\t0.032183175718526795,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.405\t\t],\n\t\t[142,\t\t523,\t\t0,\t\t0.007060165289256198,\t\t0.0746901476577608,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.357\t\t],\n\t\t[530,\t\t526,\t\t0,\t\t0.020281652892561983,\t\t0.053640374808152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.676\t\t],\n\t\t[140,\t\t532,\t\t0,\t\t0.004669090909090909,\t\t0.0123486871461184,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.062\t\t],\n\t\t[142,\t\t144,\t\t0,\t\t0.006678126721756199,\t\t0.0397397958689204,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.151\t\t],\n\t\t[140,\t\t522,\t\t0,\t\t0.020450247933884298,\t\t0.05408627047793199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.930999999999997\t\t],\n\t\t[145,\t\t146,\t\t0,\t\t0.028527603305785125,\t\t0.07544904460236,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.148\t\t],\n\t\t[147,\t\t523,\t\t0,\t\t0.02461289256198347,\t\t0.0650955220034416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t37.227\t\t],\n\t\t[144,\t\t523,\t\t0,\t\t0.008479338842975206,\t\t0.0224259292904064,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.825\t\t],\n\t\t[139,\t\t523,\t\t0,\t\t0.029245619834710742,\t\t0.0193370088934308,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.116999999999997\t\
t],\n\t\t[140,\t\t141,\t\t0,\t\t0.008362975206611572,\t\t0.022118173847506,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.649000000000001\t\t],\n\t\t[528,\t\t526,\t\t0,\t\t0.015389090909090908,\t\t0.0407006573227188,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.276\t\t],\n\t\t[528,\t\t148,\t\t0,\t\t0.014306115702479338,\t\t0.0378364333712244,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.638\t\t],\n\t\t[149,\t\t150,\t\t0,\t\t0.013604628099173552,\t\t0.035981157661543604,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.576999999999998\t\t],\n\t\t[145,\t\t528,\t\t0,\t\t0.00320595041322314,\t\t0.0084790121737992,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.849\t\t],\n\t\t[530,\t\t151,\t\t0,\t\t0.013144462809917355,\t\t0.0347641247737036,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.881\t\t],\n\t\t[524,\t\t152,\t\t0,\t\t0.014598347107438016,\t\t0.03860931919944,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.08\t\t],\n\t\t[149,\t\t525,\t\t0,\t\t0.016897190082644627,\t\t0.17875695122823998,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t51.114\t\t],\n\t\t[139,\t\t514,\t\t0,\t\t0.007824132231404959,\t\t0.020693056313687997,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.834000000000001\t\t],\n\t\t[126,\t\t120,\t\t0,\t\t0.012780297783933518,\t\t0.458781387757004,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.819\t\t],\n\t\t[530,\t\t153,\t\t0,\t\t0.02254545454545455,\t\t0.059627617060924,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.1\t\t],\n\t\t[528,\t\t147,\t\t0,\t\t0.15786710743801652,\t\t0.104380679149868,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t119.387\t\t],\n\t\t[528,\t\t154,\t\t0,\t\t0.006528264462809917,\t\t0.017265779790547203,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.874\t\t],\n\t\t[130,\t\t120,\t\t0,\t\t0.01450502077562327,\t\t0.5206947188067639,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t83.781\t\t],\n\t\t[528,\t\t155,\t\t0,\t\t0.16064132231404957,\t\t0.1062149715341,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t121.485\t\t],\n\t\t[524,\t\t533,\t\t0,\t\t0.004432727272727273,\t\t0.0468942356109744,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.409\t\t],\n\t\t[524,\t\t149,\t\t0,\t\t0.0056413223140495865,\t\t0.05968007537478799,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.065\t\t],\n\t\t[154,\t\t150,\t\t0,\t\t0.007539173553719007,\t\t0.0199394052006688,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t11.402999999999999\t\t],\n\t\t[157,\t\t110,\t\t0,\t\t0.009962084487534625,\t\t0.357614433044424,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.541000000000004\t\t],\n\t\t[119,\t\t158,\t\t0,\t\t0.0002490189289012004,\t\t0.08045252664623159,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t3,\t\t1,\t\t-360,\t\t4.315\t\t],\n\t\t[159,\t\t60,\t\t0,\t\t0.010967451523545706,\t\t0.0984261617997728,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.674\t\t],\n\t\t[536,\t\t161,\t\t0,\t\t0.021314380165289255,\t\t0.056371704363524,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.238\t\t],\n\t\t[115,\t\t151,\t\t0,\t\t0.00379404958677686,\t\t0.0401376047510724,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.477\t\t],\n\t\t[162,\t\t134,\t\t0,\t\t0.0015910743801652895,\t\t0.016832124393744,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\
t4.813\t\t],\n\t\t[115,\t\t526,\t\t0,\t\t0.0037884297520661154,\t\t0.010019537998747198,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.73\t\t],\n\t\t[138,\t\t87,\t\t0,\t\t0.0011838642659279777,\t\t0.16999131006813442,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t13.675999999999998\t\t],\n\t\t[123,\t\t163,\t\t0,\t\t0.0022778739612188364,\t\t0.08177009602828919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.157\t\t],\n\t\t[112,\t\t164,\t\t0,\t\t0.0008672957063711912,\t\t0.12453516639176802,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.019\t\t],\n\t\t[112,\t\t165,\t\t0,\t\t0.005989439058171744,\t\t0.21500619230086396,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.595\t\t],\n\t\t[166,\t\t165,\t\t0,\t\t0.002632790858725762,\t\t0.09451074335350361,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.207\t\t],\n\t\t[167,\t\t537,\t\t0,\t\t0.00832595041322314,\t\t0.08808100664460242,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.186\t\t],\n\t\t[168,\t\t104,\t\t0,\t\t0.002552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[531,\t\t520,\t\t0,\t\t0.016156694214876033,\t\t0.042730794079516396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.436999999999998\t\t],\n\t\t[139,\t\t520,\t\t0,\t\t0.010682314049586776,\t\t0.0282522993797748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.157\t\t],\n\t\t[520,\t\t169,\t\t0,\t\t0.0011328925619834712,\t\t0.0119849761681232,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t3.427\t\t],\n\t\t[168,\t\t105,\t\t0,\t\t0.007340893351800554,\t\t0.26352009133553606,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.401\t\t],\n\t\t[520,\t\t170,\t\t0,\t\t0.005842644628099174,\t\t0.015452470732151198,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t8.837\t\t],\n\t\t[171,\t\t89,\t\t0,\t\t0.005505454545454546,\t\t0.058242717567848004,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.654\t\t],\n\t\t[521,\t\t172,\t\t0,\t\t0.006304793388429752,\t\t0.06669899780522001,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.072\t\t],\n\t\t[123,\t\t173,\t\t0,\t\t0.005247403047091413,\t\t0.18836891696656402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.309\t\t],\n\t\t[521,\t\t174,\t\t0,\t\t0.013300495867768597,\t\t0.035176796844864404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.117\t\t],\n\t\t[37,\t\t39,\t\t0,\t\t0.004338873499549862,\t\t0.35044859579205606,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t37.592\t\t],\n\t\t[530,\t\t175,\t\t0,\t\t0.013128595041322313,\t\t0.0347221581224188,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.857\t\t],\n\t\t[530,\t\t176,\t\t0,\t\t0.005685289256198347,\t\t0.01503630144005,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.599\t\t],\n\t\t[88,\t\t530,\t\t0,\t\t0.006015867768595041,\t\t0.0159106066755372,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.099\t\t],\n\t\t[177,\t\t496,\t\t0,\t\t0.018632066115702478,\t\t0.19711036673178398,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.361999999999995\t\t],\n\t\t[178,\t\t525,\t\t0,\t\t0.03106842975206612,\t\t0.08216895464241199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.99100000000001\t\t],\n\t\t[179,\t\t493,\t\t0,\t\t0.057079669421487594,\t\t0.15096278779194802,\t\t495.0,\t\t495.0,\t\t4
95.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.333\t\t],\n\t\t[180,\t\t181,\t\t0,\t\t0.041027438016528923,\t\t0.10850827416682,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.053999999999995\t\t],\n\t\t[182,\t\t180,\t\t0,\t\t0.00866314049586777,\t\t0.09164817200545601,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.206\t\t],\n\t\t[179,\t\t181,\t\t0,\t\t0.01957223140495868,\t\t0.051764115772731996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.603\t\t],\n\t\t[180,\t\t493,\t\t0,\t\t0.06676561983471074,\t\t0.17657993119175203,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t100.98299999999999\t\t],\n\t\t[183,\t\t30,\t\t0,\t\t0.0024804362880886427,\t\t0.356166349712776,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.654\t\t],\n\t\t[183,\t\t21,\t\t0,\t\t0.0025647506925207757,\t\t0.36827307214930394,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t29.628\t\t],\n\t\t[538,\t\t185,\t\t0,\t\t0.018631404958677687,\t\t0.0123189607681008,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.09\t\t],\n\t\t[538,\t\t89,\t\t0,\t\t0.014509752066115702,\t\t0.038375005396288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.945999999999998\t\t],\n\t\t[184,\t\t186,\t\t0,\t\t0.0016554709141274237,\t\t0.059427351084826,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.562000000000001\t\t],\n\t\t[184,\t\t187,\t\t0,\t\t0.002698753462603878,\t\t0.09687863927102919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.588\t\t],\n\t\t[520,\t\t172,\t\t0,\t\t0.0034188429752066113,\t\t0.0361682589818792,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.342\t\t],\n\t\t[89,\t\t175,\t\t0,\t\t0.0037309090909090903,\t\t0.0098674088877672,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.643\t\t],\n\t\t[185,\t\t89,\t\t0,\t\t0.005812892561983471,\t\t0.0153737832609196,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.792\t\t],\n\t\t[89,\t\t188,\t\t0,\t\t0.003108760330578513,\t\t0.008221966434607202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.702\t\t],\n\t\t[189,\t\t190,\t\t0,\t\t0.008599492151454294,\t\t0.17364414688031998,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.253\t\t],\n\t\t[539,\t\t172,\t\t0,\t\t0.0021570247933884296,\t\t0.022819366646419197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t6.525\t\t],\n\t\t[504,\t\t192,\t\t0,\t\t0.0003084297520661157,\t\t0.00326290713886456,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.9329999999999999\t\t],\n\t\t[105,\t\t186,\t\t0,\t\t0.003273372576177285,\t\t0.1175060580379876,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.907\t\t],\n\t\t[105,\t\t187,\t\t0,\t\t0.0021712257617728533,\t\t0.0779416868808324,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.540999999999999\t\t],\n\t\t[539,\t\t193,\t\t0,\t\t0.005608595041322314,\t\t0.01483346262541,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.482999999999999\t\t],\n\t\t[187,\t\t194,\t\t0,\t\t4.8649584487534626e-05,\t\t0.0069856037041576,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.562\t\t],\n\t\t[539,\t\t540,\t\t0,\t\t0.004394710743801653,\t\t0.0116230138006708,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.647\t\t],\n\t\t[539,\t\t196,\t\t0,\t\t0.00332297520661157,\t\t0.008788516227194,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.026\t\t],\n\t\t[197,\t\t540,\t\t0,\t\t0.0047371900826446
29,\t\t0.012528794024621601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.165\t\t],\n\t\t[110,\t\t198,\t\t0,\t\t0.00018724030470914128,\t\t0.02688587333118328,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.1630000000000003\t\t],\n\t\t[197,\t\t539,\t\t0,\t\t0.009172231404958677,\t\t0.024258473063998802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.873\t\t],\n\t\t[199,\t\t537,\t\t0,\t\t0.03612826446280991,\t\t0.0238877676441712,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.322\t\t],\n\t\t[134,\t\t526,\t\t0,\t\t0.007771239669421488,\t\t0.020553167475975197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.754000000000001\t\t],\n\t\t[200,\t\t193,\t\t0,\t\t0.0009322314049586776,\t\t0.009862163056380801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.82\t\t],\n\t\t[4,\t\t201,\t\t0,\t\t0.013726108033240996,\t\t0.49273365914097605,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t79.282\t\t],\n\t\t[202,\t\t86,\t\t0,\t\t0.00013365650969529087,\t\t0.00479794133417816,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.772\t\t],\n\t\t[85,\t\t203,\t\t0,\t\t0.0019011426592797783,\t\t0.2729854600553416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.962\t\t],\n\t\t[147,\t\t204,\t\t0,\t\t0.0073874380165289254,\t\t0.0781523963903056,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.346999999999998\t\t],\n\t\t[147,\t\t205,\t\t0,\t\t0.005959669421487603,\t\t0.00394049369636956,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.507\t\t],\n\t\t[123,\t\t206,\t\t0,\t\t0.0005753116343490305,\t\t0.0826091142668064,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t6.646\t\t],\n\t\t[537,\t\t207,\t\t0,\t\t0.018456198347107437,\t\t0.048812461297776,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.915\t\t],\n\t\t[165,\t\t208,\t\t0,\t\t0.00414612188365651,\t\t0.14883562055771601,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.948\t\t],\n\t\t[4,\t\t94,\t\t0,\t\t0.013687673130193905,\t\t0.49135394025941603,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t79.06\t\t],\n\t\t[4,\t\t2,\t\t0,\t\t5.2054478301015697e-05,\t\t0.016817654469309,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t3,\t\t1,\t\t-360,\t\t0.902\t\t],\n\t\t[209,\t\t4,\t\t0,\t\t0.0022369286703601107,\t\t0.32120104149338397,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.840999999999998\t\t],\n\t\t[119,\t\t163,\t\t0,\t\t0.003535145429362881,\t\t0.12690306230914922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.419\t\t],\n\t\t[210,\t\t3,\t\t0,\t\t0.0003150969529085873,\t\t0.011311208844832242,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.82\t\t],\n\t\t[99,\t\t211,\t\t0,\t\t0.0035045013850415513,\t\t0.1258030161741948,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.242\t\t],\n\t\t[99,\t\t69,\t\t0,\t\t0.021717970914127423,\t\t0.7796219621557,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t125.443\t\t],\n\t\t[212,\t\t99,\t\t0,\t\t0.008453774238227147,\t\t0.30346978938770003,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.82899999999999\t\t],\n\t\t[213,\t\t214,\t\t0,\t\t0.01490115702479339,\t\t0.15764073118032798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t45.076\t\t],\n\t\t[510,\t\t215,\t\t0,\t\t0.002174710743801653,\t\t0.09202587186721281,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t13.157\t
\t],\n\t\t[128,\t\t69,\t\t0,\t\t0.010711651662049862,\t\t1.538088234801848,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t123.741\t\t],\n\t\t[216,\t\t69,\t\t0,\t\t0.009628462603878117,\t\t1.3825528982351443,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t111.228\t\t],\n\t\t[217,\t\t98,\t\t0,\t\t0.0012787396121883656,\t\t0.045903620070299994,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.386\t\t],\n\t\t[504,\t\t218,\t\t0,\t\t0.027480991735537193,\t\t0.072680994226412,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.565\t\t],\n\t\t[177,\t\t504,\t\t0,\t\t0.07054809917355372,\t\t0.18658373169634002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t106.704\t\t],\n\t\t[219,\t\t209,\t\t0,\t\t0.003938798476454294,\t\t0.5655728721401839,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t45.501000000000005\t\t],\n\t\t[219,\t\t220,\t\t0,\t\t0.0013026315789473684,\t\t0.1870451326342096,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t15.048\t\t],\n\t\t[94,\t\t95,\t\t0,\t\t0.01070740997229917,\t\t0.38436979242743197,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.846000000000004\t\t],\n\t\t[159,\t\t221,\t\t0,\t\t0.009937153739612188,\t\t0.356719480257712,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.397\t\t],\n\t\t[34,\t\t161,\t\t0,\t\t0.010965289256198347,\t\t0.116002818645824,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.17\t\t],\n\t\t[222,\t\t221,\t\t0,\t\t0.0046457756232686975,\t\t0.16677196601221997,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.834\t\t],\n\t\t[211,\t\t52,\t\t0,\t\t0.05267313019390582,\t\t0.472709090515552,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t152.12\t\t],\n\t\t[215,\t\t223,\t\t0,\t\t0.04873190082644628,\t\t0.128884831985184,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.707\t\t],\n\t\t[224,\t\t215,\t\t0,\t\t0.019086280991735535,\t\t0.050478887076288004,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.868000000000002\t\t],\n\t\t[225,\t\t224,\t\t0,\t\t0.04200925619834711,\t\t0.11110496071615601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.538999999999994\t\t],\n\t\t[224,\t\t223,\t\t0,\t\t0.031061818181818183,\t\t0.082151468537468,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.981\t\t],\n\t\t[226,\t\t6,\t\t0,\t\t0.06420099173553719,\t\t0.0424492677936932,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.552\t\t],\n\t\t[7,\t\t3,\t\t0,\t\t0.009332929362880887,\t\t0.335029305054692,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t53.907\t\t],\n\t\t[216,\t\t227,\t\t0,\t\t0.01989941135734072,\t\t0.7143401282507,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.939\t\t],\n\t\t[228,\t\t229,\t\t0,\t\t0.010545454545454545,\t\t0.027890337012274,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.95\t\t],\n\t\t[227,\t\t230,\t\t0,\t\t0.003993074792243767,\t\t0.573366419334696,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t46.128\t\t],\n\t\t[231,\t\t53,\t\t0,\t\t0.007193213296398893,\t\t1.0328749562310842,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t83.096\t\t],\n\t\t[544,\t\t545,\t\t0,\t\t0.013061818181818181,\t\t0.034545548464856,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.756\t\t],\n\t\t[234,\t\t235,\t\t0,\t\t0.04608859504132231,\t\t0.121893887321888,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t
69.709\t\t],\n\t\t[546,\t\t214,\t\t0,\t\t0.057025454545454546,\t\t0.15081940173295602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.251\t\t],\n\t\t[233,\t\t227,\t\t0,\t\t0.0029001038781163438,\t\t0.1041066260218888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.750999999999998\t\t],\n\t\t[237,\t\t238,\t\t0,\t\t0.026324628099173554,\t\t0.06962267451304,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.816\t\t],\n\t\t[212,\t\t100,\t\t0,\t\t0.007955505540166205,\t\t0.285583163531816,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.951\t\t],\n\t\t[519,\t\t239,\t\t0,\t\t0.01740429752066116,\t\t0.046030422038308406,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.324\t\t],\n\t\t[238,\t\t519,\t\t0,\t\t0.015166280991735538,\t\t0.040111375593995205,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.939\t\t],\n\t\t[213,\t\t240,\t\t0,\t\t0.01665388429752066,\t\t0.04404574915373599,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.189\t\t],\n\t\t[241,\t\t242,\t\t0,\t\t0.009862015235457064,\t\t0.3540221919932281,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.963\t\t],\n\t\t[70,\t\t241,\t\t0,\t\t0.003819858033240997,\t\t0.5484941897752321,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.126999999999995\t\t],\n\t\t[509,\t\t213,\t\t0,\t\t0.011363636363636364,\t\t0.120216969880216,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t34.375\t\t],\n\t\t[68,\t\t243,\t\t0,\t\t0.003611668975069252,\t\t0.1296500701715312,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.861\t\t],\n\t\t[243,\t\t244,\t\t0,\t\t0.0007699099722991691,\t\t0.027637882270859202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.447\t\t],\n\t\t[68,\t\t244,\t\t0,\t\t0.004104051246537396,\t\t0.147325387728876,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.705\t\t],\n\t\t[544,\t\t547,\t\t0,\t\t0.02418776859504132,\t\t0.255884661882476,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.168\t\t],\n\t\t[245,\t\t227,\t\t0,\t\t0.012676419667590028,\t\t0.45505241780707606,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.219\t\t],\n\t\t[246,\t\t208,\t\t0,\t\t0.0010155817174515235,\t\t0.0364568961999408,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.8660000000000005\t\t],\n\t\t[112,\t\t208,\t\t0,\t\t0.0017927631578947367,\t\t0.0643558063672372,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.355\t\t],\n\t\t[165,\t\t247,\t\t0,\t\t0.0002113919667590028,\t\t0.0075884538459086,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.2209999999999999\t\t],\n\t\t[537,\t\t549,\t\t0,\t\t0.00032066115702479337,\t\t0.00084807607842936,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.485\t\t],\n\t\t[537,\t\t550,\t\t0,\t\t0.00032198347107438016,\t\t0.0008515732993697601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.48700000000000004\t\t],\n\t\t[537,\t\t551,\t\t0,\t\t0.0002651239669421488,\t\t0.0007011927988648,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.401\t\t],\n\t\t[110,\t\t251,\t\t0,\t\t0.00023857340720221602,\t\t0.008564200982522441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.3780000000000001\t\t],\n\t\t[510,\t\t252,\t\t0,\t\t0.08467702479338843,\t\t0.055987884365424005,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.03699999999999\t\t],\n\t\t[529,\t\t253,\t\t0,\t\t0.04859504132231405,
\t\t0.12852286961777998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.5\t\t],\n\t\t[237,\t\t239,\t\t0,\t\t0.03309421487603306,\t\t0.08752669712542799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.055\t\t],\n\t\t[254,\t\t238,\t\t0,\t\t0.07815008264462811,\t\t0.05167231372274401,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t59.101000000000006\t\t],\n\t\t[69,\t\t255,\t\t0,\t\t0.0009369806094182826,\t\t0.134541235754472,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.824000000000002\t\t],\n\t\t[510,\t\t225,\t\t0,\t\t0.021953719008264466,\t\t0.232250442756508,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.41\t\t],\n\t\t[256,\t\t257,\t\t0,\t\t0.010125619834710746,\t\t0.0267799693631888,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.315\t\t],\n\t\t[258,\t\t190,\t\t0,\t\t0.011717451523545707,\t\t0.10515695255750121,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.84\t\t],\n\t\t[258,\t\t259,\t\t0,\t\t0.015782548476454293,\t\t0.1416387085570408,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.58\t\t],\n\t\t[260,\t\t261,\t\t0,\t\t0.006791031855955679,\t\t0.9751256416231477,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t78.45\t\t],\n\t\t[554,\t\t553,\t\t0,\t\t0.17583338842975205,\t\t0.11625986438453201,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t132.974\t\t],\n\t\t[515,\t\t263,\t\t0,\t\t0.006987107438016529,\t\t0.0739172618295936,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.136\t\t],\n\t\t[14,\t\t264,\t\t0,\t\t0.01700694214876033,\t\t0.17991802858084,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.446000000000005\t\t],\n\t\t[116,\t\t555,\t\t0,\t\t0.0009768595041322315,\t\t0.0103342878835768,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.955\t\t],\n\t\t[151,\t\t116,\t\t0,\t\t0.007244958677685951,\t\t0.0191612735410668,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.958\t\t],\n\t\t[111,\t\t114,\t\t0,\t\t0.008806613573407202,\t\t0.3161358573133961,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.867\t\t],\n\t\t[77,\t\t111,\t\t0,\t\t0.00288452216066482,\t\t0.41418912211817605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.321999999999996\t\t],\n\t\t[266,\t\t525,\t\t0,\t\t0.01042909090909091,\t\t0.027582581569373602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.774000000000001\t\t],\n\t\t[267,\t\t120,\t\t0,\t\t0.013136945983379503,\t\t0.471584184581432,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.87899999999999\t\t],\n\t\t[268,\t\t269,\t\t0,\t\t0.0010327272727272726,\t\t0.0027313295556817604,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.5619999999999998\t\t],\n\t\t[556,\t\t271,\t\t0,\t\t0.052289586776859506,\t\t0.0345735262323792,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.544000000000004\t\t],\n\t\t[556,\t\t272,\t\t0,\t\t0.04685355371900827,\t\t0.030979257409249603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.433\t\t],\n\t\t[529,\t\t273,\t\t0,\t\t0.0034604958677685953,\t\t0.009152227205140799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.234\t\t],\n\t\t[128,\t\t274,\t\t0,\t\t0.0029350761772853184,\t\t0.1053620459045884,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.953\t\t],\n\t\t[34,\t\t275,\t\t0,\t\t0.0008290909090909092,\t\t0.00054818938265696,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0
.627\t\t],\n\t\t[503,\t\t276,\t\t0,\t\t0.006707438016528925,\t\t0.07095861291266,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t20.29\t\t],\n\t\t[503,\t\t504,\t\t0,\t\t0.06432727272727272,\t\t0.680524223098808,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t194.59\t\t],\n\t\t[177,\t\t218,\t\t0,\t\t0.04330380165289256,\t\t0.114528740018308,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t65.497\t\t],\n\t\t[277,\t\t278,\t\t0,\t\t0.007191135734072023,\t\t1.032576638635032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t83.072\t\t],\n\t\t[557,\t\t558,\t\t0,\t\t0.04341289256198347,\t\t0.258338836678648,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t98.493\t\t],\n\t\t[557,\t\t559,\t\t0,\t\t0.03415867768595042,\t\t0.09034195998366001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.665\t\t],\n\t\t[559,\t\t558,\t\t0,\t\t0.04474314049586777,\t\t0.11833546501370001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.67399999999999\t\t],\n\t\t[277,\t\t78,\t\t0,\t\t0.03585768698060942,\t\t0.32180078416049196,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t103.557\t\t],\n\t\t[277,\t\t279,\t\t0,\t\t0.021390927977839334,\t\t0.191970480441328,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.777\t\t],\n\t\t[78,\t\t279,\t\t0,\t\t0.015811980609418283,\t\t0.1419028439283376,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.665\t\t],\n\t\t[281,\t\t282,\t\t0,\t\t0.0023178670360110803,\t\t0.08320574945862161,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.388\t\t],\n\t\t[283,\t\t161,\t\t0,\t\t0.036741157024793386,\t\t0.09717203248350399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.571000000000005\t\t],\n\t\t[268,\t\t161,\t\t0,\t\t0.018883636363636366,\t\t0.199771751868832,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.123000000000005\t\t],\n\t\t[256,\t\t284,\t\t0,\t\t0.010755371900826446,\t\t0.113782083346976,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t32.535\t\t],\n\t\t[515,\t\t516,\t\t0,\t\t0.04071140495867769,\t\t0.107672438361532,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.576\t\t],\n\t\t[263,\t\t516,\t\t0,\t\t0.0030355371900826445,\t\t0.128452925198488,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t18.365\t\t],\n\t\t[516,\t\t285,\t\t0,\t\t0.006908429752066116,\t\t0.018271230811372,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.449000000000002\t\t],\n\t\t[63,\t\t286,\t\t0,\t\t0.019088925619834708,\t\t0.050485881518556,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.872\t\t],\n\t\t[287,\t\t516,\t\t0,\t\t0.01732892561983471,\t\t0.011457770111127998,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.105\t\t],\n\t\t[8,\t\t102,\t\t0,\t\t0.015100069252077563,\t\t0.542055501663692,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t87.21799999999999\t\t],\n\t\t[8,\t\t101,\t\t0,\t\t0.019246883656509697,\t\t0.69091598202144,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t111.17\t\t],\n\t\t[80,\t\t288,\t\t0,\t\t0.007984072022160666,\t\t0.2866086302684072,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t46.11600000000001\t\t],\n\t\t[80,\t\t289,\t\t0,\t\t0.0003782317636201524,\t\t0.122198345223416,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t6.553999999999999\t\t],\n\t\t[276,\t\t560,\t\t0,\t\t0.01778314049586777,\t\t0.047032375838192794,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,
\t\t2,\t\t1,\t\t-360,\t\t26.897\t\t],\n\t\t[37,\t\t290,\t\t0,\t\t0.005629501385041551,\t\t0.4546919507138321,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t48.773999999999994\t\t],\n\t\t[290,\t\t74,\t\t0,\t\t0.02071595106187673,\t\t1.673216783321968,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t179.483\t\t],\n\t\t[512,\t\t291,\t\t0,\t\t0.0053299173553719,\t\t0.056385693247479204,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t16.123\t\t],\n\t\t[78,\t\t292,\t\t0,\t\t0.0058149815327908595,\t\t0.469673087481408,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t50.381\t\t],\n\t\t[199,\t\t548,\t\t0,\t\t0.0015530578512396695,\t\t0.00410748599634868,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.349\t\t],\n\t\t[491,\t\t293,\t\t0,\t\t0.014176528925619833,\t\t0.009373426429729999,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.720999999999998\t\t],\n\t\t[4,\t\t294,\t\t0,\t\t9.669321329639889e-05,\t\t0.013884198109531681,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.117\t\t],\n\t\t[490,\t\t541,\t\t0,\t\t0.050580495867768596,\t\t0.133773946861896,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.503\t\t],\n\t\t[491,\t\t295,\t\t0,\t\t0.010613553719008264,\t\t0.028070443890777202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.053\t\t],\n\t\t[491,\t\t296,\t\t0,\t\t0.004400661157024794,\t\t0.0116387512948784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.656000000000001\t\t],\n\t\t[295,\t\t297,\t\t0,\t\t0.020297520661157024,\t\t0.053682341459340005,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.7\t\t],\n\t\t[508,\t\t161,\t\t0,\t\t0.023239669421487603,\t\t0.061463658055360006,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.15\t\t],\n\t\t[117,\t\t123,\t\t0,\t\t0.005876211911357341,\t\t0.21094161505628,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.941\t\t],\n\t\t[133,\t\t117,\t\t0,\t\t0.004469182825484764,\t\t0.0401081792747688,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.907\t\t],\n\t\t[71,\t\t74,\t\t0,\t\t0.03904524469065097,\t\t0.7884161162841721,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t169.144\t\t],\n\t\t[74,\t\t278,\t\t0,\t\t0.0077122576177285325,\t\t1.10740463560792,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t89.09200000000001\t\t],\n\t\t[298,\t\t515,\t\t0,\t\t0.021701157024793388,\t\t0.05739464148919599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.823\t\t],\n\t\t[5,\t\t299,\t\t0,\t\t0.0016232686980609415,\t\t0.058271370400665996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.376\t\t],\n\t\t[32,\t\t292,\t\t0,\t\t0.009679362880886427,\t\t0.34746541983297996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.908\t\t],\n\t\t[5,\t\t29,\t\t0,\t\t0.00743395083102493,\t\t1.0674425076571843,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t85.87700000000001\t\t],\n\t\t[503,\t\t560,\t\t0,\t\t0.015140495867768593,\t\t0.160172719142436,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.8\t\t],\n\t\t[300,\t\t301,\t\t0,\t\t0.004892053324099723,\t\t0.7024509290644521,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.513000000000005\t\t],\n\t\t[51,\t\t300,\t\t0,\t\t0.002573493767313019,\t\t0.3695284920307039,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.729\t\t],\n\t\t[244,\t\t302,\t\t0,\t\t0.007714508310249307,\t\t1.10772781300
4004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t89.118\t\t],\n\t\t[31,\t\t302,\t\t0,\t\t0.004369113573407203,\t\t0.6273619041941161,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.472\t\t],\n\t\t[51,\t\t282,\t\t0,\t\t0.006288434903047093,\t\t0.9029576432132521,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.64399999999999\t\t],\n\t\t[303,\t\t304,\t\t0,\t\t8.795013850415512e-05,\t\t0.000789298639172312,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.254\t\t],\n\t\t[305,\t\t304,\t\t0,\t\t0.003881117266849031,\t\t0.0783689646873844,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.813\t\t],\n\t\t[305,\t\t259,\t\t0,\t\t0.0025625,\t\t0.36794989475177603,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t29.601999999999997\t\t],\n\t\t[306,\t\t307,\t\t0,\t\t0.03223268698060942,\t\t0.289268628831688,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t93.088\t\t],\n\t\t[305,\t\t308,\t\t0,\t\t0.0024272853185595567,\t\t0.0217833994511184,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.01\t\t],\n\t\t[305,\t\t309,\t\t0,\t\t0.011014773776523545,\t\t0.22241441259921202,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.716\t\t],\n\t\t[310,\t\t309,\t\t0,\t\t0.009565962603878117,\t\t0.343394627639832,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.253\t\t],\n\t\t[306,\t\t309,\t\t0,\t\t0.035333795013850415,\t\t0.31709917455019604,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t102.044\t\t],\n\t\t[311,\t\t280,\t\t0,\t\t0.003433691135734072,\t\t0.1232611016590444,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.833\t\t],\n\t\t[280,\t\t278,\t\t0,\t\t0.009749769159764544,\t\t0.7874838737974121,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t84.47200000000001\t\t],\n\t\t[311,\t\t32,\t\t0,\t\t0.01205909510619806,\t\t0.9740069506375919,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t104.48\t\t],\n\t\t[13,\t\t312,\t\t0,\t\t0.0043324965373961214,\t\t0.622104056565324,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.049\t\t],\n\t\t[313,\t\t314,\t\t0,\t\t0.006092624653739613,\t\t0.218710302449316,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.191\t\t],\n\t\t[312,\t\t313,\t\t0,\t\t0.00893957756232687,\t\t0.32090893884734,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.635\t\t],\n\t\t[547,\t\t566,\t\t0,\t\t0.027035702479338848,\t\t0.286013220297816,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.783\t\t],\n\t\t[245,\t\t315,\t\t0,\t\t0.014162569252077564,\t\t0.508401547875772,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.803\t\t],\n\t\t[312,\t\t316,\t\t0,\t\t8.803670360110802e-05,\t\t0.01264120812658816,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0170000000000001\t\t],\n\t\t[312,\t\t314,\t\t0,\t\t0.005339854570637119,\t\t0.191687700220296,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.843000000000004\t\t],\n\t\t[554,\t\t546,\t\t0,\t\t0.08174743801652892,\t\t0.21620344446439202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t123.64299999999999\t\t],\n\t\t[262,\t\t216,\t\t0,\t\t0.042641966759002774,\t\t0.38268554099981195,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t123.15\t\t],\n\t\t[317,\t\t233,\t\t0,\t\t0.005647276084951523,\t\t0.114031901035644,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.464000000000002\t\t],\n\t\
t[318,\t\t317,\t\t0,\t\t0.008311634349030471,\t\t0.16783161497270002,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t36.006\t\t],\n\t\t[231,\t\t52,\t\t0,\t\t0.035263677285318554,\t\t1.2658796434850879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t203.683\t\t],\n\t\t[319,\t\t567,\t\t0,\t\t0.006089586776859504,\t\t0.0644223069721,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.421\t\t],\n\t\t[557,\t\t321,\t\t0,\t\t0.010004628099173555,\t\t0.10583989458750401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.264\t\t],\n\t\t[277,\t\t65,\t\t0,\t\t0.009430170821779778,\t\t0.7616700793261759,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t81.703\t\t],\n\t\t[322,\t\t288,\t\t0,\t\t0.006545013850415513,\t\t0.528637424797136,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.706\t\t],\n\t\t[322,\t\t323,\t\t0,\t\t0.0018503000923372577,\t\t0.14944779312484,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t16.031\t\t],\n\t\t[277,\t\t324,\t\t0,\t\t0.019719529085872576,\t\t0.39818407235049996,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t85.425\t\t],\n\t\t[324,\t\t325,\t\t0,\t\t0.01103508771932133,\t\t0.22282459929396403,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.803999999999995\t\t],\n\t\t[277,\t\t325,\t\t0,\t\t0.008665743305609418,\t\t0.174981914850048,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.54\t\t],\n\t\t[326,\t\t327,\t\t0,\t\t0.007654214876033058,\t\t0.0202436634226288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.577\t\t],\n\t\t[328,\t\t326,\t\t0,\t\t0.10300958677685952,\t\t0.068109252150368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.90100000000001\t\t],\n\t\t[328,\t\t327,\t\t0,\t\t0.09827173553719008,\t\t0.064976616491468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.318\t\t],\n\t\t[326,\t\t329,\t\t0,\t\t0.028062148760330575,\t\t0.07421802283046801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.443999999999996\t\t],\n\t\t[568,\t\t329,\t\t0,\t\t0.05699900826446282,\t\t0.15074945731414802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.211\t\t],\n\t\t[568,\t\t326,\t\t0,\t\t0.03218644628099173,\t\t0.08512585494846397,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.681999999999995\t\t],\n\t\t[332,\t\t78,\t\t0,\t\t0.006471029547541551,\t\t0.522661750455416,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.065\t\t],\n\t\t[333,\t\t306,\t\t0,\t\t0.008580159279778392,\t\t0.308006702824228,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.559\t\t],\n\t\t[332,\t\t333,\t\t0,\t\t0.007504674515235457,\t\t0.26939943395502003,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.347\t\t],\n\t\t[332,\t\t334,\t\t0,\t\t0.017124653739612188,\t\t0.15368328149175597,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.456\t\t],\n\t\t[66,\t\t334,\t\t0,\t\t0.030625,\t\t0.27484062260471603,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t88.445\t\t],\n\t\t[330,\t\t335,\t\t0,\t\t0.00550536703601108,\t\t0.790516769355108,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.598\t\t],\n\t\t[336,\t\t66,\t\t0,\t\t0.015054362880886425,\t\t0.1351036887216764,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.477\t\t],\n\t\t[330,\t\t336,\t\t0,\t\t0.039036357340720224,\t\t0.350327404269788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t112.737000
00000001\t\t],\n\t\t[68,\t\t70,\t\t0,\t\t0.016314058171745152,\t\t0.14640868261713597,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.115\t\t],\n\t\t[509,\t\t337,\t\t0,\t\t0.03494082644628099,\t\t0.09241056617056001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t52.848\t\t],\n\t\t[324,\t\t288,\t\t0,\t\t0.012627423822714683,\t\t0.11332339674541761,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t36.468\t\t],\n\t\t[338,\t\t559,\t\t0,\t\t0.009228099173553718,\t\t0.097624922595552,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t27.915\t\t],\n\t\t[339,\t\t559,\t\t0,\t\t0.03560595041322315,\t\t0.023542417076125203,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.927\t\t],\n\t\t[339,\t\t340,\t\t0,\t\t0.08711537190082644,\t\t0.23040041287850396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t131.762\t\t],\n\t\t[559,\t\t340,\t\t0,\t\t0.20983272727272728,\t\t0.138740000599684,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t158.686\t\t],\n\t\t[341,\t\t292,\t\t0,\t\t0.0009329409048961218,\t\t0.07535316024134399,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.083\t\t],\n\t\t[557,\t\t342,\t\t0,\t\t0.006019834710743802,\t\t0.0636843933534336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t18.21\t\t],\n\t\t[558,\t\t343,\t\t0,\t\t0.010650247933884296,\t\t0.11266996708783199,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.217\t\t],\n\t\t[502,\t\t340,\t\t0,\t\t0.021737520661157025,\t\t0.22996326026071198,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t65.756\t\t],\n\t\t[72,\t\t32,\t\t0,\t\t0.00675502077562327,\t\t0.969954803293024,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t78.03399999999999\t\t],\n\t\t[344,\t\t345,\t\t0,\t\t0.0005762927054480609,\t\t0.04654686738645321,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.993\t\t],\n\t\t[346,\t\t47,\t\t0,\t\t0.0011340027700831024,\t\t0.04070792194158799,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.55\t\t],\n\t\t[46,\t\t47,\t\t0,\t\t0.0008975069252077563,\t\t0.0322183003580208,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.184\t\t],\n\t\t[346,\t\t345,\t\t0,\t\t0.0007217797783933517,\t\t0.025910126194627202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.169\t\t],\n\t\t[347,\t\t328,\t\t0,\t\t0.029905454545454544,\t\t0.07909314882361201,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.232\t\t],\n\t\t[347,\t\t348,\t\t0,\t\t0.04883438016528925,\t\t0.129155866607944,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.862\t\t],\n\t\t[571,\t\t348,\t\t0,\t\t0.041548429752066116,\t\t0.10988617921762801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.842\t\t],\n\t\t[347,\t\t572,\t\t0,\t\t0.016052231404958678,\t\t0.04245451362512801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.279\t\t],\n\t\t[571,\t\t570,\t\t0,\t\t0.17379041322314048,\t\t0.11490906279551602,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t131.429\t\t],\n\t\t[14,\t\t350,\t\t0,\t\t0.02166743801652892,\t\t0.05730546235524,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.772\t\t],\n\t\t[350,\t\t573,\t\t0,\t\t0.026277685950413226,\t\t0.06949852316919598,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.745\t\t],\n\t\t[15,\t\t351,\t\t0,\t\t0.02639265927977839,\t\t0.236857956201204,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.222\t\t],\n\t\t[352,\t\
t15,\t\t0,\t\t0.0015260560941828254,\t\t0.219126704094076,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.629\t\t],\n\t\t[15,\t\t335,\t\t0,\t\t0.0035338758079432133,\t\t1.1417173740880242,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.235\t\t],\n\t\t[232,\t\t227,\t\t0,\t\t5.5747922437673134e-05,\t\t0.000500303468136644,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.161\t\t],\n\t\t[565,\t\t544,\t\t0,\t\t0.0394803305785124,\t\t0.10441652566461601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t59.714\t\t],\n\t\t[235,\t\t567,\t\t0,\t\t0.02391404958677686,\t\t0.25298896294275997,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.34\t\t],\n\t\t[567,\t\t286,\t\t0,\t\t0.008068760330578512,\t\t0.34144067500694797,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.816\t\t],\n\t\t[353,\t\t519,\t\t0,\t\t0.007621818181818182,\t\t0.080631926038356,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.055999999999997\t\t],\n\t\t[354,\t\t353,\t\t0,\t\t0.0008436363636363636,\t\t0.00892490784392768,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.552\t\t],\n\t\t[355,\t\t354,\t\t0,\t\t0.0068502479338842966,\t\t0.0181173530898976,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.360999999999999\t\t],\n\t\t[354,\t\t356,\t\t0,\t\t0.01855404958677686,\t\t0.049071255647172,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.063000000000002\t\t],\n\t\t[357,\t\t358,\t\t0,\t\t0.0034823407202216067,\t\t0.5000300103406239,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.228\t\t],\n\t\t[574,\t\t359,\t\t0,\t\t0.013352066115702478,\t\t0.0353131884615884,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.195\t\t],\n\t\t[235,\t\t575,\t\t0,\t\t0.007459504132231404,\t\t0.0789147905557,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.565\t\t],\n\t\t[167,\t\t361,\t\t0,\t\t0.000616198347107438,\t\t0.0065188198358579995,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.864\t\t],\n\t\t[528,\t\t362,\t\t0,\t\t0.0011960330578512398,\t\t0.012652945368078402,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.6180000000000003\t\t],\n\t\t[363,\t\t344,\t\t0,\t\t0.0002662742382271468,\t\t0.009558592968871479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.538\t\t],\n\t\t[259,\t\t364,\t\t0,\t\t0.013069713758102496,\t\t0.26390852570525997,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.618\t\t],\n\t\t[54,\t\t56,\t\t0,\t\t0.007723337950138504,\t\t0.0693122289241068,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.305\t\t],\n\t\t[365,\t\t364,\t\t0,\t\t0.0049974607571537395,\t\t0.10091058802821559,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.649\t\t],\n\t\t[231,\t\t366,\t\t0,\t\t0.0013273891966759002,\t\t0.0476500209962672,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.667000000000001\t\t],\n\t\t[30,\t\t367,\t\t0,\t\t0.01126108033240997,\t\t0.1010613005635992,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.522\t\t],\n\t\t[61,\t\t367,\t\t0,\t\t0.020337603878116343,\t\t0.18251754162067196,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.735\t\t],\n\t\t[254,\t\t368,\t\t0,\t\t0.0004297520661157025,\t\t0.00454638722456732,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.3\t\t],\n\t\t[254,\t\t369,\t\t0,\t\t0.00015999999999999999,\t\t0.00169265493591832,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\
t1,\t\t-360,\t\t0.484\t\t],\n\t\t[254,\t\t370,\t\t0,\t\t0.0003669421487603306,\t\t0.0038819152455960805,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.11\t\t],\n\t\t[99,\t\t358,\t\t0,\t\t0.0020184383656509696,\t\t0.28982797432374396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.316999999999997\t\t],\n\t\t[354,\t\t519,\t\t0,\t\t0.006762644628099174,\t\t0.07154264880985199,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.457\t\t],\n\t\t[571,\t\t371,\t\t0,\t\t0.023726942148760328,\t\t0.06275238397221199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.887\t\t],\n\t\t[207,\t\t372,\t\t0,\t\t0.002329256198347108,\t\t0.006160354689297601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.523\t\t],\n\t\t[57,\t\t373,\t\t0,\t\t0.0017725619834710745,\t\t0.0046880246727212796,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.681\t\t],\n\t\t[209,\t\t374,\t\t0,\t\t0.0010122922437673131,\t\t0.0363388121515216,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.847\t\t],\n\t\t[375,\t\t376,\t\t0,\t\t0.0045364727608518006,\t\t0.0916021467933684,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.652\t\t],\n\t\t[376,\t\t377,\t\t0,\t\t0.0030886426592797783,\t\t0.062367022394423606,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.38\t\t],\n\t\t[16,\t\t49,\t\t0,\t\t0.002266101108033241,\t\t0.32538991773524,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.178\t\t],\n\t\t[318,\t\t377,\t\t0,\t\t0.004755078485685596,\t\t0.0960163149704152,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.599\t\t],\n\t\t[378,\t\t297,\t\t0,\t\t0.01753917355371901,\t\t0.046387138574374404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.528000000000002\t\t],\n\t\t[562,\t\t379,\t\t0,\t\t0.01802314049586777,\t\t0.047667121439141605,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.26\t\t],\n\t\t[576,\t\t563,\t\t0,\t\t0.001808264462809917,\t\t0.004782449638150801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.735\t\t],\n\t\t[576,\t\t381,\t\t0,\t\t0.0034320661157024794,\t\t0.009077036954898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.191\t\t],\n\t\t[577,\t\t576,\t\t0,\t\t0.06004495867768594,\t\t0.15880530575430396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.818\t\t],\n\t\t[244,\t\t383,\t\t0,\t\t0.006845567867036011,\t\t0.1382282547912684,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.655\t\t],\n\t\t[244,\t\t306,\t\t0,\t\t0.02679108956599723,\t\t0.5409756541164079,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t116.059\t\t],\n\t\t[383,\t\t306,\t\t0,\t\t0.0300685595567867,\t\t0.269846910348376,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.838\t\t],\n\t\t[380,\t\t306,\t\t0,\t\t0.00025605955678670365,\t\t0.03676764369572,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.958\t\t],\n\t\t[252,\t\t225,\t\t0,\t\t0.062094545454545444,\t\t0.041056499553586,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.958999999999996\t\t],\n\t\t[220,\t\t76,\t\t0,\t\t0.002772074099722992,\t\t0.398042682239984,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.023\t\t],\n\t\t[542,\t\t384,\t\t0,\t\t0.007939834710743802,\t\t0.020999063146094,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.009\t\t],\n\t\t[385,\t\t384,\t\t0,\t\t0.053734876033057856,\t\t0.035529141854791196,\t\t248.0,\t\t248.0,\t\t248.0
,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.637\t\t],\n\t\t[542,\t\t385,\t\t0,\t\t0.011306115702479337,\t\t0.119608453436296,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t34.201\t\t],\n\t\t[386,\t\t385,\t\t0,\t\t0.003668760330578512,\t\t0.0388121580140316,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.097999999999999\t\t],\n\t\t[387,\t\t578,\t\t0,\t\t0.015444628099173553,\t\t0.16339016240905604,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.72\t\t],\n\t\t[332,\t\t388,\t\t0,\t\t0.014036184210526315,\t\t0.5038646344377999,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.07300000000001\t\t],\n\t\t[382,\t\t332,\t\t0,\t\t0.017764369806094183,\t\t0.637697365901468,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t102.60700000000001\t\t],\n\t\t[382,\t\t388,\t\t0,\t\t0.00476159972299169,\t\t0.17092976750548,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.503\t\t],\n\t\t[579,\t\t578,\t\t0,\t\t0.01911074380165289,\t\t0.050543585664,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.905\t\t],\n\t\t[577,\t\t387,\t\t0,\t\t0.07597818181818182,\t\t0.20094506949431204,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.917\t\t],\n\t\t[144,\t\t390,\t\t0,\t\t0.0004277685950413223,\t\t0.0011313509747276,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.647\t\t],\n\t\t[37,\t\t49,\t\t0,\t\t0.008441481994459835,\t\t0.303028527944352,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t48.758\t\t],\n\t\t[391,\t\t233,\t\t0,\t\t0.014211218836565096,\t\t0.1275369872004348,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.042\t\t],\n\t\t[392,\t\t310,\t\t0,\t\t0.007035318559556785,\t\t0.06313767618386361,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.317999999999998\t\t],\n\t\t[260,\t\t393,\t\t0,\t\t0.006341412742382271,\t\t0.0569102963692744,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.314\t\t],\n\t\t[394,\t\t230,\t\t0,\t\t0.0007590027700831025,\t\t0.00681158510656168,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.1919999999999997\t\t],\n\t\t[395,\t\t282,\t\t0,\t\t0.008762984764542936,\t\t0.314569689934484,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.615\t\t],\n\t\t[395,\t\t244,\t\t0,\t\t0.0034046052631578946,\t\t0.12221699007344,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.665\t\t],\n\t\t[25,\t\t396,\t\t0,\t\t0.008809037396121884,\t\t0.316222866612064,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.881\t\t],\n\t\t[81,\t\t74,\t\t0,\t\t0.0075207756232686974,\t\t0.26997742429652244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.44\t\t],\n\t\t[278,\t\t80,\t\t0,\t\t0.016286011080332407,\t\t0.5846279085788,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t94.068\t\t],\n\t\t[81,\t\t278,\t\t0,\t\t0.021054016620498613,\t\t0.755787629231688,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t121.60799999999999\t\t],\n\t\t[569,\t\t570,\t\t0,\t\t0.03253950413223141,\t\t0.08605961294018,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.216\t\t],\n\t\t[397,\t\t552,\t\t0,\t\t0.006289586776859504,\t\t0.0166345314104904,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.513\t\t],\n\t\t[542,\t\t398,\t\t0,\t\t0.0005580165289256199,\t\t0.0059033089500572,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.6880000000000002\t\t],\n\t\t[398,\t\t385,\t\t0,\t\t0.021893553719008262,\t\t0.0579
0348713648401,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.114000000000004\t\t],\n\t\t[399,\t\t499,\t\t0,\t\t0.03266380165289256,\t\t0.021597087927192803,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.701999999999998\t\t],\n\t\t[83,\t\t399,\t\t0,\t\t0.025700495867768593,\t\t0.016992996557050798,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.436\t\t],\n\t\t[498,\t\t400,\t\t0,\t\t0.012134214876033058,\t\t0.032092247974028,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.352999999999998\t\t],\n\t\t[518,\t\t239,\t\t0,\t\t0.04685289256198347,\t\t0.123915281026504,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.865\t\t],\n\t\t[575,\t\t543,\t\t0,\t\t0.0030307438016528923,\t\t0.032062521596058796,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[401,\t\t360,\t\t0,\t\t0.007957063711911357,\t\t0.071409774520472,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.98\t\t],\n\t\t[580,\t\t581,\t\t0,\t\t0.007134545454545454,\t\t0.018869255592422397,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.790999999999999\t\t],\n\t\t[401,\t\t402,\t\t0,\t\t0.0033434903047091418,\t\t0.030005778188384805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.656\t\t],\n\t\t[403,\t\t231,\t\t0,\t\t0.009592105263157893,\t\t0.08608327126915,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.701999999999998\t\t],\n\t\t[189,\t\t360,\t\t0,\t\t0.028456024930747923,\t\t0.255375399471348,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t82.181\t\t],\n\t\t[234,\t\t404,\t\t0,\t\t0.008092561983471074,\t\t0.0214029921648796,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.24\t\t],\n\t\t[235,\t\t404,\t\t0,\t\t0.05107504132231405,\t\t0.13508190749437998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.251\t\t],\n\t\t[235,\t\t580,\t\t0,\t\t0.000580495867768595,\t\t0.00153527999352772,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.878\t\t],\n\t\t[216,\t\t259,\t\t0,\t\t0.0022115650969529088,\t\t0.079389770210892,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.774000000000001\t\t],\n\t\t[405,\t\t259,\t\t0,\t\t0.0052832409972299165,\t\t0.1896554115982928,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.516\t\t],\n\t\t[405,\t\t318,\t\t0,\t\t0.0066348684210526315,\t\t0.23817552558268398,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t38.323\t\t],\n\t\t[406,\t\t230,\t\t0,\t\t8.098164819944598e-05,\t\t0.046512685161986804,\t\t6845.0,\t\t6845.0,\t\t6845.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.871\t\t],\n\t\t[542,\t\t407,\t\t0,\t\t0.025569586776859506,\t\t0.067625761355152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.674\t\t],\n\t\t[23,\t\t408,\t\t0,\t\t0.03224528925619835,\t\t0.08528148128033601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.771\t\t],\n\t\t[577,\t\t348,\t\t0,\t\t0.012999008264462809,\t\t0.13751772188026398,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.321999999999996\t\t],\n\t\t[562,\t\t564,\t\t0,\t\t0.06921520661157024,\t\t0.18305853298686803,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.68799999999999\t\t],\n\t\t[582,\t\t507,\t\t0,\t\t0.006357685950413223,\t\t0.016814638289042002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.616\t\t],\n\t\t[27,\t\t410,\t\t0,\t\t0.0030042975206611565,\t\t0.007945685980170399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.544\t\
t],\n\t\t[501,\t\t27,\t\t0,\t\t0.003811570247933884,\t\t0.040322957460962,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.53\t\t],\n\t\t[27,\t\t411,\t\t0,\t\t0.004648595041322314,\t\t0.012294480221518,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.031000000000001\t\t],\n\t\t[411,\t\t410,\t\t0,\t\t0.002054214876033058,\t\t0.0054329327333556,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.1069999999999998\t\t],\n\t\t[403,\t\t360,\t\t0,\t\t0.008191481994459833,\t\t0.07351353506655639,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.656999999999996\t\t],\n\t\t[412,\t\t360,\t\t0,\t\t0.016761772853185596,\t\t0.15042664773666,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.408\t\t],\n\t\t[326,\t\t413,\t\t0,\t\t0.012077024793388432,\t\t0.12776397267356798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.533\t\t],\n\t\t[414,\t\t413,\t\t0,\t\t0.008093223140495867,\t\t0.08561896310149601,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t24.482\t\t],\n\t\t[6,\t\t297,\t\t0,\t\t0.019472396694214876,\t\t0.0128750188978664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.725999999999999\t\t],\n\t\t[554,\t\t580,\t\t0,\t\t0.07435371900826447,\t\t0.196648733567264,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t112.46\t\t],\n\t\t[262,\t\t401,\t\t0,\t\t0.03931232686980609,\t\t0.35280406181043206,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t113.53399999999999\t\t],\n\t\t[499,\t\t556,\t\t0,\t\t0.04185586776859504,\t\t0.11069928308639199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t63.306999999999995\t\t],\n\t\t[224,\t\t229,\t\t0,\t\t0.004135206611570248,\t\t0.0437467367631624,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.509\t\t],\n\t\t[583,\t\t507,\t\t0,\t\t0.024632727272727268,\t\t0.065147980317596,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.257\t\t],\n\t\t[415,\t\t307,\t\t0,\t\t0.015675554016620498,\t\t0.1406784987952448,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.271\t\t],\n\t\t[416,\t\t507,\t\t0,\t\t0.0010555371900826446,\t\t0.011166626467730801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.193\t\t],\n\t\t[284,\t\t561,\t\t0,\t\t0.015221487603305786,\t\t0.16102953827307598,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.045\t\t],\n\t\t[543,\t\t417,\t\t0,\t\t0.0006614876033057851,\t\t0.027991756419545603,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t4.002\t\t],\n\t\t[418,\t\t506,\t\t0,\t\t0.0009395041322314049,\t\t0.009939101917118,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.842\t\t],\n\t\t[220,\t\t157,\t\t0,\t\t0.004599549861495845,\t\t0.165112574384632,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.566999999999997\t\t],\n\t\t[295,\t\t419,\t\t0,\t\t0.0012023140495867769,\t\t0.012719392565946,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.637\t\t],\n\t\t[295,\t\t420,\t\t0,\t\t0.0008003305785123967,\t\t0.008466771900532,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.421\t\t],\n\t\t[541,\t\t62,\t\t0,\t\t0.05133355371900827,\t\t0.0339414035471236,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.821\t\t],\n\t\t[52,\t\t421,\t\t0,\t\t0.00013885041551246538,\t\t0.004984389831631239,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.802\t\t],\n\t\t[60,\t\t160,\t\t0,\t\t6.128808864265928e-05,\t\t0.000550023067454096,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0
,\t\t2,\t\t1,\t\t-360,\t\t0.177\t\t],\n\t\t[535,\t\t161,\t\t0,\t\t3.735537190082645e-05,\t\t0.00039518596644331203,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.113\t\t],\n\t\t[267,\t\t282,\t\t0,\t\t0.0065652700831024926,\t\t0.235677115717012,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.921\t\t],\n\t\t[52,\t\t365,\t\t0,\t\t0.007655586334279779,\t\t0.15458444922992,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.164\t\t],\n\t\t[28,\t\t27,\t\t0,\t\t0.015726942148760328,\t\t0.041594197273402404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.787\t\t],\n\t\t[30,\t\t201,\t\t0,\t\t0.009128289473684211,\t\t0.327683234253536,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t52.725\t\t],\n\t\t[422,\t\t81,\t\t0,\t\t0.0004226685133887349,\t\t0.13655487952674,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t6,\t\t1,\t\t-360,\t\t7.324\t\t],\n\t\t[119,\t\t425,\t\t0,\t\t0.003579120498614958,\t\t0.1284816595874996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.673000000000002\t\t],\n\t\t[423,\t\t425,\t\t0,\t\t0.0006518351800554017,\t\t0.0233992864289392,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.765\t\t],\n\t\t[424,\t\t425,\t\t0,\t\t0.005922957063711911,\t\t0.21261965153389198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.211\t\t],\n\t\t[426,\t\t428,\t\t0,\t\t0.013948429752066116,\t\t0.14756174042535197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t42.193999999999996\t\t],\n\t\t[427,\t\t428,\t\t0,\t\t0.0002664462809917355,\t\t0.0028187600792304794,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.8059999999999999\t\t],\n\t\t[19,\t\t428,\t\t0,\t\t0.023607603305785128,\t\t0.24974703912892798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t71.413\t\t],\n\t\t[45,\t\t429,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t429,\t\t0,\t\t5.289256198347107e-05,\t\t0.00013988883767892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08\t\t],\n\t\t[505,\t\t429,\t\t0,\t\t0.006012561983471073,\t\t0.015901863623161996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.094\t\t],\n\t\t[231,\t\t431,\t\t0,\t\t0.011677285318559558,\t\t0.4191859418495199,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.44800000000001\t\t],\n\t\t[190,\t\t431,\t\t0,\t\t0.009600761772853185,\t\t0.34464383257266795,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.45399999999999\t\t],\n\t\t[430,\t\t431,\t\t0,\t\t0.0028100761772853187,\t\t0.1008748520662472,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.230999999999998\t\t],\n\t\t[286,\t\t433,\t\t0,\t\t0.01568694214876033,\t\t0.16595362535967603,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.453\t\t],\n\t\t[432,\t\t433,\t\t0,\t\t0.00010049586776859504,\t\t0.00106315516636076,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.304\t\t],\n\t\t[506,\t\t433,\t\t0,\t\t0.0065904132231404955,\t\t0.06972059669946801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.936\t\t],\n\t\t[23,\t\t434,\t\t0,\t\t0.02613685950413223,\t\t0.069126069139116,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.532\t\t],\n\t\t[400,\t\t434,\t\t0,\t\t0.008155371900826446,\t\t0.021569110159669603,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.335\t\t],\n\t\t[500,\t\t434,\t\t0,\t\t0.006338512396694216,\t\t0.
0167639285853336,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.587\t\t],\n\t\t[32,\t\t436,\t\t0,\t\t0.0044813019390581715,\t\t0.16086776359270402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.884\t\t],\n\t\t[435,\t\t436,\t\t0,\t\t0.0006634349030470914,\t\t0.023815688073266,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.832\t\t],\n\t\t[78,\t\t436,\t\t0,\t\t0.00897680055401662,\t\t0.32224515307884394,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.85\t\t],\n\t\t[86,\t\t438,\t\t0,\t\t0.014693213296398892,\t\t0.52745036936438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t84.868\t\t],\n\t\t[437,\t\t438,\t\t0,\t\t1.0387811634349031e-05,\t\t0.0003728969948845,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.06\t\t],\n\t\t[221,\t\t438,\t\t0,\t\t0.002280124653739612,\t\t0.081850890377238,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.17\t\t],\n\t\t[207,\t\t439,\t\t0,\t\t0.055703801652892564,\t\t0.0368309823503996,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.126000000000005\t\t],\n\t\t[516,\t\t439,\t\t0,\t\t0.05448462809917355,\t\t0.03602487292327441,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.20399999999999\t\t],\n\t\t[513,\t\t439,\t\t0,\t\t0.046726611570247926,\t\t0.0308953241066316,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.336999999999996\t\t],\n\t\t[181,\t\t441,\t\t0,\t\t0.040805289256198356,\t\t0.10792074104825197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.718\t\t],\n\t\t[440,\t\t441,\t\t0,\t\t0.0001322314049586777,\t\t0.000349722094197784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.2\t\t],\n\t\t[504,\t\t441,\t\t0,\t\t0.05916099173553719,\t\t0.156467413554364,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t89.48100000000001\t\t],\n\t\t[135,\t\t442,\t\t0,\t\t0.004956890581717451,\t\t0.177940231009092,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.631\t\t],\n\t\t[109,\t\t442,\t\t0,\t\t0.0015380886426592797,\t\t0.055213615042649204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.884\t\t],\n\t\t[112,\t\t442,\t\t0,\t\t0.0027304362880886425,\t\t0.09801597510545401,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.770999999999999\t\t],\n\t\t[113,\t\t443,\t\t0,\t\t0.0019885734072022164,\t\t0.07138491472072879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.485999999999999\t\t],\n\t\t[132,\t\t443,\t\t0,\t\t0.006788434903047091,\t\t0.24368818615747198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.21\t\t],\n\t\t[107,\t\t443,\t\t0,\t\t2.2333795013850418e-05,\t\t0.000801728539002036,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.129\t\t],\n\t\t[444,\t\t445,\t\t0,\t\t7.877423822714682e-05,\t\t0.00282780221121528,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.455\t\t],\n\t\t[112,\t\t445,\t\t0,\t\t0.002816135734072022,\t\t0.101092375313206,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.266\t\t],\n\t\t[109,\t\t445,\t\t0,\t\t0.0014354224376731304,\t\t0.0515281497432104,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.291\t\t],\n\t\t[119,\t\t447,\t\t0,\t\t0.005212690443213296,\t\t0.74849127803204,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t60.217\t\t],\n\t\t[100,\t\t447,\t\t0,\t\t0.0050695117728531865,\t\t0.7279322237145921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t
58.563\t\t],\n\t\t[446,\t\t447,\t\t0,\t\t2.9518698060941832e-05,\t\t0.00423859584186224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.341\t\t],\n\t\t[124,\t\t448,\t\t0,\t\t6.509695290858726e-05,\t\t0.00233682116794768,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.376\t\t],\n\t\t[125,\t\t448,\t\t0,\t\t0.00615148891966759,\t\t0.22082338542026803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.531\t\t],\n\t\t[131,\t\t448,\t\t0,\t\t3.912742382271468e-05,\t\t0.0014045786807313759,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.226\t\t],\n\t\t[449,\t\t450,\t\t0,\t\t0.0023614958448753462,\t\t0.08477191683710039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.64\t\t],\n\t\t[173,\t\t450,\t\t0,\t\t0.002862361495844876,\t\t0.10275176694050518,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.533\t\t],\n\t\t[184,\t\t450,\t\t0,\t\t0.004022853185595568,\t\t0.14441057621844403,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.236\t\t],\n\t\t[144,\t\t451,\t\t0,\t\t0.007672727272727273,\t\t0.020292624515794402,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.605\t\t],\n\t\t[140,\t\t451,\t\t0,\t\t0.006991074380165291,\t\t0.018489807120219602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.574000000000002\t\t],\n\t\t[514,\t\t451,\t\t0,\t\t0.01149289256198347,\t\t0.030396095817207994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.383\t\t],\n\t\t[537,\t\t585,\t\t0,\t\t0.05072595041322314,\t\t0.134158641165824,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.723\t\t],\n\t\t[141,\t\t585,\t\t0,\t\t0.007994710743801653,\t\t0.0211441978151932,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.092\t\t],\n\t\t[584,\t\t585,\t\t0,\t\t9.256198347107438e-05,\t\t0.000244805465938352,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.14\t\t],\n\t\t[522,\t\t454,\t\t0,\t\t0.0035008264462809916,\t\t0.0092588924438956,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.295\t\t],\n\t\t[144,\t\t454,\t\t0,\t\t0.00452892561983471,\t\t0.011977981726290799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.85\t\t],\n\t\t[453,\t\t454,\t\t0,\t\t0.001114710743801653,\t\t0.0029481572540882,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.686\t\t],\n\t\t[199,\t\t456,\t\t0,\t\t0.013063140495867768,\t\t0.0086372614214612,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.879\t\t],\n\t\t[140,\t\t456,\t\t0,\t\t0.005061818181818182,\t\t0.013387361765852802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.656000000000001\t\t],\n\t\t[455,\t\t456,\t\t0,\t\t0.0011365289256198346,\t\t0.00300586139962416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.719\t\t],\n\t\t[537,\t\t456,\t\t0,\t\t0.039058512396694216,\t\t0.025825228046024003,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.538\t\t],\n\t\t[538,\t\t457,\t\t0,\t\t0.027927272727272728,\t\t0.0184653265736368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.12\t\t],\n\t\t[153,\t\t457,\t\t0,\t\t0.030093223140495867,\t\t0.019897438549384,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.758000000000003\t\t],\n\t\t[176,\t\t457,\t\t0,\t\t0.004579173553719009,\t\t0.0030277190305137603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.463\t\t],\n\t\t[524,\t\t459,\t\t0,\t\t0.004318677685950414,\t\t0.011421923596476799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\
t1,\t\t1,\t\t-360,\t\t6.532\t\t],\n\t\t[458,\t\t459,\t\t0,\t\t0.001993388429752066,\t\t0.0052720605700488,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.015\t\t],\n\t\t[134,\t\t459,\t\t0,\t\t0.011813553719008265,\t\t0.031244171895617998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.868\t\t],\n\t\t[460,\t\t461,\t\t0,\t\t6.611570247933885e-05,\t\t0.000174861047098892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.1\t\t],\n\t\t[150,\t\t461,\t\t0,\t\t0.008018512396694214,\t\t0.021207147792120403,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.128\t\t],\n\t\t[149,\t\t461,\t\t0,\t\t0.005586115702479339,\t\t0.0147740098693748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.449\t\t],\n\t\t[521,\t\t463,\t\t0,\t\t0.014348429752066114,\t\t0.009487086110365599,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.850999999999999\t\t],\n\t\t[462,\t\t463,\t\t0,\t\t0.007197355371900825,\t\t0.0047588433967958406,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.443\t\t],\n\t\t[538,\t\t463,\t\t0,\t\t0.012211570247933883,\t\t0.0080742088497664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.235\t\t],\n\t\t[110,\t\t464,\t\t0,\t\t0.0025753116343490306,\t\t0.0924473799817492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.875\t\t],\n\t\t[90,\t\t464,\t\t0,\t\t0.007328947368421053,\t\t0.26309125979076,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.332\t\t],\n\t\t[165,\t\t464,\t\t0,\t\t0.002152527700831025,\t\t0.0772704722900764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.433\t\t],\n\t\t[458,\t\t465,\t\t0,\t\t0.002003305785123967,\t\t0.0052982897270776,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.03\t\t],\n\t\t[134,\t\t465,\t\t0,\t\t0.011838677685950413,\t\t0.031310619093534,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.906\t\t],\n\t\t[524,\t\t465,\t\t0,\t\t0.004293553719008264,\t\t0.0113554763986092,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.494\t\t],\n\t\t[466,\t\t467,\t\t0,\t\t0.0023509349030470914,\t\t0.084392804892244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.579\t\t],\n\t\t[110,\t\t467,\t\t0,\t\t0.0025337603878116343,\t\t0.09095579200221118,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.635\t\t],\n\t\t[165,\t\t467,\t\t0,\t\t0.0022891274238227145,\t\t0.08217406777274441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.222000000000001\t\t],\n\t\t[468,\t\t469,\t\t0,\t\t0.0005269421487603305,\t\t0.0013936425453786,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.797\t\t],\n\t\t[541,\t\t469,\t\t0,\t\t0.022390743801652895,\t\t0.05921844221026801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.866\t\t],\n\t\t[490,\t\t469,\t\t0,\t\t0.028243305785123966,\t\t0.07469714209944801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.718\t\t],\n\t\t[263,\t\t471,\t\t0,\t\t0.0371900826446281,\t\t0.0245898347482832,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.125\t\t],\n\t\t[470,\t\t471,\t\t0,\t\t0.001570909090909091,\t\t0.0010386746197682802,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.188\t\t],\n\t\t[534,\t\t471,\t\t0,\t\t0.024497190082644622,\t\t0.0161973787927468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.526\t\t],\n\t\t[136,\t\t472,\t\t0,\t\t0.0007079293628808865,\t\t0.025412930201351602,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t
\t1,\t\t1,\t\t-360,\t\t4.0889999999999995\t\t],\n\t\t[110,\t\t472,\t\t0,\t\t0.00019511772853185596,\t\t0.0070042485539216805,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.127\t\t],\n\t\t[251,\t\t472,\t\t0,\t\t4.207063711911357e-05,\t\t0.00151023282928764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.243\t\t],\n\t\t[226,\t\t474,\t\t0,\t\t0.017639669421487602,\t\t0.011663231841509601,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.34\t\t],\n\t\t[473,\t\t474,\t\t0,\t\t0.003467107438016529,\t\t0.00916971330986216,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.244\t\t],\n\t\t[257,\t\t474,\t\t0,\t\t0.020264462809917356,\t\t0.053594910935781594,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.65\t\t],\n\t\t[6,\t\t474,\t\t0,\t\t0.08066247933884299,\t\t0.05333349367016,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.001000000000005\t\t],\n\t\t[299,\t\t475,\t\t0,\t\t0.013238227146814403,\t\t0.47521993028123993,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.464\t\t],\n\t\t[3,\t\t475,\t\t0,\t\t0.0002794321329639889,\t\t0.010030929162389441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.614\t\t],\n\t\t[210,\t\t475,\t\t0,\t\t0.0001481994459833795,\t\t0.00531999712702368,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.856\t\t],\n\t\t[297,\t\t476,\t\t0,\t\t0.0193500826446281,\t\t0.05117658265464801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.267\t\t],\n\t\t[296,\t\t476,\t\t0,\t\t0.005596694214876033,\t\t0.014801987636898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.465\t\t],\n\t\t[295,\t\t476,\t\t0,\t\t0.0009474380165289256,\t\t0.00250575880492432,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.433\t\t],\n\t\t[313,\t\t478,\t\t0,\t\t0.008696849030470914,\t\t0.31219557906752804,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.233000000000004\t\t],\n\t\t[477,\t\t478,\t\t0,\t\t1.5235457063711912e-05,\t\t0.0005469155924977479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08800000000000001\t\t],\n\t\t[245,\t\t478,\t\t0,\t\t0.005264542936288089,\t\t0.188984197007248,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.408\t\t],\n\t\t[479,\t\t481,\t\t0,\t\t0.028420495867768597,\t\t0.07516576970575199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.986000000000004\t\t],\n\t\t[565,\t\t481,\t\t0,\t\t0.024842314049586776,\t\t0.065702289836964,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.574\t\t],\n\t\t[480,\t\t481,\t\t0,\t\t7.735537190082645e-05,\t\t0.000204587425105844,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.11699999999999999\t\t],\n\t\t[415,\t\t482,\t\t0,\t\t0.011021814404432133,\t\t0.0989140353680364,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.831\t\t],\n\t\t[56,\t\t482,\t\t0,\t\t0.002630886426592798,\t\t0.0236105947261788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.598\t\t],\n\t\t[409,\t\t482,\t\t0,\t\t0.0007635041551246537,\t\t0.0068519822810072005,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.205\t\t],\n\t\t[483,\t\t484,\t\t0,\t\t9.037396121883656e-05,\t\t0.000811050963873968,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.261\t\t],\n\t\t[3,\t\t484,\t\t0,\t\t0.010022160664819944,\t\t0.08994275516621358,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.944000000000003\t\t],\n\t\t[301,\t\t484,\t\t0,\t\t0.009665166
20498615,\t\t0.08673894848517479,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.913\t\t],\n\t\t[233,\t\t485,\t\t0,\t\t0.01410180055401662,\t\t0.1265550251138996,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.726\t\t],\n\t\t[392,\t\t485,\t\t0,\t\t0.00914819944598338,\t\t0.0820994883738036,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.42\t\t],\n\t\t[391,\t\t485,\t\t0,\t\t8.518005540166207e-05,\t\t0.000764438839512864,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.24600000000000002\t\t],\n\t\t[579,\t\t488,\t\t0,\t\t0.004636473829194215,\t\t0.11036180126571601,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.038\t\t],\n\t\t[486,\t\t488,\t\t0,\t\t0.00016969696969690082,\t\t0.00403929018798184,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.77\t\t],\n\t\t[487,\t\t488,\t\t0,\t\t0.00014567493112954544,\t\t0.00346749456396992,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.6609999999999999\t\t],\n\t\t[270,\t\t489,\t\t0,\t\t0.0001745152354570637,\t\t0.0062646695140596,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.008\t\t],\n\t\t[331,\t\t489,\t\t0,\t\t0.003002943213296399,\t\t0.10779830627119119,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.345\t\t],\n\t\t[396,\t\t489,\t\t0,\t\t0.01124792243767313,\t\t0.40377286606072005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.968\t\t],\n\t\t[519,\t\t253,\t\t0,\t\t0.013353485337561985,\t\t0.141267767926912,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.394293146100004\t\t],\n\t\t[382,\t\t349,\t\t0,\t\t0.009091647380263157,\t\t1.30547149138788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t105.02671053600001\t\t],\n\t\t[349,\t\t351,\t\t0,\t\t0.0005858117819605263,\t\t0.0841168325920224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.76729770521\t\t],\n\t\t[459,\t\t465,\t\t0,\t\t1.578788789911157e-05,\t\t0.00016702153987596,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.047758360894800005\t\t],\n\t\t[549,\t\t550,\t\t0,\t\t3.680432518409091e-05,\t\t0.000389356391787088,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.111333083682\t\t],\n\t\t[550,\t\t551,\t\t0,\t\t5.755645674710744e-05,\t\t0.0006088951287918401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.17410828165999997\t\t],\n\t\t[194,\t\t195,\t\t0,\t\t1.7560672583171745e-05,\t\t0.00252154053805592,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.202860889681\t\t],\n\t\t[247,\t\t248,\t\t0,\t\t2.1755213937811637e-05,\t\t0.0031238355819477198,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.25131623141\t\t],\n\t\t[2,\t\t294,\t\t0,\t\t2.3531392658518004e-05,\t\t0.003378877444715,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.271834647991\t\t],\n\t\t[549,\t\t551,\t\t0,\t\t9.265809538429751e-05,\t\t0.0009802386406577602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.28029073853799996\t\t],\n\t\t[54,\t\t365,\t\t0,\t\t2.573045189134349e-05,\t\t0.00369464080598484,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.297238180249\t\t],\n\t\t[131,\t\t265,\t\t0,\t\t2.7616389041343487e-05,\t\t0.00396544290388756,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.319024526206\t\t],\n\t\t[91,\t\t92,\t\t0,\t\t2.8945628197853184e-05,\t\t0.0041563086239824396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.33437989694200004\t\t],\n\t\t[2
47,\t\t249,\t\t0,\t\t3.098840072160664e-05,\t\t0.00444963074500788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.357978005136\t\t],\n\t\t[186,\t\t191,\t\t0,\t\t3.1591661821191135e-05,\t\t0.00453625312865552,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.36494687735799997\t\t],\n\t\t[129,\t\t173,\t\t0,\t\t3.202671277479225e-05,\t\t0.00459872218332188,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.369972585975\t\t],\n\t\t[96,\t\t202,\t\t0,\t\t3.5971247867797784e-05,\t\t0.00516511877739804,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.415539855369\t\t],\n\t\t[53,\t\t320,\t\t0,\t\t3.784209581142659e-05,\t\t0.00543375421308236,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.437151890814\t\t],\n\t\t[24,\t\t396,\t\t0,\t\t4.144748602818559e-05,\t\t0.005951452925597279,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.47880135859800005\t\t],\n\t\t[133,\t\t156,\t\t0,\t\t4.431754564044322e-05,\t\t0.0063635653674415605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.511956287238\t\t],\n\t\t[442,\t\t452,\t\t0,\t\t4.483572190450138e-05,\t\t0.006437970402313801,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.517942259441\t\t],\n\t\t[445,\t\t452,\t\t0,\t\t4.490753296371191e-05,\t\t0.0064482817668697215,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.518771820797\t\t],\n\t\t[247,\t\t250,\t\t0,\t\t4.594910768732687e-05,\t\t0.00659784169268824,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.530804092004\t\t],\n\t\t[187,\t\t195,\t\t0,\t\t4.755760376239612e-05,\t\t0.006828805970367921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.549385438663\t\t],\n\t\t[216,\t\t236,\t\t0,\t\t5.03353075283241e-05,\t\t0.00722765701751724,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.581473472567\t\t],\n\t\t[244,\t\t389,\t\t0,\t\t5.1633313019736845e-05,\t\t0.007414037889302401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.596468032004\t\t],\n\t\t[394,\t\t406,\t\t0,\t\t5.6346419007686985e-05,\t\t0.008090793734075721,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.650913832377\t\t],\n\t\t[442,\t\t445,\t\t0,\t\t6.388070648310249e-05,\t\t0.00917264360085512,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.737949921293\t\t],\n\t\t[442,\t\t444,\t\t0,\t\t6.584378362735456e-05,\t\t0.00945452224616264,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.760627388463\t\t],\n\t\t[198,\t\t472,\t\t0,\t\t8.37554210498615e-05,\t\t0.0120264578966664,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.967542623967\t\t],\n\t\t[464,\t\t467,\t\t0,\t\t8.460287496468144e-05,\t\t0.01214814397621276,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.977332411594\t\t],\n\t\t[198,\t\t251,\t\t0,\t\t8.83613182396122e-05,\t\t0.012687819608389479,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0207499483\t\t],\n\t\t[112,\t\t143,\t\t0,\t\t9.049653833033241e-05,\t\t0.012994416294241841,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.04541601079\t\t],\n\t\t[2,\t\t490,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[5,\t\t491,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[10,\t\t492,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[12,\t\t493,\
t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[13,\t\t494,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[15,\t\t495,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[18,\t\t496,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[20,\t\t497,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[22,\t\t498,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[24,\t\t499,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[26,\t\t500,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[30,\t\t501,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[32,\t\t502,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[37,\t\t503,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[42,\t\t504,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[46,\t\t505,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[52,\t\t506,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[56,\t\t507,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[61,\t\t508,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[68,\t\t509,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[69,\t\t510,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[74,\t\t511,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[78,\t\t512,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[86,\t\t513,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[87,\t\t514,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[94,\t\t515,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[95,\t\t516,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[96,\t\t517,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[99,\t\t518,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[100,\t\t519,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[104,\t\t520,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[105,\t\t521,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[106,\t\t522,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[107,\t\t523,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t20
00.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[117,\t\t524,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[120,\t\t525,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[123,\t\t526,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[124,\t\t527,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[125,\t\t528,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[128,\t\t529,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[129,\t\t530,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[138,\t\t531,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[143,\t\t532,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[156,\t\t533,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[157,\t\t534,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[159,\t\t535,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[160,\t\t536,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[165,\t\t537,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[184,\t\t538,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[191,\t\t539,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[195,\t\t540,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[201,\t\t541,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[220,\t\t542,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[231,\t\t543,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[232,\t\t544,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[233,\t\t545,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[236,\t\t546,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[245,\t\t547,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[246,\t\t548,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[248,\t\t549,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[249,\t\t550,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[250,\t\t551,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[259,\t\t552,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[261,\t\t553,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0
,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[262,\t\t554,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[265,\t\t555,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[270,\t\t556,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[277,\t\t557,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[279,\t\t558,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[280,\t\t559,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[290,\t\t560,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[301,\t\t561,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[305,\t\t562,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[306,\t\t563,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[310,\t\t564,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[313,\t\t565,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[315,\t\t566,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[320,\t\t567,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[330,\t\t568,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[332,\t\t569,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[334,\t\t570,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[336,\t\t571,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[349,\t\t572,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[351,\t\t573,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[358,\t\t574,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[360,\t\t575,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[380,\t\t576,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[382,\t\t577,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[383,\t\t578,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[389,\t\t579,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[401,\t\t580,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[402,\t\t581,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[409,\t\t582,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[415,\t\t583,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,
\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[444,\t\t584,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[452,\t\t585,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t]\n\t])\n\tppc[\"gen_control\"] = array([\n\t\t[586,\t\t1,\t\t0.08658028904199107,\t\t4.329014452099554,\t\t0, 0, 0],\n\t\t[589,\t\t1,\t\t0.010042676909098597,\t\t0.5021338454549299,\t\t0, 0, 0],\n\t\t[590,\t\t1,\t\t0.012095775674984046,\t\t0.6047887837492023,\t\t0, 0, 0],\n\t\t[593,\t\t1,\t\t0.0017666198683200384,\t\t0.08833099341600192,\t\t0, 0, 0],\n\t\t[595,\t\t1,\t\t1.50560576164933,\t\t75.2802880824665,\t\t0, 0, 0],\n\t\t[598,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[599,\t\t1,\t\t0.0029602819415092537,\t\t0.1480140970754627,\t\t0, 0, 0],\n\t\t[602,\t\t1,\t\t0.007830423200121252,\t\t0.39152116000606263,\t\t0, 0, 0],\n\t\t[603,\t\t1,\t\t1.0997606567649967,\t\t54.98803283824984,\t\t0, 0, 0],\n\t\t[607,\t\t1,\t\t0.5729577951308232,\t\t28.64788975654116,\t\t0, 0, 0],\n\t\t[608,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[609,\t\t1,\t\t0.0057932399285449895,\t\t0.2896619964272495,\t\t0, 0, 0],\n\t\t[612,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[614,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[616,\t\t1,\t\t0.0046154933496649645,\t\t0.23077466748324824,\t\t0, 0, 0],\n\t\t[617,\t\t1,\t\t0.04360845440717932,\t\t2.1804227203589663,\t\t0, 0, 0],\n\t\t[618,\t\t1,\t\t0.010631550198538607,\t\t0.5315775099269304,\t\t0, 0, 0],\n\t\t[619,\t\t1,\t\t0.037560566569687294,\t\t1.8780283284843649,\t\t0, 0, 0],\n\t\t[624,\t\t1,\t\t0.004297183463481174,\t\t0.21485917317405873,\t\t0, 0, 0],\n\t\t[629,\t\t1,\t\t0.023968734429639437,\t\t1.198436721481972,\t\t0, 0, 0],\n\t\t[632,\t\t1,\t\t0.01435577586688896,\t\t0.717788793344448,\t\t0, 0, 0],\n\t\t[637,\t\t1,\t\t0.017093240888069558,\t\t0.854662044403478,\t\t0, 0, 0],\n\t\t[638,\t\t1,\t\t0.02048324117592693,\t\t1.0241620587963465,\t\t0, 0, 0],\n\t\t[640,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[641,\t\t1,\t\t0.0040107045659157625,\t\t0.20053522829578813,\t\t0, 0, 0],\n\t\t[642,\t\t1,\t\t0.00919915571071155,\t\t0.4599577855355775,\t\t0, 0, 0],\n\t\t[643,\t\t1,\t\t0.27279157245950864,\t\t13.639578622975431,\t\t0, 0, 0],\n\t\t[647,\t\t1,\t\t0.00445633840657307,\t\t0.2228169203286535,\t\t0, 0, 0],\n\t\t[652,\t\t1,\t\t0.00746436683100989,\t\t0.37321834155049455,\t\t0, 0, 0],\n\t\t[655,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[663,\t\t1,\t\t0.00238732414637843,\t\t0.1193662073189215,\t\t0, 0, 0],\n\t\t[666,\t\t1,\t\t0.00919915571071155,\t\t0.4599577855355775,\t\t0, 0, 0],\n\t\t[670,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[672,\t\t1,\t\t0.010536057232683471,\t\t0.5268028616341736,\t\t0, 0, 0],\n\t\t[676,\t\t1,\t\t0.11777465788800255,\t\t5.888732894400127,\t\t0, 0, 0],\n\t\t[681,\t\t1,\t\t0.0063821132179850025,\t\t0.31910566089925013,\t\t0, 0, 0],\n\t\t[683,\t\t1,\t\t0.008753521870054244,\t\t0.4376760935027122,\t\t0, 0, 0],\n\t\t[687,\t\t1,\t\t0.42303383873825773,\t\t21.151691936912886,\t\t0, 0, 0],\n\t\t[694,\t\t1,\t\t0.005220282133414166,\t\t0.2610141066707083,\t\t0, 0, 0],\n\t\t[695,\t\t1,\t\t0.004679155326901723,\t\t0.23395776634508614,\t\t0, 0, 0],\n\t\t[697,\t\t1,\t\t0.0036923946797319715,\t\t0.1846197339865986,\t\t0, 0, 
0],\n\t\t[698,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[702,\t\t1,\t\t0.023363945645890238,\t\t1.168197282294512,\t\t0, 0, 0],\n\t\t[705,\t\t1,\t\t0.005411268065124442,\t\t0.27056340325622213,\t\t0, 0, 0],\n\t\t[707,\t\t1,\t\t0.010822536130248884,\t\t0.5411268065124443,\t\t0, 0, 0],\n\t\t[714,\t\t1,\t\t0.00477464829275686,\t\t0.238732414637843,\t\t0, 0, 0],\n\t\t[716,\t\t1,\t\t1.5915494309189534e-05,\t\t0.0007957747154594768,\t\t0, 0, 0],\n\t\t[717,\t\t1,\t\t0.0017507043740108488,\t\t0.08753521870054244,\t\t0, 0, 0],\n\t\t[722,\t\t1,\t\t0.006589014644004467,\t\t0.3294507322002233,\t\t0, 0, 0],\n\t\t[724,\t\t1,\t\t0.0019257748114119334,\t\t0.09628874057059668,\t\t0, 0, 0],\n\t\t[730,\t\t1,\t\t0.10077690996578814,\t\t5.038845498289407,\t\t0, 0, 0],\n\t\t[732,\t\t1,\t\t0.004647324338283344,\t\t0.2323662169141672,\t\t0, 0, 0],\n\t\t[735,\t\t1,\t\t0.013496339174192726,\t\t0.6748169587096363,\t\t0, 0, 0],\n\t\t[741,\t\t1,\t\t0.0340591578216656,\t\t1.7029578910832803,\t\t0, 0, 0],\n\t\t[742,\t\t1,\t\t0.0028647889756541157,\t\t0.14323944878270578,\t\t0, 0, 0],\n\t\t[743,\t\t1,\t\t0.44881693951914486,\t\t22.440846975957243,\t\t0, 0, 0],\n\t\t[747,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[749,\t\t1,\t\t0.0025464790894703256,\t\t0.12732395447351627,\t\t0, 0, 0],\n\t\t[750,\t\t1,\t\t0.028902537665488188,\t\t1.4451268832744095,\t\t0, 0, 0],\n\t\t[753,\t\t1,\t\t0.049624511256052974,\t\t2.4812255628026487,\t\t0, 0, 0],\n\t\t[761,\t\t1,\t\t0.004997465213085514,\t\t0.2498732606542757,\t\t0, 0, 0],\n\t\t[762,\t\t1,\t\t0.3517324242330887,\t\t17.586621211654435,\t\t0, 0, 0],\n\t\t[765,\t\t1,\t\t0.018780283284843647,\t\t0.9390141642421824,\t\t0, 0, 0],\n\t\t[767,\t\t1,\t\t0.0035650707252584553,\t\t0.17825353626292276,\t\t0, 0, 0],\n\t\t[772,\t\t1,\t\t0.002992112930127632,\t\t0.1496056465063816,\t\t0, 0, 0],\n\t\t[774,\t\t1,\t\t0.010663381187156987,\t\t0.5331690593578494,\t\t0, 0, 0],\n\t\t[777,\t\t1,\t\t0.012573240504259732,\t\t0.6286620252129866,\t\t0, 0, 0],\n\t\t[778,\t\t1,\t\t0.004679155326901723,\t\t0.23395776634508614,\t\t0, 0, 0],\n\t\t[781,\t\t1,\t\t0.4169859509007658,\t\t20.84929754503829,\t\t0, 0, 0],\n\t\t[784,\t\t1,\t\t0.4058451048843331,\t\t20.292255244216655,\t\t0, 0, 0],\n\t\t[785,\t\t1,\t\t0.00047746482927568597,\t\t0.0238732414637843,\t\t0, 0, 0],\n\t\t[788,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 0],\n\t\t[789,\t\t1,\t\t0.0123185925953127,\t\t0.615929629765635,\t\t0, 0, 0],\n\t\t[791,\t\t1,\t\t0.0031830988618379067,\t\t0.15915494309189535,\t\t0, 0, 0],\n\t\t[792,\t\t1,\t\t0.009979014931861837,\t\t0.49895074659309185,\t\t0, 0, 0],\n\t\t[795,\t\t1,\t\t0.004329014452099553,\t\t0.2164507226049777,\t\t0, 0, 0],\n\t\t[800,\t\t1,\t\t0.0058091554228541795,\t\t0.290457771142709,\t\t0, 0, 0],\n\t\t[801,\t\t1,\t\t0.007957747154594767,\t\t0.3978873577297384,\t\t0, 0, 0],\n\t\t[802,\t\t1,\t\t0.07957747154594767,\t\t3.9788735772973833,\t\t0, 0, 0],\n\t\t[805,\t\t1,\t\t0.44881693951914486,\t\t22.440846975957243,\t\t0, 0, 0],\n\t\t[806,\t\t1,\t\t0.005697746962689853,\t\t0.2848873481344927,\t\t0, 0, 0],\n\t\t[808,\t\t1,\t\t0.034616200122487235,\t\t1.7308100061243619,\t\t0, 0, 0],\n\t\t[809,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[811,\t\t1,\t\t0.0040107045659157625,\t\t0.20053522829578813,\t\t0, 0, 0],\n\t\t[814,\t\t1,\t\t0.014164789935178685,\t\t0.7082394967589343,\t\t0, 0, 0],\n\t\t[816,\t\t1,\t\t0.012748310941660816,\t\t0.6374155470830408,\t\t0, 0, 
0],\n\t\t[817,\t\t1,\t\t0.017188733853924696,\t\t0.8594366926962349,\t\t0, 0, 0],\n\t\t[821,\t\t1,\t\t0.013130282805081364,\t\t0.6565141402540683,\t\t0, 0, 0],\n\t\t[826,\t\t1,\t\t0.018461973398659858,\t\t0.9230986699329929,\t\t0, 0, 0],\n\t\t[834,\t\t1,\t\t0.007416620348082323,\t\t0.37083101740411617,\t\t0, 0, 0],\n\t\t[835,\t\t1,\t\t0.010138169874953733,\t\t0.5069084937476867,\t\t0, 0, 0],\n\t\t[836,\t\t1,\t\t0.008116902097686661,\t\t0.4058451048843331,\t\t0, 0, 0],\n\t\t[837,\t\t1,\t\t0.15024226627874918,\t\t7.512113313937459,\t\t0, 0, 0],\n\t\t[839,\t\t1,\t\t0.011666057328635928,\t\t0.5833028664317964,\t\t0, 0, 0],\n\t\t[841,\t\t1,\t\t0.0037083101740411615,\t\t0.18541550870205808,\t\t0, 0, 0],\n\t\t[843,\t\t1,\t\t0.10599719209920229,\t\t5.2998596049601145,\t\t0, 0, 0],\n\t\t[844,\t\t1,\t\t0.012732395447351627,\t\t0.6366197723675814,\t\t0, 0, 0],\n\t\t[850,\t\t1,\t\t0.005092958178940651,\t\t0.25464790894703254,\t\t0, 0, 0],\n\t\t[851,\t\t1,\t\t0.01265281797580568,\t\t0.632640898790284,\t\t0, 0, 0],\n\t\t[853,\t\t1,\t\t0.0036923946797319715,\t\t0.1846197339865986,\t\t0, 0, 0],\n\t\t[856,\t\t1,\t\t0.011459155902616463,\t\t0.5729577951308231,\t\t0, 0, 0],\n\t\t[857,\t\t1,\t\t0.4462704604296745,\t\t22.313523021483725,\t\t0, 0, 0],\n\t\t[858,\t\t1,\t\t0.01808000153523931,\t\t0.9040000767619655,\t\t0, 0, 0],\n\t\t[860,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[865,\t\t1,\t\t0.0035014087480216977,\t\t0.17507043740108488,\t\t0, 0, 0],\n\t\t[867,\t\t1,\t\t0.24478030247533505,\t\t12.239015123766753,\t\t0, 0, 0],\n\t\t[869,\t\t1,\t\t0.4329014452099553,\t\t21.645072260497766,\t\t0, 0, 0],\n\t\t[870,\t\t1,\t\t0.018589297353133374,\t\t0.9294648676566688,\t\t0, 0, 0],\n\t\t[872,\t\t1,\t\t0.00716197243913529,\t\t0.3580986219567645,\t\t0, 0, 0],\n\t\t[874,\t\t1,\t\t0.006589014644004467,\t\t0.3294507322002233,\t\t0, 0, 0],\n\t\t[875,\t\t1,\t\t0.007766761222884492,\t\t0.38833806114422464,\t\t0, 0, 0],\n\t\t[882,\t\t1,\t\t0.005538592019597957,\t\t0.2769296009798979,\t\t0, 0, 0],\n\t\t[883,\t\t1,\t\t0.005729577951308231,\t\t0.28647889756541156,\t\t0, 0, 0],\n\t\t[885,\t\t1,\t\t0.15597184423005742,\t\t7.798592211502871,\t\t0, 0, 0],\n\t\t[886,\t\t1,\t\t0.8186930272647096,\t\t40.93465136323548,\t\t0, 0, 0],\n\t\t[889,\t\t1,\t\t0.0030239439187460114,\t\t0.15119719593730058,\t\t0, 0, 0],\n\t\t[890,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[893,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[894,\t\t1,\t\t0.025146481008519465,\t\t1.2573240504259733,\t\t0, 0, 0],\n\t\t[895,\t\t1,\t\t0.0030239439187460114,\t\t0.15119719593730058,\t\t0, 0, 0],\n\t\t[896,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[898,\t\t1,\t\t0.013464508185574344,\t\t0.6732254092787172,\t\t0, 0, 0],\n\t\t[902,\t\t1,\t\t0.006207042780583919,\t\t0.31035213902919595,\t\t0, 0, 0],\n\t\t[903,\t\t1,\t\t0.0031990143561470966,\t\t0.15995071780735484,\t\t0, 0, 0],\n\t\t[905,\t\t1,\t\t0.021851973686517232,\t\t1.0925986843258617,\t\t0, 0, 0],\n\t\t[906,\t\t1,\t\t0.010504226244065093,\t\t0.5252113122032547,\t\t0, 0, 0],\n\t\t[907,\t\t1,\t\t0.02142225534016911,\t\t1.0711127670084555,\t\t0, 0, 0],\n\t\t[909,\t\t1,\t\t0.005856901905781748,\t\t0.2928450952890874,\t\t0, 0, 0],\n\t\t[917,\t\t1,\t\t0.005411268065124442,\t\t0.27056340325622213,\t\t0, 0, 0],\n\t\t[918,\t\t1,\t\t0.012254930618075942,\t\t0.612746530903797,\t\t0, 0, 0],\n\t\t[920,\t\t1,\t\t0.0020371832715762603,\t\t0.10185916357881303,\t\t0, 0, 
0],\n\t\t[921,\t\t1,\t\t0.019735212943395024,\t\t0.9867606471697512,\t\t0, 0, 0],\n\t\t[922,\t\t1,\t\t0.05220282133414166,\t\t2.6101410667070835,\t\t0, 0, 0],\n\t\t[923,\t\t1,\t\t0.023236621691416718,\t\t1.161831084570836,\t\t0, 0, 0],\n\t\t[925,\t\t1,\t\t0.008276057040778557,\t\t0.4138028520389279,\t\t0, 0, 0],\n\t\t[931,\t\t1,\t\t0.03455253814525047,\t\t1.7276269072625237,\t\t0, 0, 0],\n\t\t[936,\t\t1,\t\t0.016615776058793875,\t\t0.8307888029396938,\t\t0, 0, 0],\n\t\t[937,\t\t1,\t\t0.00477464829275686,\t\t0.238732414637843,\t\t0, 0, 0],\n\t\t[939,\t\t1,\t\t1.5915494309189534e-05,\t\t0.0007957747154594768,\t\t0, 0, 0],\n\t\t[940,\t\t1,\t\t0.009421972631040205,\t\t0.47109863155201026,\t\t0, 0, 0],\n\t\t[944,\t\t1,\t\t0.004042535554534142,\t\t0.2021267777267071,\t\t0, 0, 0],\n\t\t[950,\t\t1,\t\t0.005092958178940651,\t\t0.25464790894703254,\t\t0, 0, 0],\n\t\t[952,\t\t1,\t\t0.005045211696013082,\t\t0.2522605848006541,\t\t0, 0, 0],\n\t\t[958,\t\t1,\t\t0.010615634704229418,\t\t0.530781735211471,\t\t0, 0, 0],\n\t\t[959,\t\t1,\t\t0.007241549910681238,\t\t0.3620774955340619,\t\t0, 0, 0],\n\t\t[960,\t\t1,\t\t0.004217605991935227,\t\t0.21088029959676136,\t\t0, 0, 0],\n\t\t[963,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 0],\n\t\t[965,\t\t1,\t\t0.11204507993669433,\t\t5.602253996834716,\t\t0, 0, 0],\n\t\t[967,\t\t1,\t\t0.01193662073189215,\t\t0.5968310365946076,\t\t0, 0, 0],\n\t\t[969,\t\t1,\t\t0.018111832523857688,\t\t0.9055916261928845,\t\t0, 0, 0],\n\t\t[971,\t\t1,\t\t0.0031830988618379067,\t\t0.15915494309189535,\t\t0, 0, 0],\n\t\t[978,\t\t1,\t\t0.0007321127382227185,\t\t0.03660563691113593,\t\t0, 0, 0],\n\t\t[982,\t\t1,\t\t0.0015756339366097638,\t\t0.07878169683048819,\t\t0, 0, 0],\n\t\t[983,\t\t1,\t\t0.01400563499208679,\t\t0.7002817496043395,\t\t0, 0, 0],\n\t\t[984,\t\t1,\t\t0.14801409707546268,\t\t7.400704853773133,\t\t0, 0, 0],\n\t\t[985,\t\t1,\t\t0.0035014087480216977,\t\t0.17507043740108488,\t\t0, 0, 0],\n\t\t[986,\t\t1,\t\t0.0017825353626292277,\t\t0.08912676813146138,\t\t0, 0, 0],\n\t\t[987,\t\t1,\t\t0.02618098813861678,\t\t1.3090494069308392,\t\t0, 0, 0],\n\t\t[988,\t\t1,\t\t0.0008116902097686662,\t\t0.04058451048843331,\t\t0, 0, 0],\n\t\t[993,\t\t1,\t\t0.06238873769202297,\t\t3.119436884601149,\t\t0, 0, 0],\n\t\t[994,\t\t1,\t\t0.010504226244065093,\t\t0.5252113122032547,\t\t0, 0, 0],\n\t\t[995,\t\t1,\t\t0.0006684507609859605,\t\t0.033422538049298026,\t\t0, 0, 0],\n\t\t[997,\t\t1,\t\t0.005984225860255264,\t\t0.2992112930127632,\t\t0, 0, 0],\n\t\t[999,\t\t1,\t\t0.004965634224467135,\t\t0.24828171122335674,\t\t0, 0, 0],\n\t\t[1002,\t\t1,\t\t0.0031512678732195276,\t\t0.15756339366097638,\t\t0, 0, 0],\n\t\t[1007,\t\t1,\t\t0.007416620348082323,\t\t0.37083101740411617,\t\t0, 0, 0],\n\t\t[1010,\t\t1,\t\t0.238732414637843,\t\t11.93662073189215,\t\t0, 0, 0],\n\t\t[1011,\t\t1,\t\t0.005952394871636886,\t\t0.2976197435818443,\t\t0, 0, 0],\n\t\t[1012,\t\t1,\t\t0.9024085273310466,\t\t45.12042636655233,\t\t0, 0, 0],\n\t\t[1014,\t\t1,\t\t0.238732414637843,\t\t11.93662073189215,\t\t0, 0, 0],\n\t\t[1027,\t\t3,\t\t0.003074873500535418,\t\t0.15374367502677092,\t\t2.22, 61.69, 0.004502],\n\t\t[1028,\t\t2,\t\t0.025464790894703257,\t\t1.273239544735163,\t\t0, 0, 0],\n\t\t[1029,\t\t2,\t\t0.003819718634205488,\t\t0.19098593171027442,\t\t0, 0, 0],\n\t\t[1030,\t\t2,\t\t0.06480789282701978,\t\t3.2403946413509894,\t\t0, 0, 0],\n\t\t[1031,\t\t2,\t\t0.0921316134570364,\t\t4.60658067285182,\t\t0, 0, 0],\n\t\t[1032,\t\t2,\t\t0.009772775025341927,\t\t0.4886387512670964,\t\t0, 0, 
0],\n\t\t[1033,\t\t2,\t\t0.0031935716694765437,\t\t0.15967858347382718,\t\t0, 0, 0],\n\t\t[1034,\t\t2,\t\t0.005364335122251813,\t\t0.26821675611259066,\t\t0, 0, 0],\n\t\t[1035,\t\t3,\t\t0.00317587127473044,\t\t0.158793563736522,\t\t2.22, 61.69, 0.004502],\n\t\t[1036,\t\t2,\t\t0.0042795539826391196,\t\t0.21397769913195597,\t\t0, 0, 0],\n\t\t[1037,\t\t2,\t\t0.0060277734620055035,\t\t0.3013886731002752,\t\t0, 0, 0],\n\t\t[1038,\t\t2,\t\t0.005462103769994554,\t\t0.2731051884997277,\t\t0, 0, 0],\n\t\t[1039,\t\t2,\t\t0.008449479506347874,\t\t0.42247397531739384,\t\t0, 0, 0],\n\t\t[1040,\t\t3,\t\t4.085784833929019e-06,\t\t0.00020428924169645096,\t\t2.22, 61.69, 0.004502],\n\t\t[1041,\t\t2,\t\t0.012998987840239671,\t\t0.6499493920119837,\t\t0, 0, 0],\n\t\t[1042,\t\t2,\t\t0.00335501991632689,\t\t0.1677509958163445,\t\t0, 0, 0],\n\t\t[1043,\t\t3,\t\t0.00038423431443050963,\t\t0.019211715721525482,\t\t2.22, 61.69, 0.004502],\n\t\t[1044,\t\t3,\t\t0.0023022419250361527,\t\t0.11511209625180763,\t\t2.22, 61.69, 0.004502],\n\t\t[1045,\t\t2,\t\t0.003936615026511589,\t\t0.19683075132557948,\t\t0, 0, 0],\n\t\t[1046,\t\t2,\t\t0.006045611128115316,\t\t0.30228055640576584,\t\t0, 0, 0],\n\t\t[1047,\t\t3,\t\t0.0008294889076348922,\t\t0.04147444538174461,\t\t2.22, 61.69, 0.004502],\n\t\t[1048,\t\t2,\t\t0.00445182315071625,\t\t0.22259115753581254,\t\t0, 0, 0],\n\t\t[1049,\t\t2,\t\t0.01870104799381521,\t\t0.9350523996907605,\t\t0, 0, 0],\n\t\t[1050,\t\t2,\t\t0.0033601814151550304,\t\t0.1680090707577515,\t\t0, 0, 0],\n\t\t[1051,\t\t2,\t\t0.019380601737792977,\t\t0.969030086889649,\t\t0, 0, 0],\n\t\t[1052,\t\t3,\t\t0.001315809692296204,\t\t0.06579048461481019,\t\t2.22, 61.69, 0.004502],\n\t\t[1053,\t\t3,\t\t0.001042024786453249,\t\t0.05210123932266245,\t\t2.22, 61.69, 0.004502],\n\t\t[1054,\t\t2,\t\t0.017434200209443074,\t\t0.8717100104721537,\t\t0, 0, 0],\n\t\t[1055,\t\t3,\t\t0.0001818229987415119,\t\t0.009091149937075596,\t\t2.22, 61.69, 0.004502],\n\t\t[1056,\t\t2,\t\t0.0384482661909012,\t\t1.9224133095450602,\t\t0, 0, 0],\n\t\t[1057,\t\t2,\t\t0.02718238967557453,\t\t1.3591194837787268,\t\t0, 0, 0],\n\t\t[1058,\t\t2,\t\t0.06721018861714274,\t\t3.3605094308571375,\t\t0, 0, 0],\n\t\t[1059,\t\t2,\t\t0.02641152929543176,\t\t1.320576464771588,\t\t0, 0, 0],\n\t\t[1060,\t\t3,\t\t0.0006590053340983933,\t\t0.03295026670491967,\t\t2.22, 61.69, 0.004502],\n\t\t[1061,\t\t2,\t\t0.010304492946979937,\t\t0.5152246473489969,\t\t0, 0, 0],\n\t\t[1062,\t\t3,\t\t0.00018325491392786168,\t\t0.009162745696393085,\t\t2.22, 61.69, 0.004502],\n\t\t[1063,\t\t3,\t\t0.0005520076745724519,\t\t0.0276003837286226,\t\t2.22, 61.69, 0.004502],\n\t\t[1064,\t\t2,\t\t0.013355424896304362,\t\t0.667771244815218,\t\t0, 0, 0],\n\t\t[1065,\t\t2,\t\t0.021608252882636087,\t\t1.0804126441318045,\t\t0, 0, 0],\n\t\t[1066,\t\t2,\t\t0.008556107291276397,\t\t0.4278053645638199,\t\t0, 0, 0],\n\t\t[1067,\t\t3,\t\t0.002000933756260183,\t\t0.10004668781300916,\t\t2.22, 61.69, 0.004502],\n\t\t[1068,\t\t3,\t\t0.0003188842576981683,\t\t0.015944212884908417,\t\t2.22, 61.69, 0.004502],\n\t\t[1069,\t\t3,\t\t0.00020313001706596343,\t\t0.010156500853298172,\t\t2.22, 61.69, 0.004502],\n\t\t[1070,\t\t3,\t\t5.020379247175116e-05,\t\t0.0025101896235875582,\t\t2.22, 61.69, 0.004502],\n\t\t[1071,\t\t3,\t\t0.0002755733400308117,\t\t0.013778667001540588,\t\t2.22, 61.69, 0.004502],\n\t\t[1072,\t\t2,\t\t0.007168748144119091,\t\t0.3584374072059546,\t\t0, 0, 0],\n\t\t[1073,\t\t2,\t\t0.004954025493475761,\t\t0.24770127467378808,\t\t0, 0, 
0],\n\t\t[1074,\t\t2,\t\t0.009778033156939965,\t\t0.48890165784699824,\t\t0, 0, 0],\n\t\t[1075,\t\t3,\t\t0.0010048055180333312,\t\t0.05024027590166657,\t\t2.22, 61.69, 0.004502],\n\t\t[1076,\t\t3,\t\t0.00014613668285460223,\t\t0.007306834142730112,\t\t2.22, 61.69, 0.004502],\n\t\t[1077,\t\t3,\t\t0.0016628534246063698,\t\t0.08314267123031849,\t\t2.22, 61.69, 0.004502],\n\t\t[1078,\t\t3,\t\t0.0021908153060440304,\t\t0.10954076530220153,\t\t2.22, 61.69, 0.004502],\n\t\t[1079,\t\t2,\t\t0.004604543003215469,\t\t0.23022715016077344,\t\t0, 0, 0],\n\t\t[1080,\t\t2,\t\t0.008412929217414397,\t\t0.4206464608707199,\t\t0, 0, 0],\n\t\t[1081,\t\t2,\t\t0.025823979083824652,\t\t1.2911989541912325,\t\t0, 0, 0],\n\t\t[1082,\t\t2,\t\t0.03247105626963941,\t\t1.623552813481971,\t\t0, 0, 0],\n\t\t[1083,\t\t2,\t\t0.04034141649573272,\t\t2.017070824786636,\t\t0, 0, 0],\n\t\t[1084,\t\t2,\t\t0.0383703068502718,\t\t1.9185153425135901,\t\t0, 0, 0],\n\t\t[1085,\t\t2,\t\t0.007239283505967098,\t\t0.3619641752983549,\t\t0, 0, 0],\n\t\t[1086,\t\t2,\t\t0.01436208920263519,\t\t0.7181044601317595,\t\t0, 0, 0],\n\t\t[1087,\t\t2,\t\t0.007427186304799236,\t\t0.3713593152399618,\t\t0, 0, 0],\n\t\t[1088,\t\t3,\t\t0.0023416461987310717,\t\t0.11708230993655358,\t\t2.22, 61.69, 0.004502],\n\t\t[1089,\t\t2,\t\t0.024474821190373128,\t\t1.2237410595186564,\t\t0, 0, 0],\n\t\t[1090,\t\t2,\t\t0.005674885746854652,\t\t0.2837442873427326,\t\t0, 0, 0],\n\t\t[1091,\t\t3,\t\t0.0025559246387118852,\t\t0.12779623193559428,\t\t2.22, 61.69, 0.004502],\n\t\t[1092,\t\t2,\t\t0.0022614569222204907,\t\t0.11307284611102454,\t\t0, 0, 0],\n\t\t[1093,\t\t2,\t\t0.005405735887485864,\t\t0.2702867943742932,\t\t0, 0, 0],\n\t\t[1096,\t\t2,\t\t0.0032869739467971857,\t\t0.16434869733985927,\t\t0, 0, 0],\n\t\t[1097,\t\t3,\t\t0.00017300345148886943,\t\t0.008650172574443471,\t\t2.22, 61.69, 0.004502],\n\t\t[1098,\t\t2,\t\t0.003289044333560044,\t\t0.1644522166780022,\t\t0, 0, 0],\n\t\t[1099,\t\t2,\t\t0.017502038182814306,\t\t0.8751019091407154,\t\t0, 0, 0],\n\t\t[1100,\t\t3,\t\t1.2394935240118277e-06,\t\t6.19746762005914e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1101,\t\t2,\t\t0.005343192104787693,\t\t0.2671596052393847,\t\t0, 0, 0],\n\t\t[1102,\t\t2,\t\t0.02234407998394998,\t\t1.1172039991974991,\t\t0, 0, 0],\n\t\t[1103,\t\t2,\t\t0.01562148424141561,\t\t0.7810742120707805,\t\t0, 0, 0],\n\t\t[1105,\t\t3,\t\t5.553489395638779e-05,\t\t0.0027767446978193898,\t\t2.22, 61.69, 0.004502],\n\t\t[1106,\t\t3,\t\t5.824860207634129e-05,\t\t0.0029124301038170645,\t\t2.22, 61.69, 0.004502],\n\t\t[1107,\t\t2,\t\t0.0030626723973069554,\t\t0.15313361986534774,\t\t0, 0, 0],\n\t\t[1108,\t\t2,\t\t0.02039874588539438,\t\t1.019937294269719,\t\t0, 0, 0],\n\t\t[1109,\t\t3,\t\t2.0410230979817453e-05,\t\t0.0010205115489908725,\t\t2.22, 61.69, 0.004502],\n\t\t[1110,\t\t3,\t\t4.209100319936101e-05,\t\t0.0021045501599680503,\t\t2.22, 61.69, 0.004502],\n\t\t[1111,\t\t2,\t\t0.004130994840039845,\t\t0.20654974200199225,\t\t0, 0, 0],\n\t\t[1113,\t\t3,\t\t8.967736039222342e-05,\t\t0.004483868019611171,\t\t2.22, 61.69, 0.004502],\n\t\t[1114,\t\t3,\t\t0.0008287580610983356,\t\t0.04143790305491678,\t\t2.22, 61.69, 0.004502],\n\t\t[1115,\t\t2,\t\t0.0012846199411427445,\t\t0.06423099705713722,\t\t0, 0, 0],\n\t\t[1116,\t\t3,\t\t0.0008266680607579276,\t\t0.04133340303789638,\t\t2.22, 61.69, 0.004502],\n\t\t[1117,\t\t2,\t\t0.002423390125278668,\t\t0.12116950626393344,\t\t0, 0, 0],\n\t\t[1118,\t\t3,\t\t0.0002364061774524349,\t\t0.011820308872621746,\t\t2.22, 61.69, 
0.004502],\n\t\t[1119,\t\t3,\t\t0.001103839988378201,\t\t0.05519199941891006,\t\t2.22, 61.69, 0.004502],\n\t\t[1120,\t\t3,\t\t6.167750655223761e-05,\t\t0.0030838753276118814,\t\t2.22, 61.69, 0.004502],\n\t\t[1121,\t\t3,\t\t1.3755046233043984e-05,\t\t0.0006877523116521993,\t\t2.22, 61.69, 0.004502],\n\t\t[1122,\t\t3,\t\t3.7205183102116836e-05,\t\t0.0018602591551058418,\t\t2.22, 61.69, 0.004502],\n\t\t[1123,\t\t3,\t\t3.718482927877816e-05,\t\t0.001859241463938908,\t\t2.22, 61.69, 0.004502],\n\t\t[1124,\t\t3,\t\t3.2767805859797654e-05,\t\t0.0016383902929898828,\t\t2.22, 61.69, 0.004502],\n\t\t[1125,\t\t3,\t\t0.0007768493279403406,\t\t0.038842466397017036,\t\t2.22, 61.69, 0.004502],\n\t\t[1126,\t\t3,\t\t0.0008993573657867038,\t\t0.04496786828933519,\t\t2.22, 61.69, 0.004502],\n\t\t[1127,\t\t2,\t\t0.002692639158359382,\t\t0.13463195791796911,\t\t0, 0, 0],\n\t\t[1128,\t\t3,\t\t7.798648051461309e-05,\t\t0.0038993240257306546,\t\t2.22, 61.69, 0.004502],\n\t\t[1129,\t\t3,\t\t0.00012067336277826449,\t\t0.006033668138913225,\t\t2.22, 61.69, 0.004502],\n\t\t[1130,\t\t3,\t\t2.6018013552869856e-05,\t\t0.0013009006776434928,\t\t2.22, 61.69, 0.004502],\n\t\t[1131,\t\t3,\t\t7.376731283474909e-05,\t\t0.0036883656417374547,\t\t2.22, 61.69, 0.004502],\n\t\t[1133,\t\t3,\t\t1.8309816678670237e-05,\t\t0.000915490833933512,\t\t2.22, 61.69, 0.004502],\n\t\t[1134,\t\t3,\t\t1.2937356389347597e-05,\t\t0.0006468678194673798,\t\t2.22, 61.69, 0.004502],\n\t\t[1135,\t\t3,\t\t0.0002090133345259136,\t\t0.01045066672629568,\t\t2.22, 61.69, 0.004502],\n\t\t[1136,\t\t3,\t\t1.0239317808798805e-05,\t\t0.0005119658904399403,\t\t2.22, 61.69, 0.004502],\n\t\t[1137,\t\t3,\t\t0.00010517941277154545,\t\t0.005258970638577273,\t\t2.22, 61.69, 0.004502],\n\t\t[1138,\t\t3,\t\t3.202927158114444e-05,\t\t0.0016014635790572223,\t\t2.22, 61.69, 0.004502],\n\t\t[1139,\t\t3,\t\t0.000502422140661582,\t\t0.0251211070330791,\t\t2.22, 61.69, 0.004502],\n\t\t[1140,\t\t3,\t\t0.0014920849297188569,\t\t0.07460424648594284,\t\t2.22, 61.69, 0.004502],\n\t\t[1142,\t\t3,\t\t3.108855958207156e-05,\t\t0.001554427979103578,\t\t2.22, 61.69, 0.004502],\n\t\t[1143,\t\t3,\t\t0.0007010706467170471,\t\t0.03505353233585236,\t\t2.22, 61.69, 0.004502],\n\t\t[1144,\t\t2,\t\t0.0013348659944216786,\t\t0.06674329972108395,\t\t0, 0, 0],\n\t\t[1145,\t\t2,\t\t0.011197481443497569,\t\t0.5598740721748785,\t\t0, 0, 0],\n\t\t[1146,\t\t3,\t\t2.1915822140241895e-05,\t\t0.0010957911070120948,\t\t2.22, 61.69, 0.004502],\n\t\t[1147,\t\t3,\t\t0.0011597195411981833,\t\t0.05798597705990917,\t\t2.22, 61.69, 0.004502],\n\t\t[1148,\t\t3,\t\t0.000530075604509743,\t\t0.026503780225487154,\t\t2.22, 61.69, 0.004502],\n\t\t[1149,\t\t3,\t\t0.00023332074897085096,\t\t0.011666037448542547,\t\t2.22, 61.69, 0.004502],\n\t\t[1150,\t\t3,\t\t9.434708716193637e-05,\t\t0.004717354358096819,\t\t2.22, 61.69, 0.004502],\n\t\t[1151,\t\t3,\t\t0.00033266619332396894,\t\t0.01663330966619845,\t\t2.22, 61.69, 0.004502],\n\t\t[1152,\t\t3,\t\t2.968290590764656e-06,\t\t0.00014841452953823282,\t\t2.22, 61.69, 0.004502],\n\t\t[1155,\t\t3,\t\t1.5547398540825696e-05,\t\t0.0007773699270412849,\t\t2.22, 61.69, 0.004502],\n\t\t[1157,\t\t3,\t\t0.00011110922316080263,\t\t0.005555461158040131,\t\t2.22, 61.69, 0.004502],\n\t\t[1160,\t\t2,\t\t0.015175599618213626,\t\t0.7587799809106813,\t\t0, 0, 0],\n\t\t[1161,\t\t3,\t\t0.0010857043774739259,\t\t0.054285218873696306,\t\t2.22, 61.69, 0.004502],\n\t\t[1162,\t\t2,\t\t0.031984361657767045,\t\t1.5992180828883522,\t\t0, 0, 
0],\n\t\t[1163,\t\t2,\t\t0.021010485834812704,\t\t1.0505242917406352,\t\t0, 0, 0],\n\t\t[1164,\t\t2,\t\t0.018183478445661972,\t\t0.9091739222830987,\t\t0, 0, 0],\n\t\t[1165,\t\t2,\t\t0.003640738012495192,\t\t0.18203690062475963,\t\t0, 0, 0],\n\t\t[1166,\t\t2,\t\t0.005301588846150501,\t\t0.26507944230752506,\t\t0, 0, 0],\n\t\t[1168,\t\t3,\t\t3.419450196278286e-05,\t\t0.0017097250981391431,\t\t2.22, 61.69, 0.004502],\n\t\t[1169,\t\t3,\t\t6.93880139226225e-05,\t\t0.003469400696131125,\t\t2.22, 61.69, 0.004502],\n\t\t[1171,\t\t3,\t\t0.0005748603194505088,\t\t0.02874301597252544,\t\t2.22, 61.69, 0.004502],\n\t\t[1172,\t\t3,\t\t0.00020447436337759674,\t\t0.010223718168879837,\t\t2.22, 61.69, 0.004502],\n\t\t[1173,\t\t2,\t\t0.01618626952698487,\t\t0.8093134763492436,\t\t0, 0, 0],\n\t\t[1175,\t\t3,\t\t2.1782391725402467e-05,\t\t0.0010891195862701233,\t\t2.22, 61.69, 0.004502],\n\t\t[1176,\t\t3,\t\t5.923360885186837e-06,\t\t0.0002961680442593419,\t\t2.22, 61.69, 0.004502],\n\t\t[1177,\t\t3,\t\t0.0007213874875701519,\t\t0.036069374378507595,\t\t2.22, 61.69, 0.004502],\n\t\t[1178,\t\t3,\t\t0.00010205808100824817,\t\t0.005102904050412409,\t\t2.22, 61.69, 0.004502],\n\t\t[1179,\t\t3,\t\t3.44925871051151e-05,\t\t0.0017246293552557552,\t\t2.22, 61.69, 0.004502],\n\t\t[1181,\t\t2,\t\t0.004495779034217764,\t\t0.2247889517108882,\t\t0, 0, 0],\n\t\t[1182,\t\t2,\t\t0.0037840530757545184,\t\t0.1892026537877259,\t\t0, 0, 0],\n\t\t[1183,\t\t3,\t\t0.00109035926940026,\t\t0.054517963470013,\t\t2.22, 61.69, 0.004502],\n\t\t[1184,\t\t3,\t\t0.00010790631226403063,\t\t0.005395315613201532,\t\t2.22, 61.69, 0.004502],\n\t\t[1186,\t\t3,\t\t0.001498769521577056,\t\t0.0749384760788528,\t\t2.22, 61.69, 0.004502],\n\t\t[1187,\t\t3,\t\t0.0002833468274902024,\t\t0.01416734137451012,\t\t2.22, 61.69, 0.004502],\n\t\t[1188,\t\t2,\t\t0.011440868435801076,\t\t0.5720434217900537,\t\t0, 0, 0],\n\t\t[1189,\t\t3,\t\t0.001289906586581014,\t\t0.06449532932905071,\t\t2.22, 61.69, 0.004502],\n\t\t[1190,\t\t2,\t\t0.01403960969000889,\t\t0.7019804845004446,\t\t0, 0, 0],\n\t\t[1191,\t\t2,\t\t0.004652379906159672,\t\t0.23261899530798363,\t\t0, 0, 0],\n\t\t[1192,\t\t3,\t\t0.0013658402687938922,\t\t0.06829201343969461,\t\t2.22, 61.69, 0.004502],\n\t\t[1193,\t\t3,\t\t0.00015278576957249078,\t\t0.007639288478624539,\t\t2.22, 61.69, 0.004502],\n\t\t[1194,\t\t3,\t\t0.0005720688022791215,\t\t0.028603440113956075,\t\t2.22, 61.69, 0.004502],\n\t\t[1195,\t\t3,\t\t1.2882573563174789e-05,\t\t0.0006441286781587394,\t\t2.22, 61.69, 0.004502],\n\t\t[1196,\t\t2,\t\t0.010230349597894291,\t\t0.5115174798947145,\t\t0, 0, 0],\n\t\t[1197,\t\t2,\t\t0.005767282789943071,\t\t0.2883641394971536,\t\t0, 0, 0],\n\t\t[1198,\t\t3,\t\t0.002534966273924786,\t\t0.12674831369623932,\t\t2.22, 61.69, 0.004502],\n\t\t[1199,\t\t2,\t\t0.012822920004466005,\t\t0.6411460002233003,\t\t0, 0, 0],\n\t\t[1200,\t\t2,\t\t0.003512885294685969,\t\t0.17564426473429848,\t\t0, 0, 0],\n\t\t[1201,\t\t3,\t\t0.0016021597716395785,\t\t0.08010798858197893,\t\t2.22, 61.69, 0.004502],\n\t\t[1202,\t\t3,\t\t0.0031762475555186724,\t\t0.15881237777593363,\t\t2.22, 61.69, 0.004502],\n\t\t[1203,\t\t2,\t\t0.011626157559117188,\t\t0.5813078779558594,\t\t0, 0, 0],\n\t\t[1204,\t\t3,\t\t0.0030266063343556363,\t\t0.15133031671778183,\t\t2.22, 61.69, 0.004502],\n\t\t[1205,\t\t3,\t\t3.4940417699210975e-05,\t\t0.0017470208849605492,\t\t2.22, 61.69, 0.004502],\n\t\t[1206,\t\t3,\t\t0.00024235441128435216,\t\t0.012117720564217609,\t\t2.22, 61.69, 
0.004502],\n\t\t[1207,\t\t3,\t\t0.00022762038155293296,\t\t0.011381019077646649,\t\t2.22, 61.69, 0.004502],\n\t\t[1208,\t\t3,\t\t0.0001427321512302434,\t\t0.007136607561512171,\t\t2.22, 61.69, 0.004502],\n\t\t[1209,\t\t3,\t\t3.712569506330662e-05,\t\t0.0018562847531653312,\t\t2.22, 61.69, 0.004502],\n\t\t[1210,\t\t3,\t\t0.00030747517943711223,\t\t0.015373758971855613,\t\t2.22, 61.69, 0.004502],\n\t\t[1211,\t\t3,\t\t0.0011462484513341364,\t\t0.057312422566706815,\t\t2.22, 61.69, 0.004502],\n\t\t[1212,\t\t2,\t\t0.005804182676892941,\t\t0.290209133844647,\t\t0, 0, 0],\n\t\t[1213,\t\t2,\t\t0.0036505499187602444,\t\t0.18252749593801224,\t\t0, 0, 0],\n\t\t[1214,\t\t3,\t\t0.0002868549194435664,\t\t0.014342745972178321,\t\t2.22, 61.69, 0.004502],\n\t\t[1215,\t\t3,\t\t0.00014342822681200328,\t\t0.0071714113406001635,\t\t2.22, 61.69, 0.004502],\n\t\t[1216,\t\t2,\t\t0.00431338348440427,\t\t0.21566917422021353,\t\t0, 0, 0],\n\t\t[1217,\t\t3,\t\t0.0022836580531031417,\t\t0.11418290265515707,\t\t2.22, 61.69, 0.004502],\n\t\t[1218,\t\t3,\t\t6.241945072080783e-05,\t\t0.003120972536040392,\t\t2.22, 61.69, 0.004502],\n\t\t[1219,\t\t3,\t\t0.00038380486709714475,\t\t0.01919024335485724,\t\t2.22, 61.69, 0.004502],\n\t\t[1220,\t\t3,\t\t0.0011850020268110609,\t\t0.05925010134055305,\t\t2.22, 61.69, 0.004502],\n\t\t[1221,\t\t2,\t\t0.0377662225422596,\t\t1.88831112711298,\t\t0, 0, 0],\n\t\t[1222,\t\t2,\t\t0.013436354905899806,\t\t0.6718177452949904,\t\t0, 0, 0],\n\t\t[1223,\t\t3,\t\t0.00024230393037435297,\t\t0.01211519651871765,\t\t2.22, 61.69, 0.004502],\n\t\t[1224,\t\t2,\t\t0.010219261097938644,\t\t0.5109630548969322,\t\t0, 0, 0],\n\t\t[1225,\t\t3,\t\t0.0022238071565315737,\t\t0.1111903578265787,\t\t2.22, 61.69, 0.004502],\n\t\t[1226,\t\t3,\t\t0.0002535566380389208,\t\t0.012677831901946041,\t\t2.22, 61.69, 0.004502],\n\t\t[1227,\t\t3,\t\t0.0011129900410750567,\t\t0.05564950205375283,\t\t2.22, 61.69, 0.004502],\n\t\t[1228,\t\t3,\t\t0.00019234621639044032,\t\t0.009617310819522017,\t\t2.22, 61.69, 0.004502],\n\t\t[1229,\t\t2,\t\t0.0030085590951324306,\t\t0.15042795475662155,\t\t0, 0, 0],\n\t\t[1230,\t\t3,\t\t8.1951485973486e-05,\t\t0.0040975742986743,\t\t2.22, 61.69, 0.004502],\n\t\t[1231,\t\t3,\t\t0.00154847626324508,\t\t0.077423813162254,\t\t2.22, 61.69, 0.004502],\n\t\t[1232,\t\t2,\t\t0.003813185361664286,\t\t0.19065926808321432,\t\t0, 0, 0],\n\t\t[1233,\t\t2,\t\t0.03662908231521014,\t\t1.831454115760507,\t\t0, 0, 0],\n\t\t[1235,\t\t3,\t\t0.0005753349157073776,\t\t0.028766745785368877,\t\t2.22, 61.69, 0.004502],\n\t\t[1236,\t\t2,\t\t0.005234608320670995,\t\t0.26173041603354974,\t\t0, 0, 0],\n\t\t[1237,\t\t3,\t\t0.0008890105844342532,\t\t0.04445052922171266,\t\t2.22, 61.69, 0.004502],\n\t\t[1238,\t\t2,\t\t0.012012445276594919,\t\t0.600622263829746,\t\t0, 0, 0],\n\t\t[1239,\t\t3,\t\t0.0001443666373276477,\t\t0.007218331866382386,\t\t2.22, 61.69, 0.004502],\n\t\t[1240,\t\t2,\t\t0.021613910382114798,\t\t1.08069551910574,\t\t0, 0, 0],\n\t\t[1241,\t\t2,\t\t0.024532881090784327,\t\t1.2266440545392163,\t\t0, 0, 0],\n\t\t[1242,\t\t3,\t\t0.0015615143972363894,\t\t0.07807571986181946,\t\t2.22, 61.69, 0.004502],\n\t\t[1243,\t\t2,\t\t0.005289026999236673,\t\t0.26445134996183367,\t\t0, 0, 0],\n\t\t[1244,\t\t2,\t\t0.020592901244747865,\t\t1.0296450622373932,\t\t0, 0, 0],\n\t\t[1245,\t\t3,\t\t0.0005144458090049472,\t\t0.025722290450247362,\t\t2.22, 61.69, 0.004502],\n\t\t[1246,\t\t2,\t\t0.003636870278584459,\t\t0.18184351392922293,\t\t0, 0, 0],\n\t\t[1247,\t\t3,\t\t0.0013899571448864774,\t\t0.06949785724432388,\t\t2.22, 61.69, 
0.004502],\n\t\t[1248,\t\t2,\t\t0.004047804296417853,\t\t0.2023902148208927,\t\t0, 0, 0],\n\t\t[1249,\t\t2,\t\t0.004846915908139961,\t\t0.24234579540699805,\t\t0, 0, 0],\n\t\t[1250,\t\t3,\t\t0.0019627317861894665,\t\t0.09813658930947333,\t\t2.22, 61.69, 0.004502],\n\t\t[1251,\t\t3,\t\t0.0014899668826355728,\t\t0.07449834413177864,\t\t2.22, 61.69, 0.004502],\n\t\t[1252,\t\t3,\t\t0.0009477821555247328,\t\t0.047389107776236644,\t\t2.22, 61.69, 0.004502],\n\t\t[1253,\t\t2,\t\t0.004106369053307717,\t\t0.20531845266538587,\t\t0, 0, 0],\n\t\t[1254,\t\t2,\t\t0.005238024431161238,\t\t0.2619012215580619,\t\t0, 0, 0],\n\t\t[1255,\t\t3,\t\t0.0002430881191708174,\t\t0.01215440595854087,\t\t2.22, 61.69, 0.004502],\n\t\t[1256,\t\t3,\t\t0.0009607764830526361,\t\t0.048038824152631804,\t\t2.22, 61.69, 0.004502],\n\t\t[1257,\t\t2,\t\t0.005662916214121937,\t\t0.28314581070609685,\t\t0, 0, 0],\n\t\t[1258,\t\t2,\t\t0.014991588973313675,\t\t0.7495794486656838,\t\t0, 0, 0],\n\t\t[1259,\t\t2,\t\t0.00695753592752513,\t\t0.34787679637625657,\t\t0, 0, 0],\n\t\t[1260,\t\t3,\t\t0.0012839803779623614,\t\t0.06419901889811806,\t\t2.22, 61.69, 0.004502],\n\t\t[1261,\t\t2,\t\t0.012840592447306919,\t\t0.6420296223653459,\t\t0, 0, 0],\n\t\t[1262,\t\t3,\t\t3.3365758929065435e-05,\t\t0.0016682879464532717,\t\t2.22, 61.69, 0.004502],\n\t\t[1263,\t\t3,\t\t2.243579925674327e-05,\t\t0.0011217899628371635,\t\t2.22, 61.69, 0.004502],\n\t\t[1264,\t\t2,\t\t0.005222533303161435,\t\t0.2611266651580718,\t\t0, 0, 0],\n\t\t[1265,\t\t3,\t\t0.0004236530619172327,\t\t0.021182653095861634,\t\t2.22, 61.69, 0.004502],\n\t\t[1266,\t\t2,\t\t0.007621029313600565,\t\t0.38105146568002835,\t\t0, 0, 0],\n\t\t[1267,\t\t3,\t\t0.002512674942558201,\t\t0.12563374712791006,\t\t2.22, 61.69, 0.004502],\n\t\t[1268,\t\t3,\t\t0.0002183287451274897,\t\t0.010916437256374485,\t\t2.22, 61.69, 0.004502],\n\t\t[1269,\t\t3,\t\t0.0003250471975980552,\t\t0.01625235987990276,\t\t2.22, 61.69, 0.004502],\n\t\t[1270,\t\t3,\t\t0.0024796665722395645,\t\t0.12398332861197821,\t\t2.22, 61.69, 0.004502],\n\t\t[1271,\t\t3,\t\t0.0030157819134425234,\t\t0.15078909567212617,\t\t2.22, 61.69, 0.004502],\n\t\t[1272,\t\t3,\t\t7.840992648188318e-05,\t\t0.003920496324094159,\t\t2.22, 61.69, 0.004502],\n\t\t[1273,\t\t3,\t\t9.236768632941541e-05,\t\t0.00461838431647077,\t\t2.22, 61.69, 0.004502],\n\t\t[1274,\t\t2,\t\t0.0033801727100761705,\t\t0.1690086355038085,\t\t0, 0, 0],\n\t\t[1275,\t\t2,\t\t0.006307329492962109,\t\t0.3153664746481055,\t\t0, 0, 0],\n\t\t[1276,\t\t3,\t\t0.001633288835647369,\t\t0.08166444178236844,\t\t2.22, 61.69, 0.004502],\n\t\t[1277,\t\t2,\t\t0.004176942042758357,\t\t0.20884710213791788,\t\t0, 0, 0],\n\t\t[1278,\t\t2,\t\t0.010850406134369231,\t\t0.5425203067184615,\t\t0, 0, 0],\n\t\t[1279,\t\t3,\t\t1.2957727984992993e-07,\t\t6.478863992496497e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1280,\t\t3,\t\t2.5822901719599235e-05,\t\t0.001291145085979962,\t\t2.22, 61.69, 0.004502],\n\t\t[1281,\t\t3,\t\t0.00013291594727662026,\t\t0.006645797363831013,\t\t2.22, 61.69, 0.004502],\n\t\t[1282,\t\t3,\t\t0.00021130763141584551,\t\t0.010565381570792277,\t\t2.22, 61.69, 0.004502],\n\t\t[1283,\t\t2,\t\t0.08261824948992594,\t\t4.130912474496298,\t\t0, 0, 0],\n\t\t[1284,\t\t3,\t\t0.0018096758437742202,\t\t0.09048379218871101,\t\t2.22, 61.69, 0.004502],\n\t\t[1285,\t\t3,\t\t0.0001399477244734882,\t\t0.006997386223674409,\t\t2.22, 61.69, 0.004502],\n\t\t[1286,\t\t3,\t\t0.0011377796471657795,\t\t0.05688898235828898,\t\t2.22, 61.69, 
0.004502],\n\t\t[1287,\t\t2,\t\t0.005933272587501368,\t\t0.29666362937506835,\t\t0, 0, 0],\n\t\t[1288,\t\t2,\t\t0.00944760882155904,\t\t0.472380441077952,\t\t0, 0, 0],\n\t\t[1289,\t\t2,\t\t0.011723304434111076,\t\t0.5861652217055537,\t\t0, 0, 0],\n\t\t[1290,\t\t3,\t\t0.0003120693634598793,\t\t0.015603468172993969,\t\t2.22, 61.69, 0.004502],\n\t\t[1291,\t\t2,\t\t0.0062575490505418305,\t\t0.31287745252709154,\t\t0, 0, 0],\n\t\t[1292,\t\t3,\t\t0.002653563231501149,\t\t0.13267816157505744,\t\t2.22, 61.69, 0.004502],\n\t\t[1293,\t\t3,\t\t0.00015292290721046804,\t\t0.007646145360523402,\t\t2.22, 61.69, 0.004502],\n\t\t[1294,\t\t3,\t\t0.0003436110439431119,\t\t0.017180552197155596,\t\t2.22, 61.69, 0.004502],\n\t\t[1295,\t\t3,\t\t0.00037392918854889465,\t\t0.01869645942744473,\t\t2.22, 61.69, 0.004502],\n\t\t[1296,\t\t3,\t\t0.0017415681822428924,\t\t0.08707840911214464,\t\t2.22, 61.69, 0.004502],\n\t\t[1297,\t\t2,\t\t0.011317746197608284,\t\t0.5658873098804141,\t\t0, 0, 0],\n\t\t[1298,\t\t3,\t\t0.00025557758136610396,\t\t0.0127788790683052,\t\t2.22, 61.69, 0.004502],\n\t\t[1299,\t\t3,\t\t0.00013739570556443013,\t\t0.006869785278221508,\t\t2.22, 61.69, 0.004502],\n\t\t[1300,\t\t3,\t\t0.001511593201166196,\t\t0.07557966005830981,\t\t2.22, 61.69, 0.004502],\n\t\t[1301,\t\t2,\t\t0.0038746782543149596,\t\t0.193733912715748,\t\t0, 0, 0],\n\t\t[1302,\t\t3,\t\t0.0003104985267932093,\t\t0.015524926339660468,\t\t2.22, 61.69, 0.004502],\n\t\t[1303,\t\t3,\t\t0.00027600750632746427,\t\t0.013800375316373212,\t\t2.22, 61.69, 0.004502],\n\t\t[1304,\t\t3,\t\t0.000610793340517708,\t\t0.030539667025885397,\t\t2.22, 61.69, 0.004502],\n\t\t[1305,\t\t3,\t\t2.9075695387122924e-07,\t\t1.4537847693561463e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1306,\t\t3,\t\t4.785298727192918e-05,\t\t0.002392649363596459,\t\t2.22, 61.69, 0.004502],\n\t\t[1307,\t\t3,\t\t7.607863985215967e-06,\t\t0.0003803931992607984,\t\t2.22, 61.69, 0.004502],\n\t\t[1308,\t\t3,\t\t0.00020870441847665842,\t\t0.010435220923832922,\t\t2.22, 61.69, 0.004502],\n\t\t[1309,\t\t3,\t\t0.0002132096944766602,\t\t0.01066048472383301,\t\t2.22, 61.69, 0.004502],\n\t\t[1310,\t\t3,\t\t0.00010478060392325507,\t\t0.005239030196162754,\t\t2.22, 61.69, 0.004502],\n\t\t[1311,\t\t3,\t\t0.00042867578463455237,\t\t0.02143378923172762,\t\t2.22, 61.69, 0.004502],\n\t\t[1312,\t\t2,\t\t0.016696303623916272,\t\t0.8348151811958137,\t\t0, 0, 0],\n\t\t[1313,\t\t3,\t\t0.0019631283227609974,\t\t0.09815641613804986,\t\t2.22, 61.69, 0.004502],\n\t\t[1314,\t\t3,\t\t0.0007641975650906521,\t\t0.038209878254532606,\t\t2.22, 61.69, 0.004502],\n\t\t[1315,\t\t3,\t\t0.0005015944131679134,\t\t0.02507972065839567,\t\t2.22, 61.69, 0.004502],\n\t\t[1316,\t\t3,\t\t0.00012376478287903607,\t\t0.006188239143951804,\t\t2.22, 61.69, 0.004502],\n\t\t[1317,\t\t3,\t\t0.0009711351173103039,\t\t0.048556755865515194,\t\t2.22, 61.69, 0.004502],\n\t\t[1318,\t\t3,\t\t0.00012454395408676328,\t\t0.0062271977043381645,\t\t2.22, 61.69, 0.004502],\n\t\t[1319,\t\t3,\t\t0.001127343871228203,\t\t0.05636719356141015,\t\t2.22, 61.69, 0.004502],\n\t\t[1320,\t\t3,\t\t0.0013215329138219017,\t\t0.06607664569109509,\t\t2.22, 61.69, 0.004502],\n\t\t[1321,\t\t3,\t\t1.025741798764967e-05,\t\t0.0005128708993824835,\t\t2.22, 61.69, 0.004502],\n\t\t[1322,\t\t3,\t\t5.919056262068799e-05,\t\t0.0029595281310344,\t\t2.22, 61.69, 0.004502],\n\t\t[1323,\t\t2,\t\t0.012675857799799822,\t\t0.6337928899899912,\t\t0, 0, 0],\n\t\t[1324,\t\t3,\t\t0.0008316328586631403,\t\t0.04158164293315702,\t\t2.22, 61.69, 
0.004502],\n\t\t[1325,\t\t2,\t\t0.0057612535388438385,\t\t0.2880626769421919,\t\t0, 0, 0],\n\t\t[1326,\t\t2,\t\t0.0036242041289439157,\t\t0.1812102064471958,\t\t0, 0, 0],\n\t\t[1327,\t\t2,\t\t0.0032338308031027566,\t\t0.16169154015513784,\t\t0, 0, 0],\n\t\t[1328,\t\t3,\t\t0.0010226241895011407,\t\t0.05113120947505704,\t\t2.22, 61.69, 0.004502],\n\t\t[1329,\t\t2,\t\t0.013921309839652627,\t\t0.6960654919826315,\t\t0, 0, 0],\n\t\t[1330,\t\t3,\t\t0.0019182008434651947,\t\t0.09591004217325974,\t\t2.22, 61.69, 0.004502],\n\t\t[1332,\t\t3,\t\t0.0016738699394560756,\t\t0.08369349697280379,\t\t2.22, 61.69, 0.004502],\n\t\t[1333,\t\t3,\t\t0.0029061854047842247,\t\t0.14530927023921122,\t\t2.22, 61.69, 0.004502],\n\t\t[1334,\t\t3,\t\t5.136054459913027e-05,\t\t0.0025680272299565135,\t\t2.22, 61.69, 0.004502],\n\t\t[1335,\t\t3,\t\t0.00021052629514022267,\t\t0.010526314757011134,\t\t2.22, 61.69, 0.004502],\n\t\t[1336,\t\t3,\t\t0.0018954102795459078,\t\t0.0947705139772954,\t\t2.22, 61.69, 0.004502],\n\t\t[1337,\t\t2,\t\t0.006020338798098282,\t\t0.3010169399049141,\t\t0, 0, 0],\n\t\t[1338,\t\t3,\t\t5.300015004820578e-05,\t\t0.0026500075024102894,\t\t2.22, 61.69, 0.004502],\n\t\t[1339,\t\t3,\t\t0.0006421253879349708,\t\t0.032106269396748544,\t\t2.22, 61.69, 0.004502],\n\t\t[1340,\t\t2,\t\t0.003355330861775994,\t\t0.1677665430887997,\t\t0, 0, 0],\n\t\t[1341,\t\t2,\t\t0.010682483732650976,\t\t0.5341241866325488,\t\t0, 0, 0],\n\t\t[1342,\t\t3,\t\t2.101043175532592e-05,\t\t0.0010505215877662961,\t\t2.22, 61.69, 0.004502],\n\t\t[1343,\t\t3,\t\t3.130239915703848e-05,\t\t0.0015651199578519243,\t\t2.22, 61.69, 0.004502],\n\t\t[1344,\t\t3,\t\t1.4391232894862565e-05,\t\t0.0007195616447431282,\t\t2.22, 61.69, 0.004502],\n\t\t[1345,\t\t3,\t\t0.00025281368060892654,\t\t0.012640684030446329,\t\t2.22, 61.69, 0.004502],\n\t\t[1346,\t\t2,\t\t0.013669449762218379,\t\t0.6834724881109189,\t\t0, 0, 0],\n\t\t[1347,\t\t2,\t\t0.02636344185792537,\t\t1.3181720928962688,\t\t0, 0, 0],\n\t\t[1348,\t\t3,\t\t0.0014456315404578254,\t\t0.07228157702289127,\t\t2.22, 61.69, 0.004502],\n\t\t[1349,\t\t3,\t\t0.002610949541382524,\t\t0.13054747706912617,\t\t2.22, 61.69, 0.004502],\n\t\t[1350,\t\t3,\t\t3.859851934953823e-06,\t\t0.00019299259674769115,\t\t2.22, 61.69, 0.004502],\n\t\t[1351,\t\t3,\t\t4.5085071524642273e-07,\t\t2.2542535762321137e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1352,\t\t3,\t\t2.5677954031977487e-05,\t\t0.0012838977015988745,\t\t2.22, 61.69, 0.004502],\n\t\t[1355,\t\t3,\t\t0.0001074820707981226,\t\t0.005374103539906131,\t\t2.22, 61.69, 0.004502],\n\t\t[1356,\t\t2,\t\t0.004678278776831856,\t\t0.23391393884159278,\t\t0, 0, 0],\n\t\t[1357,\t\t2,\t\t0.003594349677217709,\t\t0.17971748386088549,\t\t0, 0, 0],\n\t\t[1358,\t\t3,\t\t1.57431431082847e-05,\t\t0.0007871571554142351,\t\t2.22, 61.69, 0.004502],\n\t\t[1359,\t\t2,\t\t0.004496673943395517,\t\t0.22483369716977586,\t\t0, 0, 0],\n\t\t[1363,\t\t3,\t\t1.5265322222078787e-06,\t\t7.632661111039394e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1364,\t\t3,\t\t2.8687227851091924e-06,\t\t0.0001434361392554596,\t\t2.22, 61.69, 0.004502],\n\t\t[1365,\t\t3,\t\t2.1560465484574657e-08,\t\t1.078023274228733e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1366,\t\t3,\t\t7.830373844390861e-05,\t\t0.003915186922195431,\t\t2.22, 61.69, 0.004502],\n\t\t[1367,\t\t3,\t\t0.0027735977386081564,\t\t0.1386798869304078,\t\t2.22, 61.69, 0.004502],\n\t\t[1368,\t\t3,\t\t0.0001048661049437223,\t\t0.0052433052471861155,\t\t2.22, 61.69, 0.004502],\n\t\t[1369,\t\t3,\t\t0.0005073133310147165,\t\t0.025365666550735824,\t\t2.22, 
61.69, 0.004502],\n\t\t[1370,\t\t3,\t\t2.185563890765493e-05,\t\t0.0010927819453827466,\t\t2.22, 61.69, 0.004502],\n\t\t[1371,\t\t2,\t\t0.004857683053723355,\t\t0.24288415268616778,\t\t0, 0, 0],\n\t\t[1372,\t\t2,\t\t0.012284634505654547,\t\t0.6142317252827274,\t\t0, 0, 0],\n\t\t[1373,\t\t3,\t\t0.0022409179594482334,\t\t0.11204589797241167,\t\t2.22, 61.69, 0.004502],\n\t\t[1374,\t\t2,\t\t0.006889508467327262,\t\t0.3444754233663631,\t\t0, 0, 0],\n\t\t[1375,\t\t2,\t\t0.003897629175102736,\t\t0.1948814587551368,\t\t0, 0, 0],\n\t\t[1376,\t\t2,\t\t0.006830907337989802,\t\t0.3415453668994901,\t\t0, 0, 0],\n\t\t[1377,\t\t2,\t\t0.01492085689824784,\t\t0.7460428449123921,\t\t0, 0, 0],\n\t\t[1378,\t\t2,\t\t0.01566275025445262,\t\t0.783137512722631,\t\t0, 0, 0],\n\t\t[1379,\t\t3,\t\t2.062505175023466e-05,\t\t0.001031252587511733,\t\t2.22, 61.69, 0.004502],\n\t\t[1381,\t\t3,\t\t2.601825872991241e-05,\t\t0.0013009129364956204,\t\t2.22, 61.69, 0.004502],\n\t\t[1382,\t\t2,\t\t0.008838822964419164,\t\t0.4419411482209583,\t\t0, 0, 0],\n\t\t[1383,\t\t2,\t\t0.0069522653092041085,\t\t0.34761326546020543,\t\t0, 0, 0],\n\t\t[1387,\t\t3,\t\t8.89643885212391e-05,\t\t0.0044482194260619555,\t\t2.22, 61.69, 0.004502],\n\t\t[1390,\t\t3,\t\t9.505708471011321e-05,\t\t0.004752854235505661,\t\t2.22, 61.69, 0.004502],\n\t\t[1391,\t\t3,\t\t1.3594941515348555e-05,\t\t0.0006797470757674278,\t\t2.22, 61.69, 0.004502],\n\t\t[1393,\t\t3,\t\t3.4943392392534786e-05,\t\t0.0017471696196267393,\t\t2.22, 61.69, 0.004502],\n\t\t[1394,\t\t3,\t\t2.737439864388922e-05,\t\t0.001368719932194461,\t\t2.22, 61.69, 0.004502],\n\t\t[1395,\t\t3,\t\t1.9308633391493333e-06,\t\t9.654316695746669e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1396,\t\t3,\t\t7.028796859200431e-07,\t\t3.514398429600216e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1397,\t\t3,\t\t0.0006377592842944558,\t\t0.03188796421472279,\t\t2.22, 61.69, 0.004502],\n\t\t[1398,\t\t3,\t\t7.075339318186764e-05,\t\t0.003537669659093382,\t\t2.22, 61.69, 0.004502],\n\t\t[1399,\t\t3,\t\t0.0005693538555165958,\t\t0.02846769277582979,\t\t2.22, 61.69, 0.004502],\n\t\t[1400,\t\t3,\t\t3.292902158897971e-05,\t\t0.0016464510794489857,\t\t2.22, 61.69, 0.004502],\n\t\t[1401,\t\t2,\t\t0.0037280958540986705,\t\t0.18640479270493354,\t\t0, 0, 0],\n\t\t[1402,\t\t3,\t\t0.0009460030317753202,\t\t0.047300151588766014,\t\t2.22, 61.69, 0.004502],\n\t\t[1403,\t\t2,\t\t0.007617262031172502,\t\t0.38086310155862513,\t\t0, 0, 0],\n\t\t[1404,\t\t2,\t\t0.008581667499251882,\t\t0.42908337496259413,\t\t0, 0, 0],\n\t\t[1405,\t\t3,\t\t0.0013777254553245623,\t\t0.06888627276622811,\t\t2.22, 61.69, 0.004502],\n\t\t[1406,\t\t3,\t\t0.0005951329463718105,\t\t0.029756647318590523,\t\t2.22, 61.69, 0.004502],\n\t\t[1407,\t\t3,\t\t8.42762798103069e-06,\t\t0.00042138139905153457,\t\t2.22, 61.69, 0.004502],\n\t\t[1408,\t\t3,\t\t0.002615151153581973,\t\t0.13075755767909866,\t\t2.22, 61.69, 0.004502],\n\t\t[1409,\t\t3,\t\t0.0007652033584917757,\t\t0.038260167924588785,\t\t2.22, 61.69, 0.004502],\n\t\t[1410,\t\t3,\t\t0.002385192626051519,\t\t0.11925963130257596,\t\t2.22, 61.69, 0.004502],\n\t\t[1411,\t\t3,\t\t0.0025079869254713357,\t\t0.1253993462735668,\t\t2.22, 61.69, 0.004502],\n\t\t[1412,\t\t3,\t\t0.0003811825487857675,\t\t0.01905912743928838,\t\t2.22, 61.69, 0.004502],\n\t\t[1413,\t\t3,\t\t0.0003615867173212219,\t\t0.018079335866061096,\t\t2.22, 61.69, 0.004502],\n\t\t[1414,\t\t3,\t\t0.001654733253695335,\t\t0.08273666268476676,\t\t2.22, 61.69, 0.004502],\n\t\t[1415,\t\t3,\t\t0.0004745682686545623,\t\t0.023728413432728118,\t\t2.22, 61.69, 
0.004502],\n\t\t[1416,\t\t3,\t\t0.0005066221121186196,\t\t0.025331105605930982,\t\t2.22, 61.69, 0.004502],\n\t\t[1417,\t\t3,\t\t7.324966052452151e-08,\t\t3.662483026226075e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1418,\t\t2,\t\t0.005619099755523237,\t\t0.28095498777616185,\t\t0, 0, 0],\n\t\t[1419,\t\t3,\t\t0.00211745485704481,\t\t0.10587274285224049,\t\t2.22, 61.69, 0.004502],\n\t\t[1420,\t\t3,\t\t8.91112970779674e-05,\t\t0.00445556485389837,\t\t2.22, 61.69, 0.004502],\n\t\t[1421,\t\t3,\t\t0.00044387476697737416,\t\t0.02219373834886871,\t\t2.22, 61.69, 0.004502],\n\t\t[1422,\t\t3,\t\t0.00030115264331514286,\t\t0.015057632165757144,\t\t2.22, 61.69, 0.004502],\n\t\t[1423,\t\t3,\t\t0.00012293234040278847,\t\t0.006146617020139425,\t\t2.22, 61.69, 0.004502],\n\t\t[1424,\t\t2,\t\t0.01394783725195249,\t\t0.6973918625976245,\t\t0, 0, 0],\n\t\t[1425,\t\t3,\t\t0.0013602274146640447,\t\t0.06801137073320224,\t\t2.22, 61.69, 0.004502],\n\t\t[1426,\t\t2,\t\t0.004377563184547638,\t\t0.2188781592273819,\t\t0, 0, 0],\n\t\t[1427,\t\t2,\t\t0.03060222784928668,\t\t1.5301113924643341,\t\t0, 0, 0],\n\t\t[1428,\t\t2,\t\t0.021319488529000553,\t\t1.0659744264500277,\t\t0, 0, 0],\n\t\t[1429,\t\t3,\t\t0.000845419991215321,\t\t0.04227099956076605,\t\t2.22, 61.69, 0.004502],\n\t\t[1430,\t\t3,\t\t1.4103786308871584e-06,\t\t7.051893154435792e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1431,\t\t2,\t\t0.014493414492796078,\t\t0.724670724639804,\t\t0, 0, 0],\n\t\t[1432,\t\t3,\t\t0.0007676953741931287,\t\t0.03838476870965644,\t\t2.22, 61.69, 0.004502],\n\t\t[1433,\t\t2,\t\t0.08207564315805406,\t\t4.103782157902703,\t\t0, 0, 0],\n\t\t[1434,\t\t2,\t\t0.004580630870615056,\t\t0.2290315435307528,\t\t0, 0, 0],\n\t\t[1435,\t\t2,\t\t0.005241557112195593,\t\t0.2620778556097797,\t\t0, 0, 0],\n\t\t[1436,\t\t2,\t\t0.006266510483771511,\t\t0.31332552418857557,\t\t0, 0, 0],\n\t\t[1437,\t\t2,\t\t0.015172047044780135,\t\t0.7586023522390068,\t\t0, 0, 0],\n\t\t[1438,\t\t2,\t\t0.025007389641183632,\t\t1.2503694820591817,\t\t0, 0, 0],\n\t\t[1439,\t\t2,\t\t0.0063091033600462575,\t\t0.3154551680023129,\t\t0, 0, 0],\n\t\t[1440,\t\t3,\t\t5.306917668409132e-05,\t\t0.0026534588342045657,\t\t2.22, 61.69, 0.004502],\n\t\t[1441,\t\t3,\t\t1.0923020560921105e-05,\t\t0.0005461510280460552,\t\t2.22, 61.69, 0.004502],\n\t\t[1442,\t\t3,\t\t4.555157486056611e-05,\t\t0.0022775787430283057,\t\t2.22, 61.69, 0.004502],\n\t\t[1443,\t\t2,\t\t0.006557506818224797,\t\t0.3278753409112398,\t\t0, 0, 0],\n\t\t[1444,\t\t3,\t\t0.0005717925297728792,\t\t0.028589626488643962,\t\t2.22, 61.69, 0.004502],\n\t\t[1445,\t\t3,\t\t0.0015938921576921367,\t\t0.07969460788460683,\t\t2.22, 61.69, 0.004502],\n\t\t[1446,\t\t2,\t\t0.04829066125331256,\t\t2.414533062665628,\t\t0, 0, 0],\n\t\t[1447,\t\t2,\t\t0.005696308888305882,\t\t0.2848154444152941,\t\t0, 0, 0],\n\t\t[1448,\t\t3,\t\t0.0002813656970216781,\t\t0.014068284851083905,\t\t2.22, 61.69, 0.004502],\n\t\t[1449,\t\t2,\t\t0.0029348829924128405,\t\t0.14674414962064206,\t\t0, 0, 0],\n\t\t[1450,\t\t2,\t\t0.003726900047088699,\t\t0.18634500235443496,\t\t0, 0, 0],\n\t\t[1451,\t\t2,\t\t0.0036467833176776375,\t\t0.18233916588388188,\t\t0, 0, 0],\n\t\t[1452,\t\t3,\t\t0.0009308941175129764,\t\t0.046544705875648816,\t\t2.22, 61.69, 0.004502],\n\t\t[1453,\t\t2,\t\t0.004134065549943135,\t\t0.20670327749715672,\t\t0, 0, 0],\n\t\t[1454,\t\t2,\t\t0.009875666531734596,\t\t0.49378332658672985,\t\t0, 0, 0],\n\t\t[1455,\t\t3,\t\t1.66950830801293e-05,\t\t0.000834754154006465,\t\t2.22, 61.69, 
0.004502],\n\t\t[1456,\t\t2,\t\t0.0013664683513056725,\t\t0.06832341756528364,\t\t0, 0, 0],\n\t\t[1459,\t\t3,\t\t0.00013477613298625794,\t\t0.006738806649312897,\t\t2.22, 61.69, 0.004502],\n\t\t[1460,\t\t2,\t\t0.0037971068076197746,\t\t0.18985534038098878,\t\t0, 0, 0],\n\t\t[1461,\t\t3,\t\t0.00045503010222392685,\t\t0.022751505111196346,\t\t2.22, 61.69, 0.004502],\n\t\t[1463,\t\t3,\t\t1.810231431840124e-05,\t\t0.0009051157159200621,\t\t2.22, 61.69, 0.004502],\n\t\t[1464,\t\t2,\t\t0.013934601684842136,\t\t0.6967300842421068,\t\t0, 0, 0],\n\t\t[1466,\t\t3,\t\t0.0001450748986048064,\t\t0.00725374493024032,\t\t2.22, 61.69, 0.004502],\n\t\t[1467,\t\t3,\t\t5.434743301684746e-05,\t\t0.0027173716508423736,\t\t2.22, 61.69, 0.004502],\n\t\t[1468,\t\t3,\t\t0.0006047748176593424,\t\t0.03023874088296712,\t\t2.22, 61.69, 0.004502],\n\t\t[1469,\t\t2,\t\t0.003233867943910748,\t\t0.16169339719553738,\t\t0, 0, 0],\n\t\t[1470,\t\t2,\t\t0.005027084884666319,\t\t0.2513542442333159,\t\t0, 0, 0],\n\t\t[1471,\t\t2,\t\t0.010132763321185349,\t\t0.5066381660592674,\t\t0, 0, 0],\n\t\t[1472,\t\t3,\t\t0.00036895330016970505,\t\t0.018447665008485253,\t\t2.22, 61.69, 0.004502],\n\t\t[1473,\t\t3,\t\t0.00021195071858909128,\t\t0.010597535929454565,\t\t2.22, 61.69, 0.004502],\n\t\t[1474,\t\t3,\t\t3.568357370609641e-05,\t\t0.0017841786853048205,\t\t2.22, 61.69, 0.004502],\n\t\t[1475,\t\t3,\t\t9.952961021421813e-06,\t\t0.0004976480510710907,\t\t2.22, 61.69, 0.004502],\n\t\t[1476,\t\t2,\t\t0.015946059282369706,\t\t0.7973029641184852,\t\t0, 0, 0],\n\t\t[1477,\t\t3,\t\t0.0007717725169969112,\t\t0.03858862584984556,\t\t2.22, 61.69, 0.004502],\n\t\t[1479,\t\t3,\t\t0.00035603636123413484,\t\t0.01780181806170674,\t\t2.22, 61.69, 0.004502],\n\t\t[1480,\t\t3,\t\t0.0011893307912248102,\t\t0.05946653956124052,\t\t2.22, 61.69, 0.004502],\n\t\t[1481,\t\t3,\t\t3.3833873695351113e-06,\t\t0.00016916936847675558,\t\t2.22, 61.69, 0.004502],\n\t\t[1482,\t\t3,\t\t0.0011147740798471094,\t\t0.055738703992355476,\t\t2.22, 61.69, 0.004502],\n\t\t[1483,\t\t3,\t\t9.504850518132428e-05,\t\t0.004752425259066214,\t\t2.22, 61.69, 0.004502],\n\t\t[1484,\t\t3,\t\t9.303002951875421e-07,\t\t4.651501475937711e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1485,\t\t3,\t\t1.7528399459215098e-05,\t\t0.000876419972960755,\t\t2.22, 61.69, 0.004502],\n\t\t[1486,\t\t3,\t\t9.018017162430775e-05,\t\t0.0045090085812153876,\t\t2.22, 61.69, 0.004502],\n\t\t[1487,\t\t3,\t\t7.276038526853737e-05,\t\t0.0036380192634268686,\t\t2.22, 61.69, 0.004502],\n\t\t[1488,\t\t3,\t\t0.00022382432076245898,\t\t0.01119121603812295,\t\t2.22, 61.69, 0.004502],\n\t\t[1489,\t\t3,\t\t3.0263189463062935e-06,\t\t0.0001513159473153147,\t\t2.22, 61.69, 0.004502],\n\t\t[1490,\t\t2,\t\t0.04905115781427449,\t\t2.4525578907137247,\t\t0, 0, 0],\n\t\t[1491,\t\t2,\t\t0.005387257187745477,\t\t0.26936285938727383,\t\t0, 0, 0],\n\t\t[1492,\t\t2,\t\t0.014637639488319377,\t\t0.7318819744159688,\t\t0, 0, 0],\n\t\t[1493,\t\t2,\t\t0.005319414988695112,\t\t0.26597074943475557,\t\t0, 0, 0],\n\t\t[1494,\t\t2,\t\t0.0257504251653254,\t\t1.28752125826627,\t\t0, 0, 0],\n\t\t[1495,\t\t2,\t\t0.004260305180484296,\t\t0.2130152590242148,\t\t0, 0, 0],\n\t\t[1496,\t\t3,\t\t1.641562267503393e-08,\t\t8.207811337516965e-07,\t\t2.22, 61.69, 0.004502],\n\t\t[1497,\t\t2,\t\t0.005670372667342641,\t\t0.28351863336713207,\t\t0, 0, 0],\n\t\t[1498,\t\t2,\t\t0.006735488235440387,\t\t0.3367744117720194,\t\t0, 0, 0],\n\t\t[1499,\t\t3,\t\t0.00014557430965896176,\t\t0.0072787154829480885,\t\t2.22, 61.69, 
0.004502],\n\t\t[1500,\t\t3,\t\t9.284328907409222e-06,\t\t0.0004642164453704611,\t\t2.22, 61.69, 0.004502],\n\t\t[1501,\t\t3,\t\t0.00037483587777994396,\t\t0.018741793888997202,\t\t2.22, 61.69, 0.004502],\n\t\t[1502,\t\t3,\t\t3.9491818320371174e-05,\t\t0.0019745909160185583,\t\t2.22, 61.69, 0.004502],\n\t\t[1503,\t\t3,\t\t0.0029266803181735935,\t\t0.14633401590867967,\t\t2.22, 61.69, 0.004502],\n\t\t[1504,\t\t2,\t\t0.012020835078490423,\t\t0.6010417539245212,\t\t0, 0, 0],\n\t\t[1505,\t\t3,\t\t0.0017039709532498102,\t\t0.08519854766249052,\t\t2.22, 61.69, 0.004502],\n\t\t[1506,\t\t2,\t\t0.0035909631390018642,\t\t0.17954815695009319,\t\t0, 0, 0],\n\t\t[1507,\t\t3,\t\t0.000982816273068341,\t\t0.04914081365341705,\t\t2.22, 61.69, 0.004502],\n\t\t[1508,\t\t3,\t\t4.154538017488063e-06,\t\t0.00020772690087440316,\t\t2.22, 61.69, 0.004502],\n\t\t[1510,\t\t2,\t\t0.00681234986437375,\t\t0.34061749321868756,\t\t0, 0, 0],\n\t\t[1511,\t\t2,\t\t0.00988173435818505,\t\t0.4940867179092525,\t\t0, 0, 0],\n\t\t[1512,\t\t2,\t\t0.004082645917281524,\t\t0.20413229586407625,\t\t0, 0, 0],\n\t\t[1513,\t\t3,\t\t0.001467522271804366,\t\t0.07337611359021831,\t\t2.22, 61.69, 0.004502],\n\t\t[1514,\t\t3,\t\t8.434708679035484e-07,\t\t4.217354339517742e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1516,\t\t3,\t\t1.8340973111507537e-06,\t\t9.170486555753769e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1517,\t\t3,\t\t8.192048507877762e-05,\t\t0.0040960242539388805,\t\t2.22, 61.69, 0.004502],\n\t\t[1518,\t\t3,\t\t1.7149947944714273e-05,\t\t0.0008574973972357136,\t\t2.22, 61.69, 0.004502],\n\t\t[1519,\t\t3,\t\t1.1903058584033917e-06,\t\t5.951529292016959e-05,\t\t2.22, 61.69, 0.004502]\n\t])\n\tppc[\"branch_switch\"] = array([\n\t\t[586,\t\t1,\t\t0\t\t],\n\t\t[589,\t\t108,\t\t0\t\t],\n\t\t[590,\t\t108,\t\t0\t\t],\n\t\t[593,\t\t112,\t\t0\t\t],\n\t\t[595,\t\t115,\t\t0\t\t],\n\t\t[598,\t\t118,\t\t0\t\t],\n\t\t[599,\t\t119,\t\t0\t\t],\n\t\t[602,\t\t121,\t\t0\t\t],\n\t\t[603,\t\t526,\t\t0\t\t],\n\t\t[607,\t\t127,\t\t0\t\t],\n\t\t[608,\t\t127,\t\t0\t\t],\n\t\t[609,\t\t529,\t\t0\t\t],\n\t\t[612,\t\t493,\t\t0\t\t],\n\t\t[614,\t\t130,\t\t0\t\t],\n\t\t[616,\t\t132,\t\t0\t\t],\n\t\t[617,\t\t133,\t\t0\t\t],\n\t\t[618,\t\t133,\t\t0\t\t],\n\t\t[619,\t\t134,\t\t0\t\t],\n\t\t[624,\t\t14,\t\t0\t\t],\n\t\t[629,\t\t145,\t\t0\t\t],\n\t\t[632,\t\t145,\t\t0\t\t],\n\t\t[637,\t\t148,\t\t0\t\t],\n\t\t[638,\t\t149,\t\t0\t\t],\n\t\t[640,\t\t153,\t\t0\t\t],\n\t\t[641,\t\t155,\t\t0\t\t],\n\t\t[642,\t\t533,\t\t0\t\t],\n\t\t[643,\t\t534,\t\t0\t\t],\n\t\t[647,\t\t536,\t\t0\t\t],\n\t\t[652,\t\t167,\t\t0\t\t],\n\t\t[655,\t\t170,\t\t0\t\t],\n\t\t[663,\t\t178,\t\t0\t\t],\n\t\t[666,\t\t180,\t\t0\t\t],\n\t\t[670,\t\t183,\t\t0\t\t],\n\t\t[672,\t\t185,\t\t0\t\t],\n\t\t[676,\t\t19,\t\t0\t\t],\n\t\t[681,\t\t197,\t\t0\t\t],\n\t\t[683,\t\t200,\t\t0\t\t],\n\t\t[687,\t\t202,\t\t0\t\t],\n\t\t[694,\t\t21,\t\t0\t\t],\n\t\t[695,\t\t210,\t\t0\t\t],\n\t\t[697,\t\t211,\t\t0\t\t],\n\t\t[698,\t\t212,\t\t0\t\t],\n\t\t[702,\t\t215,\t\t0\t\t],\n\t\t[705,\t\t217,\t\t0\t\t],\n\t\t[707,\t\t219,\t\t0\t\t],\n\t\t[714,\t\t225,\t\t0\t\t],\n\t\t[716,\t\t226,\t\t0\t\t],\n\t\t[717,\t\t227,\t\t0\t\t],\n\t\t[722,\t\t545,\t\t0\t\t],\n\t\t[724,\t\t238,\t\t0\t\t],\n\t\t[730,\t\t547,\t\t0\t\t],\n\t\t[732,\t\t247,\t\t0\t\t],\n\t\t[735,\t\t253,\t\t0\t\t],\n\t\t[741,\t\t264,\t\t0\t\t],\n\t\t[742,\t\t264,\t\t0\t\t],\n\t\t[743,\t\t500,\t\t0\t\t],\n\t\t[747,\t\t273,\t\t0\t\t],\n\t\t[749,\t\t274,\t\t0\t\t],\n\t\t[750,\t\t557,\t\t0\t\t],\n\t\t[753,\t\t28,\t\t0\t\t],\n\t\t[761,\t\t288,\t\t0\t\t],\n\t\t[762,\t\t289,\t\t0\t\
t],\n\t\t[765,\t\t560,\t\t0\t\t],\n\t\t[767,\t\t292,\t\t0\t\t],\n\t\t[772,\t\t3,\t\t0\t\t],\n\t\t[774,\t\t300,\t\t0\t\t],\n\t\t[777,\t\t300,\t\t0\t\t],\n\t\t[778,\t\t300,\t\t0\t\t],\n\t\t[781,\t\t303,\t\t0\t\t],\n\t\t[784,\t\t563,\t\t0\t\t],\n\t\t[785,\t\t501,\t\t0\t\t],\n\t\t[788,\t\t311,\t\t0\t\t],\n\t\t[789,\t\t565,\t\t0\t\t],\n\t\t[791,\t\t314,\t\t0\t\t],\n\t\t[792,\t\t316,\t\t0\t\t],\n\t\t[795,\t\t319,\t\t0\t\t],\n\t\t[800,\t\t326,\t\t0\t\t],\n\t\t[801,\t\t327,\t\t0\t\t],\n\t\t[802,\t\t327,\t\t0\t\t],\n\t\t[805,\t\t328,\t\t0\t\t],\n\t\t[806,\t\t328,\t\t0\t\t],\n\t\t[808,\t\t329,\t\t0\t\t],\n\t\t[809,\t\t329,\t\t0\t\t],\n\t\t[811,\t\t568,\t\t0\t\t],\n\t\t[814,\t\t570,\t\t0\t\t],\n\t\t[816,\t\t335,\t\t0\t\t],\n\t\t[817,\t\t571,\t\t0\t\t],\n\t\t[821,\t\t338,\t\t0\t\t],\n\t\t[826,\t\t339,\t\t0\t\t],\n\t\t[834,\t\t572,\t\t0\t\t],\n\t\t[835,\t\t572,\t\t0\t\t],\n\t\t[836,\t\t572,\t\t0\t\t],\n\t\t[837,\t\t350,\t\t0\t\t],\n\t\t[839,\t\t350,\t\t0\t\t],\n\t\t[841,\t\t573,\t\t0\t\t],\n\t\t[843,\t\t352,\t\t0\t\t],\n\t\t[844,\t\t352,\t\t0\t\t],\n\t\t[850,\t\t574,\t\t0\t\t],\n\t\t[851,\t\t575,\t\t0\t\t],\n\t\t[853,\t\t362,\t\t0\t\t],\n\t\t[856,\t\t363,\t\t0\t\t],\n\t\t[857,\t\t365,\t\t0\t\t],\n\t\t[858,\t\t368,\t\t0\t\t],\n\t\t[860,\t\t371,\t\t0\t\t],\n\t\t[865,\t\t375,\t\t0\t\t],\n\t\t[867,\t\t376,\t\t0\t\t],\n\t\t[869,\t\t503,\t\t0\t\t],\n\t\t[870,\t\t503,\t\t0\t\t],\n\t\t[872,\t\t378,\t\t0\t\t],\n\t\t[874,\t\t576,\t\t0\t\t],\n\t\t[875,\t\t381,\t\t0\t\t],\n\t\t[882,\t\t388,\t\t0\t\t],\n\t\t[883,\t\t388,\t\t0\t\t],\n\t\t[885,\t\t393,\t\t0\t\t],\n\t\t[886,\t\t394,\t\t0\t\t],\n\t\t[889,\t\t397,\t\t0\t\t],\n\t\t[890,\t\t40,\t\t0\t\t],\n\t\t[893,\t\t400,\t\t0\t\t],\n\t\t[894,\t\t400,\t\t0\t\t],\n\t\t[895,\t\t580,\t\t0\t\t],\n\t\t[896,\t\t581,\t\t0\t\t],\n\t\t[898,\t\t403,\t\t0\t\t],\n\t\t[902,\t\t405,\t\t0\t\t],\n\t\t[903,\t\t406,\t\t0\t\t],\n\t\t[905,\t\t413,\t\t0\t\t],\n\t\t[906,\t\t414,\t\t0\t\t],\n\t\t[907,\t\t583,\t\t0\t\t],\n\t\t[909,\t\t417,\t\t0\t\t],\n\t\t[917,\t\t43,\t\t0\t\t],\n\t\t[918,\t\t424,\t\t0\t\t],\n\t\t[920,\t\t428,\t\t0\t\t],\n\t\t[921,\t\t428,\t\t0\t\t],\n\t\t[922,\t\t429,\t\t0\t\t],\n\t\t[923,\t\t432,\t\t0\t\t],\n\t\t[925,\t\t44,\t\t0\t\t],\n\t\t[931,\t\t439,\t\t0\t\t],\n\t\t[936,\t\t445,\t\t0\t\t],\n\t\t[937,\t\t447,\t\t0\t\t],\n\t\t[939,\t\t450,\t\t0\t\t],\n\t\t[940,\t\t451,\t\t0\t\t],\n\t\t[944,\t\t458,\t\t0\t\t],\n\t\t[950,\t\t462,\t\t0\t\t],\n\t\t[952,\t\t47,\t\t0\t\t],\n\t\t[958,\t\t478,\t\t0\t\t],\n\t\t[959,\t\t478,\t\t0\t\t],\n\t\t[960,\t\t479,\t\t0\t\t],\n\t\t[963,\t\t481,\t\t0\t\t],\n\t\t[965,\t\t49,\t\t0\t\t],\n\t\t[967,\t\t49,\t\t0\t\t],\n\t\t[969,\t\t486,\t\t0\t\t],\n\t\t[971,\t\t51,\t\t0\t\t],\n\t\t[978,\t\t491,\t\t0\t\t],\n\t\t[982,\t\t62,\t\t0\t\t],\n\t\t[983,\t\t62,\t\t0\t\t],\n\t\t[984,\t\t63,\t\t0\t\t],\n\t\t[985,\t\t63,\t\t0\t\t],\n\t\t[986,\t\t64,\t\t0\t\t],\n\t\t[987,\t\t65,\t\t0\t\t],\n\t\t[988,\t\t66,\t\t0\t\t],\n\t\t[993,\t\t67,\t\t0\t\t],\n\t\t[994,\t\t67,\t\t0\t\t],\n\t\t[995,\t\t509,\t\t0\t\t],\n\t\t[997,\t\t510,\t\t0\t\t],\n\t\t[999,\t\t70,\t\t0\t\t],\n\t\t[1002,\t\t71,\t\t0\t\t],\n\t\t[1007,\t\t511,\t\t0\t\t],\n\t\t[1010,\t\t79,\t\t0\t\t],\n\t\t[1011,\t\t79,\t\t0\t\t],\n\t\t[1012,\t\t81,\t\t0\t\t],\n\t\t[1014,\t\t83,\t\t0\t\t],\n\t\t[1027,\t\t218,\t\t0\t\t],\n\t\t[1028,\t\t221,\t\t0\t\t],\n\t\t[1029,\t\t268,\t\t0\t\t],\n\t\t[1030,\t\t269,\t\t0\t\t],\n\t\t[1031,\t\t498,\t\t0\t\t],\n\t\t[1032,\t\t1,\t\t0\t\t],\n\t\t[1033,\t\t3,\t\t0\t\t],\n\t\t[1034,\t\t4,\t\t0\t\t],\n\t\t[1035,\t\t6,\t\t0\t\t],\n\t\t[1036,\t\t7,\t\t0\t\t],\n\t\t[1037,\t\t8,\t\t0\t\t],\
n\t\t[1038,\t\t9,\t\t0\t\t],\n\t\t[1039,\t\t11,\t\t0\t\t],\n\t\t[1040,\t\t14,\t\t0\t\t],\n\t\t[1041,\t\t16,\t\t0\t\t],\n\t\t[1042,\t\t17,\t\t0\t\t],\n\t\t[1043,\t\t19,\t\t0\t\t],\n\t\t[1044,\t\t21,\t\t0\t\t],\n\t\t[1045,\t\t23,\t\t0\t\t],\n\t\t[1046,\t\t25,\t\t0\t\t],\n\t\t[1047,\t\t27,\t\t0\t\t],\n\t\t[1048,\t\t28,\t\t0\t\t],\n\t\t[1049,\t\t29,\t\t0\t\t],\n\t\t[1050,\t\t31,\t\t0\t\t],\n\t\t[1051,\t\t33,\t\t0\t\t],\n\t\t[1052,\t\t34,\t\t0\t\t],\n\t\t[1053,\t\t35,\t\t0\t\t],\n\t\t[1054,\t\t36,\t\t0\t\t],\n\t\t[1055,\t\t38,\t\t0\t\t],\n\t\t[1056,\t\t39,\t\t0\t\t],\n\t\t[1057,\t\t40,\t\t0\t\t],\n\t\t[1058,\t\t41,\t\t0\t\t],\n\t\t[1059,\t\t43,\t\t0\t\t],\n\t\t[1060,\t\t44,\t\t0\t\t],\n\t\t[1061,\t\t45,\t\t0\t\t],\n\t\t[1062,\t\t47,\t\t0\t\t],\n\t\t[1063,\t\t48,\t\t0\t\t],\n\t\t[1064,\t\t49,\t\t0\t\t],\n\t\t[1065,\t\t50,\t\t0\t\t],\n\t\t[1066,\t\t51,\t\t0\t\t],\n\t\t[1067,\t\t53,\t\t0\t\t],\n\t\t[1068,\t\t54,\t\t0\t\t],\n\t\t[1069,\t\t55,\t\t0\t\t],\n\t\t[1070,\t\t57,\t\t0\t\t],\n\t\t[1071,\t\t58,\t\t0\t\t],\n\t\t[1072,\t\t59,\t\t0\t\t],\n\t\t[1073,\t\t60,\t\t0\t\t],\n\t\t[1074,\t\t62,\t\t0\t\t],\n\t\t[1075,\t\t63,\t\t0\t\t],\n\t\t[1076,\t\t64,\t\t0\t\t],\n\t\t[1077,\t\t65,\t\t0\t\t],\n\t\t[1078,\t\t66,\t\t0\t\t],\n\t\t[1079,\t\t67,\t\t0\t\t],\n\t\t[1080,\t\t70,\t\t0\t\t],\n\t\t[1081,\t\t71,\t\t0\t\t],\n\t\t[1082,\t\t72,\t\t0\t\t],\n\t\t[1083,\t\t73,\t\t0\t\t],\n\t\t[1084,\t\t75,\t\t0\t\t],\n\t\t[1085,\t\t76,\t\t0\t\t],\n\t\t[1086,\t\t77,\t\t0\t\t],\n\t\t[1087,\t\t79,\t\t0\t\t],\n\t\t[1088,\t\t80,\t\t0\t\t],\n\t\t[1089,\t\t81,\t\t0\t\t],\n\t\t[1090,\t\t82,\t\t0\t\t],\n\t\t[1091,\t\t83,\t\t0\t\t],\n\t\t[1092,\t\t84,\t\t0\t\t],\n\t\t[1093,\t\t85,\t\t0\t\t],\n\t\t[1096,\t\t90,\t\t0\t\t],\n\t\t[1097,\t\t91,\t\t0\t\t],\n\t\t[1098,\t\t92,\t\t0\t\t],\n\t\t[1099,\t\t93,\t\t0\t\t],\n\t\t[1100,\t\t97,\t\t0\t\t],\n\t\t[1101,\t\t98,\t\t0\t\t],\n\t\t[1102,\t\t101,\t\t0\t\t],\n\t\t[1103,\t\t102,\t\t0\t\t],\n\t\t[1105,\t\t108,\t\t0\t\t],\n\t\t[1106,\t\t109,\t\t0\t\t],\n\t\t[1107,\t\t110,\t\t0\t\t],\n\t\t[1108,\t\t111,\t\t0\t\t],\n\t\t[1109,\t\t112,\t\t0\t\t],\n\t\t[1110,\t\t113,\t\t0\t\t],\n\t\t[1111,\t\t114,\t\t0\t\t],\n\t\t[1113,\t\t116,\t\t0\t\t],\n\t\t[1114,\t\t118,\t\t0\t\t],\n\t\t[1115,\t\t119,\t\t0\t\t],\n\t\t[1116,\t\t121,\t\t0\t\t],\n\t\t[1117,\t\t122,\t\t0\t\t],\n\t\t[1118,\t\t126,\t\t0\t\t],\n\t\t[1119,\t\t127,\t\t0\t\t],\n\t\t[1120,\t\t130,\t\t0\t\t],\n\t\t[1121,\t\t131,\t\t0\t\t],\n\t\t[1122,\t\t132,\t\t0\t\t],\n\t\t[1123,\t\t133,\t\t0\t\t],\n\t\t[1124,\t\t134,\t\t0\t\t],\n\t\t[1125,\t\t135,\t\t0\t\t],\n\t\t[1126,\t\t136,\t\t0\t\t],\n\t\t[1127,\t\t137,\t\t0\t\t],\n\t\t[1128,\t\t139,\t\t0\t\t],\n\t\t[1129,\t\t140,\t\t0\t\t],\n\t\t[1130,\t\t141,\t\t0\t\t],\n\t\t[1131,\t\t142,\t\t0\t\t],\n\t\t[1133,\t\t145,\t\t0\t\t],\n\t\t[1134,\t\t146,\t\t0\t\t],\n\t\t[1135,\t\t147,\t\t0\t\t],\n\t\t[1136,\t\t148,\t\t0\t\t],\n\t\t[1137,\t\t149,\t\t0\t\t],\n\t\t[1138,\t\t150,\t\t0\t\t],\n\t\t[1139,\t\t151,\t\t0\t\t],\n\t\t[1140,\t\t152,\t\t0\t\t],\n\t\t[1142,\t\t154,\t\t0\t\t],\n\t\t[1143,\t\t155,\t\t0\t\t],\n\t\t[1144,\t\t158,\t\t0\t\t],\n\t\t[1145,\t\t161,\t\t0\t\t],\n\t\t[1146,\t\t162,\t\t0\t\t],\n\t\t[1147,\t\t163,\t\t0\t\t],\n\t\t[1148,\t\t164,\t\t0\t\t],\n\t\t[1149,\t\t166,\t\t0\t\t],\n\t\t[1150,\t\t167,\t\t0\t\t],\n\t\t[1151,\t\t168,\t\t0\t\t],\n\t\t[1152,\t\t169,\t\t0\t\t],\n\t\t[1155,\t\t172,\t\t0\t\t],\n\t\t[1157,\t\t174,\t\t0\t\t],\n\t\t[1160,\t\t177,\t\t0\t\t],\n\t\t[1161,\t\t178,\t\t0\t\t],\n\t\t[1162,\t\t179,\t\t0\t\t],\n\t\t[1163,\t\t180,\t\t0\t\t],\n\t\t[1164,\t\t181,\t\t0\t\t],\n\t\t[1165,\t\t182,\t\
t0\t\t],\n\t\t[1166,\t\t183,\t\t0\t\t],\n\t\t[1168,\t\t186,\t\t0\t\t],\n\t\t[1169,\t\t187,\t\t0\t\t],\n\t\t[1171,\t\t189,\t\t0\t\t],\n\t\t[1172,\t\t190,\t\t0\t\t],\n\t\t[1173,\t\t192,\t\t0\t\t],\n\t\t[1175,\t\t194,\t\t0\t\t],\n\t\t[1176,\t\t196,\t\t0\t\t],\n\t\t[1177,\t\t197,\t\t0\t\t],\n\t\t[1178,\t\t198,\t\t0\t\t],\n\t\t[1179,\t\t199,\t\t0\t\t],\n\t\t[1181,\t\t202,\t\t0\t\t],\n\t\t[1182,\t\t203,\t\t0\t\t],\n\t\t[1183,\t\t204,\t\t0\t\t],\n\t\t[1184,\t\t205,\t\t0\t\t],\n\t\t[1186,\t\t207,\t\t0\t\t],\n\t\t[1187,\t\t208,\t\t0\t\t],\n\t\t[1188,\t\t209,\t\t0\t\t],\n\t\t[1189,\t\t210,\t\t0\t\t],\n\t\t[1190,\t\t211,\t\t0\t\t],\n\t\t[1191,\t\t212,\t\t0\t\t],\n\t\t[1192,\t\t213,\t\t0\t\t],\n\t\t[1193,\t\t214,\t\t0\t\t],\n\t\t[1194,\t\t215,\t\t0\t\t],\n\t\t[1195,\t\t216,\t\t0\t\t],\n\t\t[1196,\t\t217,\t\t0\t\t],\n\t\t[1197,\t\t218,\t\t0\t\t],\n\t\t[1198,\t\t219,\t\t0\t\t],\n\t\t[1199,\t\t221,\t\t0\t\t],\n\t\t[1200,\t\t222,\t\t0\t\t],\n\t\t[1201,\t\t223,\t\t0\t\t],\n\t\t[1202,\t\t224,\t\t0\t\t],\n\t\t[1203,\t\t225,\t\t0\t\t],\n\t\t[1204,\t\t226,\t\t0\t\t],\n\t\t[1205,\t\t227,\t\t0\t\t],\n\t\t[1206,\t\t228,\t\t0\t\t],\n\t\t[1207,\t\t229,\t\t0\t\t],\n\t\t[1208,\t\t230,\t\t0\t\t],\n\t\t[1209,\t\t234,\t\t0\t\t],\n\t\t[1210,\t\t235,\t\t0\t\t],\n\t\t[1211,\t\t237,\t\t0\t\t],\n\t\t[1212,\t\t238,\t\t0\t\t],\n\t\t[1213,\t\t239,\t\t0\t\t],\n\t\t[1214,\t\t240,\t\t0\t\t],\n\t\t[1215,\t\t241,\t\t0\t\t],\n\t\t[1216,\t\t242,\t\t0\t\t],\n\t\t[1217,\t\t243,\t\t0\t\t],\n\t\t[1218,\t\t244,\t\t0\t\t],\n\t\t[1219,\t\t247,\t\t0\t\t],\n\t\t[1220,\t\t251,\t\t0\t\t],\n\t\t[1221,\t\t252,\t\t0\t\t],\n\t\t[1222,\t\t253,\t\t0\t\t],\n\t\t[1223,\t\t254,\t\t0\t\t],\n\t\t[1224,\t\t255,\t\t0\t\t],\n\t\t[1225,\t\t256,\t\t0\t\t],\n\t\t[1226,\t\t257,\t\t0\t\t],\n\t\t[1227,\t\t258,\t\t0\t\t],\n\t\t[1228,\t\t260,\t\t0\t\t],\n\t\t[1229,\t\t263,\t\t0\t\t],\n\t\t[1230,\t\t264,\t\t0\t\t],\n\t\t[1231,\t\t266,\t\t0\t\t],\n\t\t[1232,\t\t267,\t\t0\t\t],\n\t\t[1233,\t\t268,\t\t0\t\t],\n\t\t[1235,\t\t271,\t\t0\t\t],\n\t\t[1236,\t\t272,\t\t0\t\t],\n\t\t[1237,\t\t273,\t\t0\t\t],\n\t\t[1238,\t\t274,\t\t0\t\t],\n\t\t[1239,\t\t275,\t\t0\t\t],\n\t\t[1240,\t\t276,\t\t0\t\t],\n\t\t[1241,\t\t278,\t\t0\t\t],\n\t\t[1242,\t\t281,\t\t0\t\t],\n\t\t[1243,\t\t282,\t\t0\t\t],\n\t\t[1244,\t\t283,\t\t0\t\t],\n\t\t[1245,\t\t284,\t\t0\t\t],\n\t\t[1246,\t\t285,\t\t0\t\t],\n\t\t[1247,\t\t286,\t\t0\t\t],\n\t\t[1248,\t\t287,\t\t0\t\t],\n\t\t[1249,\t\t288,\t\t0\t\t],\n\t\t[1250,\t\t289,\t\t0\t\t],\n\t\t[1251,\t\t291,\t\t0\t\t],\n\t\t[1252,\t\t292,\t\t0\t\t],\n\t\t[1253,\t\t293,\t\t0\t\t],\n\t\t[1254,\t\t294,\t\t0\t\t],\n\t\t[1255,\t\t295,\t\t0\t\t],\n\t\t[1256,\t\t296,\t\t0\t\t],\n\t\t[1257,\t\t297,\t\t0\t\t],\n\t\t[1258,\t\t298,\t\t0\t\t],\n\t\t[1259,\t\t299,\t\t0\t\t],\n\t\t[1260,\t\t300,\t\t0\t\t],\n\t\t[1261,\t\t302,\t\t0\t\t],\n\t\t[1262,\t\t303,\t\t0\t\t],\n\t\t[1263,\t\t304,\t\t0\t\t],\n\t\t[1264,\t\t307,\t\t0\t\t],\n\t\t[1265,\t\t308,\t\t0\t\t],\n\t\t[1266,\t\t309,\t\t0\t\t],\n\t\t[1267,\t\t311,\t\t0\t\t],\n\t\t[1268,\t\t312,\t\t0\t\t],\n\t\t[1269,\t\t314,\t\t0\t\t],\n\t\t[1270,\t\t316,\t\t0\t\t],\n\t\t[1271,\t\t317,\t\t0\t\t],\n\t\t[1272,\t\t318,\t\t0\t\t],\n\t\t[1273,\t\t319,\t\t0\t\t],\n\t\t[1274,\t\t321,\t\t0\t\t],\n\t\t[1275,\t\t322,\t\t0\t\t],\n\t\t[1276,\t\t323,\t\t0\t\t],\n\t\t[1277,\t\t324,\t\t0\t\t],\n\t\t[1278,\t\t325,\t\t0\t\t],\n\t\t[1279,\t\t326,\t\t0\t\t],\n\t\t[1280,\t\t327,\t\t0\t\t],\n\t\t[1281,\t\t328,\t\t0\t\t],\n\t\t[1282,\t\t329,\t\t0\t\t],\n\t\t[1283,\t\t331,\t\t0\t\t],\n\t\t[1284,\t\t333,\t\t0\t\t],\n\t\t[1285,\t\t335,\t\t0\t\t],\n\t\t[1286,\
t\t337,\t\t0\t\t],\n\t\t[1287,\t\t338,\t\t0\t\t],\n\t\t[1288,\t\t339,\t\t0\t\t],\n\t\t[1289,\t\t340,\t\t0\t\t],\n\t\t[1290,\t\t341,\t\t0\t\t],\n\t\t[1291,\t\t342,\t\t0\t\t],\n\t\t[1292,\t\t343,\t\t0\t\t],\n\t\t[1293,\t\t344,\t\t0\t\t],\n\t\t[1294,\t\t345,\t\t0\t\t],\n\t\t[1295,\t\t346,\t\t0\t\t],\n\t\t[1296,\t\t347,\t\t0\t\t],\n\t\t[1297,\t\t348,\t\t0\t\t],\n\t\t[1298,\t\t350,\t\t0\t\t],\n\t\t[1299,\t\t352,\t\t0\t\t],\n\t\t[1300,\t\t353,\t\t0\t\t],\n\t\t[1301,\t\t354,\t\t0\t\t],\n\t\t[1302,\t\t355,\t\t0\t\t],\n\t\t[1303,\t\t356,\t\t0\t\t],\n\t\t[1304,\t\t357,\t\t0\t\t],\n\t\t[1305,\t\t359,\t\t0\t\t],\n\t\t[1306,\t\t361,\t\t0\t\t],\n\t\t[1307,\t\t362,\t\t0\t\t],\n\t\t[1308,\t\t363,\t\t0\t\t],\n\t\t[1309,\t\t364,\t\t0\t\t],\n\t\t[1310,\t\t365,\t\t0\t\t],\n\t\t[1311,\t\t366,\t\t0\t\t],\n\t\t[1312,\t\t367,\t\t0\t\t],\n\t\t[1313,\t\t368,\t\t0\t\t],\n\t\t[1314,\t\t369,\t\t0\t\t],\n\t\t[1315,\t\t370,\t\t0\t\t],\n\t\t[1316,\t\t371,\t\t0\t\t],\n\t\t[1317,\t\t372,\t\t0\t\t],\n\t\t[1318,\t\t373,\t\t0\t\t],\n\t\t[1319,\t\t374,\t\t0\t\t],\n\t\t[1320,\t\t375,\t\t0\t\t],\n\t\t[1321,\t\t376,\t\t0\t\t],\n\t\t[1322,\t\t377,\t\t0\t\t],\n\t\t[1323,\t\t378,\t\t0\t\t],\n\t\t[1324,\t\t379,\t\t0\t\t],\n\t\t[1325,\t\t381,\t\t0\t\t],\n\t\t[1326,\t\t384,\t\t0\t\t],\n\t\t[1327,\t\t385,\t\t0\t\t],\n\t\t[1328,\t\t386,\t\t0\t\t],\n\t\t[1329,\t\t387,\t\t0\t\t],\n\t\t[1330,\t\t388,\t\t0\t\t],\n\t\t[1332,\t\t391,\t\t0\t\t],\n\t\t[1333,\t\t392,\t\t0\t\t],\n\t\t[1334,\t\t393,\t\t0\t\t],\n\t\t[1335,\t\t394,\t\t0\t\t],\n\t\t[1336,\t\t395,\t\t0\t\t],\n\t\t[1337,\t\t396,\t\t0\t\t],\n\t\t[1338,\t\t397,\t\t0\t\t],\n\t\t[1339,\t\t398,\t\t0\t\t],\n\t\t[1340,\t\t399,\t\t0\t\t],\n\t\t[1341,\t\t400,\t\t0\t\t],\n\t\t[1342,\t\t403,\t\t0\t\t],\n\t\t[1343,\t\t404,\t\t0\t\t],\n\t\t[1344,\t\t405,\t\t0\t\t],\n\t\t[1345,\t\t406,\t\t0\t\t],\n\t\t[1346,\t\t407,\t\t0\t\t],\n\t\t[1347,\t\t408,\t\t0\t\t],\n\t\t[1348,\t\t410,\t\t0\t\t],\n\t\t[1349,\t\t411,\t\t0\t\t],\n\t\t[1350,\t\t412,\t\t0\t\t],\n\t\t[1351,\t\t413,\t\t0\t\t],\n\t\t[1352,\t\t414,\t\t0\t\t],\n\t\t[1355,\t\t418,\t\t0\t\t],\n\t\t[1356,\t\t419,\t\t0\t\t],\n\t\t[1357,\t\t420,\t\t0\t\t],\n\t\t[1358,\t\t421,\t\t0\t\t],\n\t\t[1359,\t\t422,\t\t0\t\t],\n\t\t[1363,\t\t426,\t\t0\t\t],\n\t\t[1364,\t\t427,\t\t0\t\t],\n\t\t[1365,\t\t428,\t\t0\t\t],\n\t\t[1366,\t\t429,\t\t0\t\t],\n\t\t[1367,\t\t430,\t\t0\t\t],\n\t\t[1368,\t\t431,\t\t0\t\t],\n\t\t[1369,\t\t432,\t\t0\t\t],\n\t\t[1370,\t\t433,\t\t0\t\t],\n\t\t[1371,\t\t434,\t\t0\t\t],\n\t\t[1372,\t\t435,\t\t0\t\t],\n\t\t[1373,\t\t436,\t\t0\t\t],\n\t\t[1374,\t\t437,\t\t0\t\t],\n\t\t[1375,\t\t438,\t\t0\t\t],\n\t\t[1376,\t\t439,\t\t0\t\t],\n\t\t[1377,\t\t440,\t\t0\t\t],\n\t\t[1378,\t\t441,\t\t0\t\t],\n\t\t[1379,\t\t442,\t\t0\t\t],\n\t\t[1381,\t\t445,\t\t0\t\t],\n\t\t[1382,\t\t446,\t\t0\t\t],\n\t\t[1383,\t\t447,\t\t0\t\t],\n\t\t[1387,\t\t451,\t\t0\t\t],\n\t\t[1390,\t\t455,\t\t0\t\t],\n\t\t[1391,\t\t456,\t\t0\t\t],\n\t\t[1393,\t\t458,\t\t0\t\t],\n\t\t[1394,\t\t459,\t\t0\t\t],\n\t\t[1395,\t\t460,\t\t0\t\t],\n\t\t[1396,\t\t461,\t\t0\t\t],\n\t\t[1397,\t\t462,\t\t0\t\t],\n\t\t[1398,\t\t463,\t\t0\t\t],\n\t\t[1399,\t\t464,\t\t0\t\t],\n\t\t[1400,\t\t465,\t\t0\t\t],\n\t\t[1401,\t\t466,\t\t0\t\t],\n\t\t[1402,\t\t467,\t\t0\t\t],\n\t\t[1403,\t\t468,\t\t0\t\t],\n\t\t[1404,\t\t469,\t\t0\t\t],\n\t\t[1405,\t\t470,\t\t0\t\t],\n\t\t[1406,\t\t471,\t\t0\t\t],\n\t\t[1407,\t\t472,\t\t0\t\t],\n\t\t[1408,\t\t473,\t\t0\t\t],\n\t\t[1409,\t\t474,\t\t0\t\t],\n\t\t[1410,\t\t475,\t\t0\t\t],\n\t\t[1411,\t\t476,\t\t0\t\t],\n\t\t[1412,\t\t477,\t\t0\t\t],\n\t\t[1413,\t\t478,\t\t0\t\t],\n\
t\t[1414,\t\t479,\t\t0\t\t],\n\t\t[1415,\t\t480,\t\t0\t\t],\n\t\t[1416,\t\t481,\t\t0\t\t],\n\t\t[1417,\t\t482,\t\t0\t\t],\n\t\t[1418,\t\t483,\t\t0\t\t],\n\t\t[1419,\t\t484,\t\t0\t\t],\n\t\t[1420,\t\t485,\t\t0\t\t],\n\t\t[1421,\t\t486,\t\t0\t\t],\n\t\t[1422,\t\t487,\t\t0\t\t],\n\t\t[1423,\t\t488,\t\t0\t\t],\n\t\t[1424,\t\t489,\t\t0\t\t],\n\t\t[1425,\t\t490,\t\t0\t\t],\n\t\t[1426,\t\t491,\t\t0\t\t],\n\t\t[1427,\t\t492,\t\t0\t\t],\n\t\t[1428,\t\t493,\t\t0\t\t],\n\t\t[1429,\t\t494,\t\t0\t\t],\n\t\t[1430,\t\t495,\t\t0\t\t],\n\t\t[1431,\t\t496,\t\t0\t\t],\n\t\t[1432,\t\t497,\t\t0\t\t],\n\t\t[1433,\t\t498,\t\t0\t\t],\n\t\t[1434,\t\t499,\t\t0\t\t],\n\t\t[1435,\t\t500,\t\t0\t\t],\n\t\t[1436,\t\t501,\t\t0\t\t],\n\t\t[1437,\t\t502,\t\t0\t\t],\n\t\t[1438,\t\t503,\t\t0\t\t],\n\t\t[1439,\t\t504,\t\t0\t\t],\n\t\t[1440,\t\t505,\t\t0\t\t],\n\t\t[1441,\t\t506,\t\t0\t\t],\n\t\t[1442,\t\t507,\t\t0\t\t],\n\t\t[1443,\t\t508,\t\t0\t\t],\n\t\t[1444,\t\t509,\t\t0\t\t],\n\t\t[1445,\t\t510,\t\t0\t\t],\n\t\t[1446,\t\t511,\t\t0\t\t],\n\t\t[1447,\t\t512,\t\t0\t\t],\n\t\t[1448,\t\t513,\t\t0\t\t],\n\t\t[1449,\t\t514,\t\t0\t\t],\n\t\t[1450,\t\t515,\t\t0\t\t],\n\t\t[1451,\t\t516,\t\t0\t\t],\n\t\t[1452,\t\t517,\t\t0\t\t],\n\t\t[1453,\t\t518,\t\t0\t\t],\n\t\t[1454,\t\t519,\t\t0\t\t],\n\t\t[1455,\t\t520,\t\t0\t\t],\n\t\t[1456,\t\t521,\t\t0\t\t],\n\t\t[1459,\t\t524,\t\t0\t\t],\n\t\t[1460,\t\t525,\t\t0\t\t],\n\t\t[1461,\t\t526,\t\t0\t\t],\n\t\t[1463,\t\t528,\t\t0\t\t],\n\t\t[1464,\t\t529,\t\t0\t\t],\n\t\t[1466,\t\t531,\t\t0\t\t],\n\t\t[1467,\t\t532,\t\t0\t\t],\n\t\t[1468,\t\t533,\t\t0\t\t],\n\t\t[1469,\t\t534,\t\t0\t\t],\n\t\t[1470,\t\t535,\t\t0\t\t],\n\t\t[1471,\t\t536,\t\t0\t\t],\n\t\t[1472,\t\t537,\t\t0\t\t],\n\t\t[1473,\t\t538,\t\t0\t\t],\n\t\t[1474,\t\t539,\t\t0\t\t],\n\t\t[1475,\t\t540,\t\t0\t\t],\n\t\t[1476,\t\t541,\t\t0\t\t],\n\t\t[1477,\t\t542,\t\t0\t\t],\n\t\t[1479,\t\t544,\t\t0\t\t],\n\t\t[1480,\t\t545,\t\t0\t\t],\n\t\t[1481,\t\t546,\t\t0\t\t],\n\t\t[1482,\t\t547,\t\t0\t\t],\n\t\t[1483,\t\t548,\t\t0\t\t],\n\t\t[1484,\t\t549,\t\t0\t\t],\n\t\t[1485,\t\t550,\t\t0\t\t],\n\t\t[1486,\t\t551,\t\t0\t\t],\n\t\t[1487,\t\t552,\t\t0\t\t],\n\t\t[1488,\t\t554,\t\t0\t\t],\n\t\t[1489,\t\t555,\t\t0\t\t],\n\t\t[1490,\t\t556,\t\t0\t\t],\n\t\t[1491,\t\t557,\t\t0\t\t],\n\t\t[1492,\t\t558,\t\t0\t\t],\n\t\t[1493,\t\t559,\t\t0\t\t],\n\t\t[1494,\t\t560,\t\t0\t\t],\n\t\t[1495,\t\t561,\t\t0\t\t],\n\t\t[1496,\t\t562,\t\t0\t\t],\n\t\t[1497,\t\t563,\t\t0\t\t],\n\t\t[1498,\t\t564,\t\t0\t\t],\n\t\t[1499,\t\t565,\t\t0\t\t],\n\t\t[1500,\t\t566,\t\t0\t\t],\n\t\t[1501,\t\t567,\t\t0\t\t],\n\t\t[1502,\t\t568,\t\t0\t\t],\n\t\t[1503,\t\t569,\t\t0\t\t],\n\t\t[1504,\t\t570,\t\t0\t\t],\n\t\t[1505,\t\t571,\t\t0\t\t],\n\t\t[1506,\t\t572,\t\t0\t\t],\n\t\t[1507,\t\t573,\t\t0\t\t],\n\t\t[1508,\t\t574,\t\t0\t\t],\n\t\t[1510,\t\t576,\t\t0\t\t],\n\t\t[1511,\t\t577,\t\t0\t\t],\n\t\t[1512,\t\t578,\t\t0\t\t],\n\t\t[1513,\t\t579,\t\t0\t\t],\n\t\t[1514,\t\t580,\t\t0\t\t],\n\t\t[1516,\t\t582,\t\t0\t\t],\n\t\t[1517,\t\t583,\t\t0\t\t],\n\t\t[1518,\t\t584,\t\t0\t\t],\n\t\t[1519,\t\t585,\t\t0\t\t],\n\t\t[1,\t\t490,\t\t0\t\t],\n\t\t[3,\t\t4,\t\t1\t\t],\n\t\t[491,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t5,\t\t0\t\t],\n\t\t[8,\t\t9,\t\t0\t\t],\n\t\t[492,\t\t11,\t\t0\t\t],\n\t\t[11,\t\t493,\t\t0\t\t],\n\t\t[492,\t\t493,\t\t1\t\t],\n\t\t[494,\t\t14,\t\t0\t\t],\n\t\t[13,\t\t15,\t\t0\t\t],\n\t\t[16,\t\t5,\t\t0\t\t],\n\t\t[17,\t\t18,\t\t1\t\t],\n\t\t[17,\t\t12,\t\t0\t\t],\n\t\t[14,\t\t495,\t\t0\t\t],\n\t\t[494,\t\t19,\t\t0\t\t],\n\t\t[20,\t\t21,\t\t0\t\t],\n\t\t[20,\t\t22,\t\t1\t\t],\n\t\t[497,\t
\t23,\t\t0\t\t],\n\t\t[23,\t\t499,\t\t1\t\t],\n\t\t[25,\t\t26,\t\t0\t\t],\n\t\t[25,\t\t22,\t\t0\t\t],\n\t\t[23,\t\t27,\t\t0\t\t],\n\t\t[28,\t\t23,\t\t0\t\t],\n\t\t[8,\t\t21,\t\t0\t\t],\n\t\t[9,\t\t29,\t\t0\t\t],\n\t\t[30,\t\t25,\t\t1\t\t],\n\t\t[31,\t\t32,\t\t1\t\t],\n\t\t[32,\t\t33,\t\t1\t\t],\n\t\t[34,\t\t35,\t\t0\t\t],\n\t\t[35,\t\t36,\t\t0\t\t],\n\t\t[490,\t\t6,\t\t1\t\t],\n\t\t[37,\t\t10,\t\t1\t\t],\n\t\t[10,\t\t38,\t\t0\t\t],\n\t\t[37,\t\t38,\t\t1\t\t],\n\t\t[39,\t\t40,\t\t1\t\t],\n\t\t[39,\t\t41,\t\t1\t\t],\n\t\t[42,\t\t41,\t\t1\t\t],\n\t\t[18,\t\t42,\t\t1\t\t],\n\t\t[492,\t\t43,\t\t1\t\t],\n\t\t[44,\t\t45,\t\t0\t\t],\n\t\t[44,\t\t505,\t\t0\t\t],\n\t\t[46,\t\t12,\t\t0\t\t],\n\t\t[47,\t\t48,\t\t0\t\t],\n\t\t[49,\t\t50,\t\t0\t\t],\n\t\t[31,\t\t33,\t\t1\t\t],\n\t\t[31,\t\t51,\t\t0\t\t],\n\t\t[52,\t\t53,\t\t1\t\t],\n\t\t[52,\t\t54,\t\t0\t\t],\n\t\t[506,\t\t55,\t\t0\t\t],\n\t\t[506,\t\t507,\t\t1\t\t],\n\t\t[57,\t\t506,\t\t0\t\t],\n\t\t[57,\t\t58,\t\t0\t\t],\n\t\t[58,\t\t506,\t\t0\t\t],\n\t\t[59,\t\t60,\t\t1\t\t],\n\t\t[508,\t\t62,\t\t0\t\t],\n\t\t[30,\t\t61,\t\t1\t\t],\n\t\t[63,\t\t506,\t\t0\t\t],\n\t\t[13,\t\t64,\t\t0\t\t],\n\t\t[65,\t\t66,\t\t1\t\t],\n\t\t[59,\t\t67,\t\t0\t\t],\n\t\t[61,\t\t67,\t\t0\t\t],\n\t\t[68,\t\t69,\t\t1\t\t],\n\t\t[70,\t\t69,\t\t1\t\t],\n\t\t[71,\t\t72,\t\t1\t\t],\n\t\t[73,\t\t74,\t\t1\t\t],\n\t\t[37,\t\t75,\t\t1\t\t],\n\t\t[72,\t\t75,\t\t0\t\t],\n\t\t[37,\t\t72,\t\t1\t\t],\n\t\t[76,\t\t77,\t\t1\t\t],\n\t\t[77,\t\t51,\t\t0\t\t],\n\t\t[73,\t\t72,\t\t1\t\t],\n\t\t[18,\t\t40,\t\t1\t\t],\n\t\t[492,\t\t45,\t\t1\t\t],\n\t\t[10,\t\t74,\t\t1\t\t],\n\t\t[45,\t\t511,\t\t1\t\t],\n\t\t[78,\t\t32,\t\t1\t\t],\n\t\t[79,\t\t80,\t\t0\t\t],\n\t\t[81,\t\t79,\t\t1\t\t],\n\t\t[34,\t\t82,\t\t0\t\t],\n\t\t[83,\t\t84,\t\t0\t\t],\n\t\t[83,\t\t499,\t\t0\t\t],\n\t\t[85,\t\t86,\t\t0\t\t],\n\t\t[87,\t\t86,\t\t1\t\t],\n\t\t[88,\t\t89,\t\t0\t\t],\n\t\t[90,\t\t86,\t\t1\t\t],\n\t\t[91,\t\t86,\t\t0\t\t],\n\t\t[86,\t\t92,\t\t0\t\t],\n\t\t[86,\t\t93,\t\t0\t\t],\n\t\t[94,\t\t86,\t\t1\t\t],\n\t\t[86,\t\t95,\t\t1\t\t],\n\t\t[513,\t\t517,\t\t0\t\t],\n\t\t[97,\t\t66,\t\t1\t\t],\n\t\t[42,\t\t98,\t\t0\t\t],\n\t\t[99,\t\t100,\t\t1\t\t],\n\t\t[42,\t\t101,\t\t0\t\t],\n\t\t[102,\t\t42,\t\t1\t\t],\n\t\t[103,\t\t87,\t\t0\t\t],\n\t\t[104,\t\t103,\t\t0\t\t],\n\t\t[105,\t\t87,\t\t0\t\t],\n\t\t[106,\t\t107,\t\t0\t\t],\n\t\t[108,\t\t107,\t\t0\t\t],\n\t\t[109,\t\t106,\t\t0\t\t],\n\t\t[110,\t\t111,\t\t1\t\t],\n\t\t[87,\t\t112,\t\t0\t\t],\n\t\t[113,\t\t87,\t\t0\t\t],\n\t\t[87,\t\t85,\t\t1\t\t],\n\t\t[110,\t\t114,\t\t1\t\t],\n\t\t[115,\t\t116,\t\t0\t\t],\n\t\t[117,\t\t118,\t\t0\t\t],\n\t\t[117,\t\t119,\t\t0\t\t],\n\t\t[117,\t\t120,\t\t1\t\t],\n\t\t[121,\t\t122,\t\t0\t\t],\n\t\t[123,\t\t124,\t\t0\t\t],\n\t\t[125,\t\t126,\t\t0\t\t],\n\t\t[127,\t\t119,\t\t0\t\t],\n\t\t[118,\t\t128,\t\t0\t\t],\n\t\t[121,\t\t119,\t\t0\t\t],\n\t\t[530,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t130,\t\t0\t\t],\n\t\t[125,\t\t123,\t\t0\t\t],\n\t\t[131,\t\t132,\t\t0\t\t],\n\t\t[133,\t\t123,\t\t0\t\t],\n\t\t[524,\t\t134,\t\t0\t\t],\n\t\t[135,\t\t136,\t\t0\t\t],\n\t\t[123,\t\t131,\t\t0\t\t],\n\t\t[117,\t\t128,\t\t1\t\t],\n\t\t[137,\t\t521,\t\t0\t\t],\n\t\t[531,\t\t514,\t\t0\t\t],\n\t\t[139,\t\t521,\t\t0\t\t],\n\t\t[140,\t\t514,\t\t0\t\t],\n\t\t[522,\t\t141,\t\t0\t\t],\n\t\t[142,\t\t523,\t\t0\t\t],\n\t\t[530,\t\t526,\t\t0\t\t],\n\t\t[140,\t\t532,\t\t0\t\t],\n\t\t[142,\t\t144,\t\t0\t\t],\n\t\t[140,\t\t522,\t\t0\t\t],\n\t\t[145,\t\t146,\t\t0\t\t],\n\t\t[147,\t\t523,\t\t0\t\t],\n\t\t[144,\t\t523,\t\t0\t\t],\n\t\t[139,\t\t523,\t\t0\t\t],\n\t\t[140,\t\t141,\t\t0\
t\t],\n\t\t[528,\t\t526,\t\t0\t\t],\n\t\t[528,\t\t148,\t\t0\t\t],\n\t\t[149,\t\t150,\t\t0\t\t],\n\t\t[145,\t\t528,\t\t0\t\t],\n\t\t[530,\t\t151,\t\t0\t\t],\n\t\t[524,\t\t152,\t\t0\t\t],\n\t\t[149,\t\t525,\t\t1\t\t],\n\t\t[139,\t\t514,\t\t0\t\t],\n\t\t[126,\t\t120,\t\t1\t\t],\n\t\t[530,\t\t153,\t\t0\t\t],\n\t\t[528,\t\t147,\t\t1\t\t],\n\t\t[528,\t\t154,\t\t0\t\t],\n\t\t[130,\t\t120,\t\t1\t\t],\n\t\t[528,\t\t155,\t\t1\t\t],\n\t\t[524,\t\t533,\t\t0\t\t],\n\t\t[524,\t\t149,\t\t0\t\t],\n\t\t[154,\t\t150,\t\t0\t\t],\n\t\t[157,\t\t110,\t\t1\t\t],\n\t\t[119,\t\t158,\t\t0\t\t],\n\t\t[159,\t\t60,\t\t0\t\t],\n\t\t[536,\t\t161,\t\t0\t\t],\n\t\t[115,\t\t151,\t\t0\t\t],\n\t\t[162,\t\t134,\t\t0\t\t],\n\t\t[115,\t\t526,\t\t0\t\t],\n\t\t[138,\t\t87,\t\t0\t\t],\n\t\t[123,\t\t163,\t\t0\t\t],\n\t\t[112,\t\t164,\t\t0\t\t],\n\t\t[112,\t\t165,\t\t0\t\t],\n\t\t[166,\t\t165,\t\t0\t\t],\n\t\t[167,\t\t537,\t\t0\t\t],\n\t\t[168,\t\t104,\t\t0\t\t],\n\t\t[531,\t\t520,\t\t0\t\t],\n\t\t[139,\t\t520,\t\t0\t\t],\n\t\t[520,\t\t169,\t\t0\t\t],\n\t\t[168,\t\t105,\t\t0\t\t],\n\t\t[520,\t\t170,\t\t0\t\t],\n\t\t[171,\t\t89,\t\t0\t\t],\n\t\t[521,\t\t172,\t\t0\t\t],\n\t\t[123,\t\t173,\t\t0\t\t],\n\t\t[521,\t\t174,\t\t0\t\t],\n\t\t[37,\t\t39,\t\t0\t\t],\n\t\t[530,\t\t175,\t\t0\t\t],\n\t\t[530,\t\t176,\t\t0\t\t],\n\t\t[88,\t\t530,\t\t0\t\t],\n\t\t[177,\t\t496,\t\t1\t\t],\n\t\t[178,\t\t525,\t\t0\t\t],\n\t\t[179,\t\t493,\t\t1\t\t],\n\t\t[180,\t\t181,\t\t1\t\t],\n\t\t[182,\t\t180,\t\t0\t\t],\n\t\t[179,\t\t181,\t\t0\t\t],\n\t\t[180,\t\t493,\t\t1\t\t],\n\t\t[183,\t\t30,\t\t0\t\t],\n\t\t[183,\t\t21,\t\t0\t\t],\n\t\t[538,\t\t185,\t\t0\t\t],\n\t\t[538,\t\t89,\t\t0\t\t],\n\t\t[184,\t\t186,\t\t0\t\t],\n\t\t[184,\t\t187,\t\t0\t\t],\n\t\t[520,\t\t172,\t\t0\t\t],\n\t\t[89,\t\t175,\t\t0\t\t],\n\t\t[185,\t\t89,\t\t0\t\t],\n\t\t[89,\t\t188,\t\t0\t\t],\n\t\t[189,\t\t190,\t\t0\t\t],\n\t\t[539,\t\t172,\t\t0\t\t],\n\t\t[504,\t\t192,\t\t0\t\t],\n\t\t[105,\t\t186,\t\t0\t\t],\n\t\t[105,\t\t187,\t\t0\t\t],\n\t\t[539,\t\t193,\t\t0\t\t],\n\t\t[187,\t\t194,\t\t0\t\t],\n\t\t[539,\t\t540,\t\t0\t\t],\n\t\t[539,\t\t196,\t\t0\t\t],\n\t\t[197,\t\t540,\t\t0\t\t],\n\t\t[110,\t\t198,\t\t0\t\t],\n\t\t[197,\t\t539,\t\t0\t\t],\n\t\t[199,\t\t537,\t\t0\t\t],\n\t\t[134,\t\t526,\t\t0\t\t],\n\t\t[200,\t\t193,\t\t0\t\t],\n\t\t[4,\t\t201,\t\t1\t\t],\n\t\t[202,\t\t86,\t\t0\t\t],\n\t\t[85,\t\t203,\t\t0\t\t],\n\t\t[147,\t\t204,\t\t0\t\t],\n\t\t[147,\t\t205,\t\t0\t\t],\n\t\t[123,\t\t206,\t\t0\t\t],\n\t\t[537,\t\t207,\t\t0\t\t],\n\t\t[165,\t\t208,\t\t0\t\t],\n\t\t[4,\t\t94,\t\t1\t\t],\n\t\t[4,\t\t2,\t\t0\t\t],\n\t\t[209,\t\t4,\t\t0\t\t],\n\t\t[119,\t\t163,\t\t0\t\t],\n\t\t[210,\t\t3,\t\t0\t\t],\n\t\t[99,\t\t211,\t\t0\t\t],\n\t\t[99,\t\t69,\t\t1\t\t],\n\t\t[212,\t\t99,\t\t0\t\t],\n\t\t[213,\t\t214,\t\t0\t\t],\n\t\t[510,\t\t215,\t\t0\t\t],\n\t\t[128,\t\t69,\t\t1\t\t],\n\t\t[216,\t\t69,\t\t1\t\t],\n\t\t[217,\t\t98,\t\t0\t\t],\n\t\t[504,\t\t218,\t\t0\t\t],\n\t\t[177,\t\t504,\t\t1\t\t],\n\t\t[219,\t\t209,\t\t0\t\t],\n\t\t[219,\t\t220,\t\t0\t\t],\n\t\t[94,\t\t95,\t\t1\t\t],\n\t\t[159,\t\t221,\t\t1\t\t],\n\t\t[34,\t\t161,\t\t0\t\t],\n\t\t[222,\t\t221,\t\t0\t\t],\n\t\t[211,\t\t52,\t\t1\t\t],\n\t\t[215,\t\t223,\t\t1\t\t],\n\t\t[224,\t\t215,\t\t0\t\t],\n\t\t[225,\t\t224,\t\t1\t\t],\n\t\t[224,\t\t223,\t\t0\t\t],\n\t\t[226,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t3,\t\t1\t\t],\n\t\t[216,\t\t227,\t\t1\t\t],\n\t\t[228,\t\t229,\t\t0\t\t],\n\t\t[227,\t\t230,\t\t0\t\t],\n\t\t[231,\t\t53,\t\t1\t\t],\n\t\t[544,\t\t545,\t\t0\t\t],\n\t\t[234,\t\t235,\t\t1\t\t],\n\t\t[546,\t\t214,\t\t1\t\t],\n\t\t[233,\t\t227,\t\t0\
t\t],\n\t\t[237,\t\t238,\t\t0\t\t],\n\t\t[212,\t\t100,\t\t0\t\t],\n\t\t[519,\t\t239,\t\t0\t\t],\n\t\t[238,\t\t519,\t\t0\t\t],\n\t\t[213,\t\t240,\t\t0\t\t],\n\t\t[241,\t\t242,\t\t1\t\t],\n\t\t[70,\t\t241,\t\t0\t\t],\n\t\t[509,\t\t213,\t\t0\t\t],\n\t\t[68,\t\t243,\t\t0\t\t],\n\t\t[243,\t\t244,\t\t0\t\t],\n\t\t[68,\t\t244,\t\t0\t\t],\n\t\t[544,\t\t547,\t\t1\t\t],\n\t\t[245,\t\t227,\t\t1\t\t],\n\t\t[246,\t\t208,\t\t0\t\t],\n\t\t[112,\t\t208,\t\t0\t\t],\n\t\t[165,\t\t247,\t\t0\t\t],\n\t\t[537,\t\t549,\t\t0\t\t],\n\t\t[537,\t\t550,\t\t0\t\t],\n\t\t[537,\t\t551,\t\t0\t\t],\n\t\t[110,\t\t251,\t\t0\t\t],\n\t\t[510,\t\t252,\t\t1\t\t],\n\t\t[529,\t\t253,\t\t1\t\t],\n\t\t[237,\t\t239,\t\t1\t\t],\n\t\t[254,\t\t238,\t\t1\t\t],\n\t\t[69,\t\t255,\t\t0\t\t],\n\t\t[510,\t\t225,\t\t1\t\t],\n\t\t[256,\t\t257,\t\t0\t\t],\n\t\t[258,\t\t190,\t\t0\t\t],\n\t\t[258,\t\t259,\t\t0\t\t],\n\t\t[260,\t\t261,\t\t1\t\t],\n\t\t[554,\t\t553,\t\t1\t\t],\n\t\t[515,\t\t263,\t\t0\t\t],\n\t\t[14,\t\t264,\t\t1\t\t],\n\t\t[116,\t\t555,\t\t0\t\t],\n\t\t[151,\t\t116,\t\t0\t\t],\n\t\t[111,\t\t114,\t\t1\t\t],\n\t\t[77,\t\t111,\t\t0\t\t],\n\t\t[266,\t\t525,\t\t0\t\t],\n\t\t[267,\t\t120,\t\t1\t\t],\n\t\t[268,\t\t269,\t\t0\t\t],\n\t\t[556,\t\t271,\t\t0\t\t],\n\t\t[556,\t\t272,\t\t0\t\t],\n\t\t[529,\t\t273,\t\t0\t\t],\n\t\t[128,\t\t274,\t\t0\t\t],\n\t\t[34,\t\t275,\t\t0\t\t],\n\t\t[503,\t\t276,\t\t0\t\t],\n\t\t[503,\t\t504,\t\t1\t\t],\n\t\t[177,\t\t218,\t\t1\t\t],\n\t\t[277,\t\t278,\t\t1\t\t],\n\t\t[557,\t\t558,\t\t1\t\t],\n\t\t[557,\t\t559,\t\t1\t\t],\n\t\t[559,\t\t558,\t\t1\t\t],\n\t\t[277,\t\t78,\t\t1\t\t],\n\t\t[277,\t\t279,\t\t1\t\t],\n\t\t[78,\t\t279,\t\t0\t\t],\n\t\t[281,\t\t282,\t\t0\t\t],\n\t\t[283,\t\t161,\t\t1\t\t],\n\t\t[268,\t\t161,\t\t1\t\t],\n\t\t[256,\t\t284,\t\t0\t\t],\n\t\t[515,\t\t516,\t\t1\t\t],\n\t\t[263,\t\t516,\t\t0\t\t],\n\t\t[516,\t\t285,\t\t0\t\t],\n\t\t[63,\t\t286,\t\t0\t\t],\n\t\t[287,\t\t516,\t\t0\t\t],\n\t\t[8,\t\t102,\t\t1\t\t],\n\t\t[8,\t\t101,\t\t1\t\t],\n\t\t[80,\t\t288,\t\t0\t\t],\n\t\t[80,\t\t289,\t\t0\t\t],\n\t\t[276,\t\t560,\t\t0\t\t],\n\t\t[37,\t\t290,\t\t0\t\t],\n\t\t[290,\t\t74,\t\t1\t\t],\n\t\t[512,\t\t291,\t\t0\t\t],\n\t\t[78,\t\t292,\t\t1\t\t],\n\t\t[199,\t\t548,\t\t0\t\t],\n\t\t[491,\t\t293,\t\t0\t\t],\n\t\t[4,\t\t294,\t\t0\t\t],\n\t\t[490,\t\t541,\t\t1\t\t],\n\t\t[491,\t\t295,\t\t0\t\t],\n\t\t[491,\t\t296,\t\t0\t\t],\n\t\t[295,\t\t297,\t\t0\t\t],\n\t\t[508,\t\t161,\t\t0\t\t],\n\t\t[117,\t\t123,\t\t0\t\t],\n\t\t[133,\t\t117,\t\t0\t\t],\n\t\t[71,\t\t74,\t\t1\t\t],\n\t\t[74,\t\t278,\t\t1\t\t],\n\t\t[298,\t\t515,\t\t0\t\t],\n\t\t[5,\t\t299,\t\t0\t\t],\n\t\t[32,\t\t292,\t\t1\t\t],\n\t\t[5,\t\t29,\t\t1\t\t],\n\t\t[503,\t\t560,\t\t0\t\t],\n\t\t[300,\t\t301,\t\t1\t\t],\n\t\t[51,\t\t300,\t\t0\t\t],\n\t\t[244,\t\t302,\t\t1\t\t],\n\t\t[31,\t\t302,\t\t1\t\t],\n\t\t[51,\t\t282,\t\t1\t\t],\n\t\t[303,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t259,\t\t0\t\t],\n\t\t[306,\t\t307,\t\t1\t\t],\n\t\t[305,\t\t308,\t\t0\t\t],\n\t\t[305,\t\t309,\t\t0\t\t],\n\t\t[310,\t\t309,\t\t1\t\t],\n\t\t[306,\t\t309,\t\t1\t\t],\n\t\t[311,\t\t280,\t\t0\t\t],\n\t\t[280,\t\t278,\t\t1\t\t],\n\t\t[311,\t\t32,\t\t1\t\t],\n\t\t[13,\t\t312,\t\t1\t\t],\n\t\t[313,\t\t314,\t\t0\t\t],\n\t\t[312,\t\t313,\t\t1\t\t],\n\t\t[547,\t\t566,\t\t1\t\t],\n\t\t[245,\t\t315,\t\t1\t\t],\n\t\t[312,\t\t316,\t\t0\t\t],\n\t\t[312,\t\t314,\t\t0\t\t],\n\t\t[554,\t\t546,\t\t1\t\t],\n\t\t[262,\t\t216,\t\t1\t\t],\n\t\t[317,\t\t233,\t\t0\t\t],\n\t\t[318,\t\t317,\t\t0\t\t],\n\t\t[231,\t\t52,\t\t1\t\t],\n\t\t[319,\t\t567,\t\t0\t\t],\n\t\t[557,\t\t3
21,\t\t0\t\t],\n\t\t[277,\t\t65,\t\t1\t\t],\n\t\t[322,\t\t288,\t\t1\t\t],\n\t\t[322,\t\t323,\t\t0\t\t],\n\t\t[277,\t\t324,\t\t1\t\t],\n\t\t[324,\t\t325,\t\t0\t\t],\n\t\t[277,\t\t325,\t\t0\t\t],\n\t\t[326,\t\t327,\t\t0\t\t],\n\t\t[328,\t\t326,\t\t1\t\t],\n\t\t[328,\t\t327,\t\t1\t\t],\n\t\t[326,\t\t329,\t\t0\t\t],\n\t\t[568,\t\t329,\t\t1\t\t],\n\t\t[568,\t\t326,\t\t0\t\t],\n\t\t[332,\t\t78,\t\t1\t\t],\n\t\t[333,\t\t306,\t\t0\t\t],\n\t\t[332,\t\t333,\t\t0\t\t],\n\t\t[332,\t\t334,\t\t0\t\t],\n\t\t[66,\t\t334,\t\t1\t\t],\n\t\t[330,\t\t335,\t\t1\t\t],\n\t\t[336,\t\t66,\t\t0\t\t],\n\t\t[330,\t\t336,\t\t1\t\t],\n\t\t[68,\t\t70,\t\t0\t\t],\n\t\t[509,\t\t337,\t\t1\t\t],\n\t\t[324,\t\t288,\t\t0\t\t],\n\t\t[338,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t340,\t\t1\t\t],\n\t\t[559,\t\t340,\t\t1\t\t],\n\t\t[341,\t\t292,\t\t0\t\t],\n\t\t[557,\t\t342,\t\t0\t\t],\n\t\t[558,\t\t343,\t\t0\t\t],\n\t\t[502,\t\t340,\t\t1\t\t],\n\t\t[72,\t\t32,\t\t1\t\t],\n\t\t[344,\t\t345,\t\t0\t\t],\n\t\t[346,\t\t47,\t\t0\t\t],\n\t\t[46,\t\t47,\t\t0\t\t],\n\t\t[346,\t\t345,\t\t0\t\t],\n\t\t[347,\t\t328,\t\t0\t\t],\n\t\t[347,\t\t348,\t\t1\t\t],\n\t\t[571,\t\t348,\t\t1\t\t],\n\t\t[347,\t\t572,\t\t0\t\t],\n\t\t[571,\t\t570,\t\t1\t\t],\n\t\t[14,\t\t350,\t\t0\t\t],\n\t\t[350,\t\t573,\t\t0\t\t],\n\t\t[15,\t\t351,\t\t1\t\t],\n\t\t[352,\t\t15,\t\t0\t\t],\n\t\t[15,\t\t335,\t\t1\t\t],\n\t\t[232,\t\t227,\t\t0\t\t],\n\t\t[565,\t\t544,\t\t1\t\t],\n\t\t[235,\t\t567,\t\t1\t\t],\n\t\t[567,\t\t286,\t\t0\t\t],\n\t\t[353,\t\t519,\t\t0\t\t],\n\t\t[354,\t\t353,\t\t0\t\t],\n\t\t[355,\t\t354,\t\t0\t\t],\n\t\t[354,\t\t356,\t\t0\t\t],\n\t\t[357,\t\t358,\t\t0\t\t],\n\t\t[574,\t\t359,\t\t0\t\t],\n\t\t[235,\t\t575,\t\t0\t\t],\n\t\t[167,\t\t361,\t\t0\t\t],\n\t\t[528,\t\t362,\t\t0\t\t],\n\t\t[363,\t\t344,\t\t0\t\t],\n\t\t[259,\t\t364,\t\t1\t\t],\n\t\t[54,\t\t56,\t\t0\t\t],\n\t\t[365,\t\t364,\t\t0\t\t],\n\t\t[231,\t\t366,\t\t0\t\t],\n\t\t[30,\t\t367,\t\t0\t\t],\n\t\t[61,\t\t367,\t\t1\t\t],\n\t\t[254,\t\t368,\t\t0\t\t],\n\t\t[254,\t\t369,\t\t0\t\t],\n\t\t[254,\t\t370,\t\t0\t\t],\n\t\t[99,\t\t358,\t\t0\t\t],\n\t\t[354,\t\t519,\t\t0\t\t],\n\t\t[571,\t\t371,\t\t0\t\t],\n\t\t[207,\t\t372,\t\t0\t\t],\n\t\t[57,\t\t373,\t\t0\t\t],\n\t\t[209,\t\t374,\t\t0\t\t],\n\t\t[375,\t\t376,\t\t0\t\t],\n\t\t[376,\t\t377,\t\t0\t\t],\n\t\t[16,\t\t49,\t\t0\t\t],\n\t\t[318,\t\t377,\t\t0\t\t],\n\t\t[378,\t\t297,\t\t0\t\t],\n\t\t[562,\t\t379,\t\t0\t\t],\n\t\t[576,\t\t563,\t\t0\t\t],\n\t\t[576,\t\t381,\t\t0\t\t],\n\t\t[577,\t\t576,\t\t1\t\t],\n\t\t[244,\t\t383,\t\t0\t\t],\n\t\t[244,\t\t306,\t\t1\t\t],\n\t\t[383,\t\t306,\t\t1\t\t],\n\t\t[380,\t\t306,\t\t0\t\t],\n\t\t[252,\t\t225,\t\t0\t\t],\n\t\t[220,\t\t76,\t\t0\t\t],\n\t\t[542,\t\t384,\t\t0\t\t],\n\t\t[385,\t\t384,\t\t0\t\t],\n\t\t[542,\t\t385,\t\t0\t\t],\n\t\t[386,\t\t385,\t\t0\t\t],\n\t\t[387,\t\t578,\t\t0\t\t],\n\t\t[332,\t\t388,\t\t1\t\t],\n\t\t[382,\t\t332,\t\t1\t\t],\n\t\t[382,\t\t388,\t\t0\t\t],\n\t\t[579,\t\t578,\t\t0\t\t],\n\t\t[577,\t\t387,\t\t1\t\t],\n\t\t[144,\t\t390,\t\t0\t\t],\n\t\t[37,\t\t49,\t\t0\t\t],\n\t\t[391,\t\t233,\t\t0\t\t],\n\t\t[392,\t\t310,\t\t0\t\t],\n\t\t[260,\t\t393,\t\t0\t\t],\n\t\t[394,\t\t230,\t\t0\t\t],\n\t\t[395,\t\t282,\t\t1\t\t],\n\t\t[395,\t\t244,\t\t0\t\t],\n\t\t[25,\t\t396,\t\t1\t\t],\n\t\t[81,\t\t74,\t\t0\t\t],\n\t\t[278,\t\t80,\t\t1\t\t],\n\t\t[81,\t\t278,\t\t1\t\t],\n\t\t[569,\t\t570,\t\t0\t\t],\n\t\t[397,\t\t552,\t\t0\t\t],\n\t\t[542,\t\t398,\t\t0\t\t],\n\t\t[398,\t\t385,\t\t0\t\t],\n\t\t[399,\t\t499,\t\t0\t\t],\n\t\t[83,\t\t399,\t\t0\t\t],\n\t\t[498,\t\t400,\t\t0\t\t],\n\
t\t[518,\t\t239,\t\t1\t\t],\n\t\t[575,\t\t543,\t\t0\t\t],\n\t\t[401,\t\t360,\t\t0\t\t],\n\t\t[580,\t\t581,\t\t0\t\t],\n\t\t[401,\t\t402,\t\t0\t\t],\n\t\t[403,\t\t231,\t\t0\t\t],\n\t\t[189,\t\t360,\t\t1\t\t],\n\t\t[234,\t\t404,\t\t0\t\t],\n\t\t[235,\t\t404,\t\t1\t\t],\n\t\t[235,\t\t580,\t\t0\t\t],\n\t\t[216,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t318,\t\t0\t\t],\n\t\t[406,\t\t230,\t\t0\t\t],\n\t\t[542,\t\t407,\t\t0\t\t],\n\t\t[23,\t\t408,\t\t0\t\t],\n\t\t[577,\t\t348,\t\t0\t\t],\n\t\t[562,\t\t564,\t\t1\t\t],\n\t\t[582,\t\t507,\t\t0\t\t],\n\t\t[27,\t\t410,\t\t0\t\t],\n\t\t[501,\t\t27,\t\t0\t\t],\n\t\t[27,\t\t411,\t\t0\t\t],\n\t\t[411,\t\t410,\t\t0\t\t],\n\t\t[403,\t\t360,\t\t0\t\t],\n\t\t[412,\t\t360,\t\t0\t\t],\n\t\t[326,\t\t413,\t\t0\t\t],\n\t\t[414,\t\t413,\t\t0\t\t],\n\t\t[6,\t\t297,\t\t0\t\t],\n\t\t[554,\t\t580,\t\t1\t\t],\n\t\t[262,\t\t401,\t\t1\t\t],\n\t\t[499,\t\t556,\t\t1\t\t],\n\t\t[224,\t\t229,\t\t0\t\t],\n\t\t[583,\t\t507,\t\t0\t\t],\n\t\t[415,\t\t307,\t\t0\t\t],\n\t\t[416,\t\t507,\t\t0\t\t],\n\t\t[284,\t\t561,\t\t0\t\t],\n\t\t[543,\t\t417,\t\t0\t\t],\n\t\t[418,\t\t506,\t\t0\t\t],\n\t\t[220,\t\t157,\t\t0\t\t],\n\t\t[295,\t\t419,\t\t0\t\t],\n\t\t[295,\t\t420,\t\t0\t\t],\n\t\t[541,\t\t62,\t\t0\t\t],\n\t\t[52,\t\t421,\t\t0\t\t],\n\t\t[60,\t\t160,\t\t0\t\t],\n\t\t[535,\t\t161,\t\t0\t\t],\n\t\t[267,\t\t282,\t\t0\t\t],\n\t\t[52,\t\t365,\t\t0\t\t],\n\t\t[28,\t\t27,\t\t0\t\t],\n\t\t[30,\t\t201,\t\t1\t\t],\n\t\t[422,\t\t81,\t\t0\t\t],\n\t\t[119,\t\t425,\t\t0\t\t],\n\t\t[423,\t\t425,\t\t0\t\t],\n\t\t[424,\t\t425,\t\t0\t\t],\n\t\t[426,\t\t428,\t\t0\t\t],\n\t\t[427,\t\t428,\t\t0\t\t],\n\t\t[19,\t\t428,\t\t1\t\t],\n\t\t[45,\t\t429,\t\t0\t\t],\n\t\t[44,\t\t429,\t\t0\t\t],\n\t\t[505,\t\t429,\t\t0\t\t],\n\t\t[231,\t\t431,\t\t1\t\t],\n\t\t[190,\t\t431,\t\t1\t\t],\n\t\t[430,\t\t431,\t\t0\t\t],\n\t\t[286,\t\t433,\t\t0\t\t],\n\t\t[432,\t\t433,\t\t0\t\t],\n\t\t[506,\t\t433,\t\t0\t\t],\n\t\t[23,\t\t434,\t\t0\t\t],\n\t\t[400,\t\t434,\t\t0\t\t],\n\t\t[500,\t\t434,\t\t0\t\t],\n\t\t[32,\t\t436,\t\t0\t\t],\n\t\t[435,\t\t436,\t\t0\t\t],\n\t\t[78,\t\t436,\t\t1\t\t],\n\t\t[86,\t\t438,\t\t1\t\t],\n\t\t[437,\t\t438,\t\t0\t\t],\n\t\t[221,\t\t438,\t\t0\t\t],\n\t\t[207,\t\t439,\t\t0\t\t],\n\t\t[516,\t\t439,\t\t0\t\t],\n\t\t[513,\t\t439,\t\t0\t\t],\n\t\t[181,\t\t441,\t\t1\t\t],\n\t\t[440,\t\t441,\t\t0\t\t],\n\t\t[504,\t\t441,\t\t1\t\t],\n\t\t[135,\t\t442,\t\t0\t\t],\n\t\t[109,\t\t442,\t\t0\t\t],\n\t\t[112,\t\t442,\t\t0\t\t],\n\t\t[113,\t\t443,\t\t0\t\t],\n\t\t[132,\t\t443,\t\t0\t\t],\n\t\t[107,\t\t443,\t\t0\t\t],\n\t\t[444,\t\t445,\t\t0\t\t],\n\t\t[112,\t\t445,\t\t0\t\t],\n\t\t[109,\t\t445,\t\t0\t\t],\n\t\t[119,\t\t447,\t\t1\t\t],\n\t\t[100,\t\t447,\t\t1\t\t],\n\t\t[446,\t\t447,\t\t0\t\t],\n\t\t[124,\t\t448,\t\t0\t\t],\n\t\t[125,\t\t448,\t\t0\t\t],\n\t\t[131,\t\t448,\t\t0\t\t],\n\t\t[449,\t\t450,\t\t0\t\t],\n\t\t[173,\t\t450,\t\t0\t\t],\n\t\t[184,\t\t450,\t\t0\t\t],\n\t\t[144,\t\t451,\t\t0\t\t],\n\t\t[140,\t\t451,\t\t0\t\t],\n\t\t[514,\t\t451,\t\t0\t\t],\n\t\t[537,\t\t585,\t\t1\t\t],\n\t\t[141,\t\t585,\t\t0\t\t],\n\t\t[584,\t\t585,\t\t0\t\t],\n\t\t[522,\t\t454,\t\t0\t\t],\n\t\t[144,\t\t454,\t\t0\t\t],\n\t\t[453,\t\t454,\t\t0\t\t],\n\t\t[199,\t\t456,\t\t0\t\t],\n\t\t[140,\t\t456,\t\t0\t\t],\n\t\t[455,\t\t456,\t\t0\t\t],\n\t\t[537,\t\t456,\t\t0\t\t],\n\t\t[538,\t\t457,\t\t0\t\t],\n\t\t[153,\t\t457,\t\t0\t\t],\n\t\t[176,\t\t457,\t\t0\t\t],\n\t\t[524,\t\t459,\t\t0\t\t],\n\t\t[458,\t\t459,\t\t0\t\t],\n\t\t[134,\t\t459,\t\t0\t\t],\n\t\t[460,\t\t461,\t\t0\t\t],\n\t\t[150,\t\t461,\t\t0\t\t],\n\t\t[14
9,\t\t461,\t\t0\t\t],\n\t\t[521,\t\t463,\t\t0\t\t],\n\t\t[462,\t\t463,\t\t0\t\t],\n\t\t[538,\t\t463,\t\t0\t\t],\n\t\t[110,\t\t464,\t\t0\t\t],\n\t\t[90,\t\t464,\t\t0\t\t],\n\t\t[165,\t\t464,\t\t0\t\t],\n\t\t[458,\t\t465,\t\t0\t\t],\n\t\t[134,\t\t465,\t\t0\t\t],\n\t\t[524,\t\t465,\t\t0\t\t],\n\t\t[466,\t\t467,\t\t0\t\t],\n\t\t[110,\t\t467,\t\t0\t\t],\n\t\t[165,\t\t467,\t\t0\t\t],\n\t\t[468,\t\t469,\t\t0\t\t],\n\t\t[541,\t\t469,\t\t0\t\t],\n\t\t[490,\t\t469,\t\t0\t\t],\n\t\t[263,\t\t471,\t\t0\t\t],\n\t\t[470,\t\t471,\t\t0\t\t],\n\t\t[534,\t\t471,\t\t0\t\t],\n\t\t[136,\t\t472,\t\t0\t\t],\n\t\t[110,\t\t472,\t\t0\t\t],\n\t\t[251,\t\t472,\t\t0\t\t],\n\t\t[226,\t\t474,\t\t0\t\t],\n\t\t[473,\t\t474,\t\t0\t\t],\n\t\t[257,\t\t474,\t\t0\t\t],\n\t\t[6,\t\t474,\t\t1\t\t],\n\t\t[299,\t\t475,\t\t1\t\t],\n\t\t[3,\t\t475,\t\t0\t\t],\n\t\t[210,\t\t475,\t\t0\t\t],\n\t\t[297,\t\t476,\t\t0\t\t],\n\t\t[296,\t\t476,\t\t0\t\t],\n\t\t[295,\t\t476,\t\t0\t\t],\n\t\t[313,\t\t478,\t\t1\t\t],\n\t\t[477,\t\t478,\t\t0\t\t],\n\t\t[245,\t\t478,\t\t0\t\t],\n\t\t[479,\t\t481,\t\t0\t\t],\n\t\t[565,\t\t481,\t\t0\t\t],\n\t\t[480,\t\t481,\t\t0\t\t],\n\t\t[415,\t\t482,\t\t0\t\t],\n\t\t[56,\t\t482,\t\t0\t\t],\n\t\t[409,\t\t482,\t\t0\t\t],\n\t\t[483,\t\t484,\t\t0\t\t],\n\t\t[3,\t\t484,\t\t0\t\t],\n\t\t[301,\t\t484,\t\t0\t\t],\n\t\t[233,\t\t485,\t\t0\t\t],\n\t\t[392,\t\t485,\t\t0\t\t],\n\t\t[391,\t\t485,\t\t0\t\t],\n\t\t[579,\t\t488,\t\t0\t\t],\n\t\t[486,\t\t488,\t\t0\t\t],\n\t\t[487,\t\t488,\t\t0\t\t],\n\t\t[270,\t\t489,\t\t0\t\t],\n\t\t[331,\t\t489,\t\t0\t\t],\n\t\t[396,\t\t489,\t\t1\t\t],\n\t\t[519,\t\t253,\t\t0\t\t],\n\t\t[382,\t\t349,\t\t1\t\t],\n\t\t[349,\t\t351,\t\t0\t\t],\n\t\t[459,\t\t465,\t\t0\t\t],\n\t\t[549,\t\t550,\t\t0\t\t],\n\t\t[550,\t\t551,\t\t0\t\t],\n\t\t[194,\t\t195,\t\t0\t\t],\n\t\t[247,\t\t248,\t\t0\t\t],\n\t\t[2,\t\t294,\t\t0\t\t],\n\t\t[549,\t\t551,\t\t0\t\t],\n\t\t[54,\t\t365,\t\t0\t\t],\n\t\t[131,\t\t265,\t\t0\t\t],\n\t\t[91,\t\t92,\t\t0\t\t],\n\t\t[247,\t\t249,\t\t0\t\t],\n\t\t[186,\t\t191,\t\t0\t\t],\n\t\t[129,\t\t173,\t\t0\t\t],\n\t\t[96,\t\t202,\t\t0\t\t],\n\t\t[53,\t\t320,\t\t0\t\t],\n\t\t[24,\t\t396,\t\t0\t\t],\n\t\t[133,\t\t156,\t\t0\t\t],\n\t\t[442,\t\t452,\t\t0\t\t],\n\t\t[445,\t\t452,\t\t0\t\t],\n\t\t[247,\t\t250,\t\t0\t\t],\n\t\t[187,\t\t195,\t\t0\t\t],\n\t\t[216,\t\t236,\t\t0\t\t],\n\t\t[244,\t\t389,\t\t0\t\t],\n\t\t[394,\t\t406,\t\t0\t\t],\n\t\t[442,\t\t445,\t\t0\t\t],\n\t\t[442,\t\t444,\t\t0\t\t],\n\t\t[198,\t\t472,\t\t0\t\t],\n\t\t[464,\t\t467,\t\t0\t\t],\n\t\t[198,\t\t251,\t\t0\t\t],\n\t\t[112,\t\t143,\t\t0\t\t],\n\t\t[2,\t\t490,\t\t0\t\t],\n\t\t[5,\t\t491,\t\t0\t\t],\n\t\t[10,\t\t492,\t\t0\t\t],\n\t\t[12,\t\t493,\t\t0\t\t],\n\t\t[13,\t\t494,\t\t0\t\t],\n\t\t[15,\t\t495,\t\t0\t\t],\n\t\t[18,\t\t496,\t\t0\t\t],\n\t\t[20,\t\t497,\t\t0\t\t],\n\t\t[22,\t\t498,\t\t0\t\t],\n\t\t[24,\t\t499,\t\t0\t\t],\n\t\t[26,\t\t500,\t\t0\t\t],\n\t\t[30,\t\t501,\t\t0\t\t],\n\t\t[32,\t\t502,\t\t0\t\t],\n\t\t[37,\t\t503,\t\t0\t\t],\n\t\t[42,\t\t504,\t\t0\t\t],\n\t\t[46,\t\t505,\t\t0\t\t],\n\t\t[52,\t\t506,\t\t0\t\t],\n\t\t[56,\t\t507,\t\t0\t\t],\n\t\t[61,\t\t508,\t\t0\t\t],\n\t\t[68,\t\t509,\t\t0\t\t],\n\t\t[69,\t\t510,\t\t0\t\t],\n\t\t[74,\t\t511,\t\t0\t\t],\n\t\t[78,\t\t512,\t\t0\t\t],\n\t\t[86,\t\t513,\t\t0\t\t],\n\t\t[87,\t\t514,\t\t0\t\t],\n\t\t[94,\t\t515,\t\t0\t\t],\n\t\t[95,\t\t516,\t\t0\t\t],\n\t\t[96,\t\t517,\t\t0\t\t],\n\t\t[99,\t\t518,\t\t0\t\t],\n\t\t[100,\t\t519,\t\t0\t\t],\n\t\t[104,\t\t520,\t\t0\t\t],\n\t\t[105,\t\t521,\t\t0\t\t],\n\t\t[106,\t\t522,\t\t0\t\t],\n\t\t[107,\t\t523,\t\t0\t\t],\n\t\t[117,
\t\t524,\t\t0\t\t],\n\t\t[120,\t\t525,\t\t0\t\t],\n\t\t[123,\t\t526,\t\t0\t\t],\n\t\t[124,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t528,\t\t0\t\t],\n\t\t[128,\t\t529,\t\t0\t\t],\n\t\t[129,\t\t530,\t\t0\t\t],\n\t\t[138,\t\t531,\t\t0\t\t],\n\t\t[143,\t\t532,\t\t0\t\t],\n\t\t[156,\t\t533,\t\t0\t\t],\n\t\t[157,\t\t534,\t\t0\t\t],\n\t\t[159,\t\t535,\t\t0\t\t],\n\t\t[160,\t\t536,\t\t0\t\t],\n\t\t[165,\t\t537,\t\t0\t\t],\n\t\t[184,\t\t538,\t\t0\t\t],\n\t\t[191,\t\t539,\t\t0\t\t],\n\t\t[195,\t\t540,\t\t0\t\t],\n\t\t[201,\t\t541,\t\t0\t\t],\n\t\t[220,\t\t542,\t\t0\t\t],\n\t\t[231,\t\t543,\t\t0\t\t],\n\t\t[232,\t\t544,\t\t0\t\t],\n\t\t[233,\t\t545,\t\t0\t\t],\n\t\t[236,\t\t546,\t\t0\t\t],\n\t\t[245,\t\t547,\t\t0\t\t],\n\t\t[246,\t\t548,\t\t0\t\t],\n\t\t[248,\t\t549,\t\t0\t\t],\n\t\t[249,\t\t550,\t\t0\t\t],\n\t\t[250,\t\t551,\t\t0\t\t],\n\t\t[259,\t\t552,\t\t0\t\t],\n\t\t[261,\t\t553,\t\t0\t\t],\n\t\t[262,\t\t554,\t\t0\t\t],\n\t\t[265,\t\t555,\t\t0\t\t],\n\t\t[270,\t\t556,\t\t0\t\t],\n\t\t[277,\t\t557,\t\t0\t\t],\n\t\t[279,\t\t558,\t\t0\t\t],\n\t\t[280,\t\t559,\t\t0\t\t],\n\t\t[290,\t\t560,\t\t0\t\t],\n\t\t[301,\t\t561,\t\t0\t\t],\n\t\t[305,\t\t562,\t\t0\t\t],\n\t\t[306,\t\t563,\t\t0\t\t],\n\t\t[310,\t\t564,\t\t0\t\t],\n\t\t[313,\t\t565,\t\t0\t\t],\n\t\t[315,\t\t566,\t\t0\t\t],\n\t\t[320,\t\t567,\t\t0\t\t],\n\t\t[330,\t\t568,\t\t0\t\t],\n\t\t[332,\t\t569,\t\t0\t\t],\n\t\t[334,\t\t570,\t\t0\t\t],\n\t\t[336,\t\t571,\t\t0\t\t],\n\t\t[349,\t\t572,\t\t0\t\t],\n\t\t[351,\t\t573,\t\t0\t\t],\n\t\t[358,\t\t574,\t\t0\t\t],\n\t\t[360,\t\t575,\t\t0\t\t],\n\t\t[380,\t\t576,\t\t0\t\t],\n\t\t[382,\t\t577,\t\t0\t\t],\n\t\t[383,\t\t578,\t\t0\t\t],\n\t\t[389,\t\t579,\t\t0\t\t],\n\t\t[401,\t\t580,\t\t0\t\t],\n\t\t[402,\t\t581,\t\t0\t\t],\n\t\t[409,\t\t582,\t\t0\t\t],\n\t\t[415,\t\t583,\t\t0\t\t],\n\t\t[444,\t\t584,\t\t0\t\t],\n\t\t[452,\t\t585,\t\t0\t\t]\n\t])\n\tppc[\"parameters\"] = {\n\t\t\"x_trans_sg\": 0.003, \n\t\t\"x_trans_fm\": 0.001, \n\t\t\"x_trans_fl\": 0.001, \n\t\t\"d_l\": 1e-3, \n\t\t\"d_l_perturb\": 1e-5, \n\t\t\"w_1_ij\": 1, \n\t\t\"w_2_ij\": 1, \n\t\t\"w_3_ij\": 1, \n\t\t\"w_4_ij\": 1, \n\t\t\"b_r\": 238, \n\t\t\"b_c\": 248 }\n\treturn ppc", "from numpy import array\ndef scigrid_2011_01_07_22():\n\tppc = {\"version\": '2'}\n\tppc[\"baseMVA\"] = 100.0\n\tppc[\"bus\"] = 
array([\n\t\t[586,\t\t3,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[589,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[590,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[593,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[594,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[595,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[598,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[599,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[601,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[602,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[603,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[607,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[608,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[609,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[612,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[613,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[614,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[616,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[617,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[618,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[619,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[621,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[624,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[629,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[632,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[637,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[638,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[640,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[641,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[642,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[643,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[647,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[650,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[652,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[655,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[661,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[663,\t\t2,\t\
t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[666,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[668,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[670,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[672,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[676,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[681,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[683,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[687,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[691,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[693,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[694,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[695,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[696,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[697,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[698,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[702,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[704,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[705,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[707,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[713,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[714,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[716,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[717,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[719,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[722,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[724,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[727,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[728,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[730,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[732,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[735,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[738,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[741,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[742,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[743,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[746,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,
\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[747,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[748,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[749,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[750,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[753,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[758,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[760,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[762,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[763,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[765,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[767,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[769,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[771,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[772,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[774,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[777,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[778,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[781,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[784,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[785,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[787,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[788,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[789,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[791,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[792,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[795,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[800,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[801,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[802,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[805,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[806,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[808,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[809,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[811,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[814,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[816,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\
t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[817,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[821,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[822,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[826,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[830,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[834,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[835,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[836,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[837,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[839,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[841,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[843,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[844,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[845,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[849,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[850,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[851,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[853,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[855,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[856,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[857,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[858,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[860,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[862,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[863,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[864,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[865,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[867,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[869,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[870,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[872,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[874,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[875,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[877,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[882,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[883,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[885,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[886,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[889,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[890,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[893,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[894,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[895,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[896,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[898,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[900,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[902,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[903,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[905,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[906,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[907,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[909,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[913,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[915,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[917,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[918,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[920,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[921,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[922,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[923,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[925,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[928,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[931,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[935,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[936,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[937,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[939,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[940,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[942,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[944,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[945,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[950,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[952,\t\t2,\t\t0,\t\t0,\
t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[958,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[959,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[960,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[963,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[965,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[966,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[967,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[968,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999608,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[969,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999608,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[971,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[973,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[976,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[978,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[980,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[981,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[982,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[983,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[984,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[985,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[986,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[987,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[988,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[993,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[994,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[995,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[997,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[999,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1000,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1002,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1003,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1007,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1008,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1010,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1011,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1012,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1014,\t\t2,\t\t0,\t\t0,\t\t0,\t\
t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1026,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1027,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1028,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1029,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1030,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1031,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1032,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1033,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1034,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1035,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1036,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1037,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1038,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1039,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1040,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1041,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1042,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1043,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1044,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1045,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1046,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1047,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1048,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1049,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1050,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1051,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1052,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1053,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1054,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1055,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1056,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1057,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1058,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1059,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1060,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1061,\t\t2,\t\t0,\t\t0,
\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1062,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1063,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1064,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1065,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1066,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1067,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1068,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1069,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1070,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1071,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1072,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1073,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1074,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1075,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1076,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1077,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1078,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1079,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1080,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1081,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1082,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1083,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1084,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1085,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1086,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1087,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1088,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1089,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1090,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1091,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1092,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1093,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1094,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1095,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1096,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1097,\t\t2,\t\
t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1098,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1099,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1100,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1101,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1102,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1103,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1104,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1105,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1106,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1107,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1108,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1109,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1110,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1111,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1112,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1113,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1114,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1115,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1116,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1117,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1118,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1119,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1120,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1121,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1122,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1123,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1124,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1125,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1126,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1127,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1128,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1129,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1130,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1131,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1132,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1133,
\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1134,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1135,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1136,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1137,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1138,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1139,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1140,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1141,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1142,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1143,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1144,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1145,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1146,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1147,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1148,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1149,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1150,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1151,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1152,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1153,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1154,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1155,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1156,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1157,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1158,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1159,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1160,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1161,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1162,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1163,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1164,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1165,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1166,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1167,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1168,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[1169,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1170,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1171,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1172,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1173,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1174,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1175,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1176,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1177,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1178,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1179,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1180,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1181,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1182,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1183,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1184,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1185,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1186,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1187,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1188,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1189,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1190,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1191,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1192,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1193,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1194,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1195,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1196,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1197,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1198,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1199,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1200,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1201,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1202,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1203,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1204,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9
\t\t],\n\t\t[1205,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1206,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1207,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1208,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1209,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1210,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1211,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1212,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1213,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1214,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1215,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1216,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1217,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1218,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1219,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1220,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1221,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1222,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1223,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1224,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1225,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1226,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1227,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1228,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1229,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1230,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1231,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1232,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1233,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1235,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1236,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1237,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1238,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1239,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1240,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1241,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.
1,\t\t0.9\t\t],\n\t\t[1242,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1243,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1244,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1245,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1246,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1247,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1248,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1250,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1251,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1252,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1253,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1254,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1255,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1256,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1257,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1258,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1259,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1260,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1261,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1262,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1263,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1264,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1265,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1266,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1267,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1268,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1269,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1270,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1271,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1272,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1273,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1274,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1277,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1278,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1280,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1281,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\
t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1282,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1283,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1284,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1285,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1286,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1287,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1288,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1289,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1290,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1291,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1292,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1293,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1294,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1295,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1296,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1297,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1298,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1299,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1300,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1301,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1302,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1303,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1304,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1305,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1306,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1307,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1308,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1309,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1310,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1311,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1312,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1313,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1314,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1315,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1316,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1317,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t
220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1318,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1319,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1320,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1321,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1322,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1323,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1324,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1325,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1326,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1327,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1328,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1329,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1330,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1331,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1332,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1333,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1334,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1335,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1336,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1337,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1338,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1339,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1340,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1341,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1342,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1344,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1345,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1346,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1347,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1348,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1349,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1350,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1351,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1352,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1355,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1356,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\
t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1357,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1358,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1359,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1360,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1361,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1362,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1363,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1364,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1365,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1366,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1367,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1368,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1369,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1370,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1371,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1372,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1373,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1374,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1375,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1376,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1377,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1378,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1379,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1380,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1381,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1382,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1383,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1384,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1385,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1386,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1387,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1388,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1389,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1390,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1391,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1392,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,
\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1393,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1394,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1395,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1396,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1397,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1398,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1399,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1400,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1401,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1402,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1403,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1404,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1405,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1406,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1407,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1408,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1409,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1410,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1411,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1412,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1413,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1414,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1415,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1416,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1417,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1418,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1419,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1420,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1421,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999608,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1422,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1423,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1424,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1425,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1426,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1427,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1428,\t\t2,\t\t0,\t\t0,\t\t
0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1429,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1430,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1431,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1432,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1433,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1434,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1435,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1436,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1437,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1438,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1439,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1440,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1441,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1442,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1443,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1444,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1445,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1446,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1447,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1448,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1449,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1450,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1451,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1452,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1453,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1454,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1455,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1456,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1457,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1458,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1459,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1460,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1461,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1462,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1463,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1464,\t\t2,\t\t0,\
t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1465,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1466,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1467,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1468,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1469,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1470,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1471,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1472,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1473,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1474,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1475,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1476,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1477,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1479,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1480,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1481,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1482,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1483,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1484,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1485,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1486,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1487,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1488,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1489,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1490,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1491,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1492,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1493,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1494,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1495,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1496,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1497,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1498,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1499,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1500,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1501,\t\t
2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1502,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1503,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1504,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1505,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1506,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1507,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1508,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1510,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1511,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1512,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1513,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1516,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1517,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1518,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1519,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1,\t\t1,\t\t265.754804,\t\t53.150961,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[2,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000014,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[3,\t\t1,\t\t46.579669,\t\t9.315934,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[4,\t\t1,\t\t76.601811,\t\t15.320362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[5,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999905,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[6,\t\t1,\t\t224.934669,\t\t44.986934,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[7,\t\t1,\t\t169.516244,\t\t33.903249,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[8,\t\t1,\t\t141.839082,\t\t28.367816,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[9,\t\t1,\t\t95.923554,\t\t19.184711,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[10,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999478,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[11,\t\t1,\t\t84.045385,\t\t16.809077,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[12,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00064,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[13,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000282,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[14,\t\t1,\t\t201.00573,\t\t40.201146,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[15,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000578,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[16,\t\t1,\t\t342.807939,\t\t68.561588,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[17,\t\t1,\t\t80.740275,\t\t16.148055,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[18,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002167,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[19,\t\t1,\t\t199.478784,\t\t3
9.895757,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[20,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.997773,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[21,\t\t1,\t\t857.789367,\t\t171.557873,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[22,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000132,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[23,\t\t1,\t\t112.313712,\t\t22.462742,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[24,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999981,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[25,\t\t1,\t\t53.720431,\t\t10.744086,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[26,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000546,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[27,\t\t1,\t\t65.943317,\t\t13.188663,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[28,\t\t1,\t\t194.842745,\t\t38.968549,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[29,\t\t1,\t\t71.569797,\t\t14.313959,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[30,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.9994,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[31,\t\t1,\t\t140.847512,\t\t28.169502,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[32,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998067,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[33,\t\t1,\t\t176.596312,\t\t35.319262,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[34,\t\t1,\t\t35.03588,\t\t7.007176,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[35,\t\t1,\t\t2.31956,\t\t0.463912,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[36,\t\t1,\t\t7.679731,\t\t1.535946,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[37,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.003313,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[38,\t\t1,\t\t185.021867,\t\t37.004373,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[39,\t\t1,\t\t60.585129,\t\t12.117026,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[40,\t\t1,\t\t63.283062,\t\t12.656612,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[41,\t\t1,\t\t68.014949,\t\t13.60299,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[42,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001385,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[43,\t\t1,\t\t104.303989,\t\t20.860798,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[44,\t\t1,\t\t133.441489,\t\t26.688298,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[45,\t\t1,\t\t70.833727,\t\t14.166745,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[46,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000168,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[47,\t\t1,\t\t307.99073,\t\t61.598146,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[48,\t\t1,\t\t211.702612,\t\t42.340522,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[49,\t\t1,\t\t53.55008,\t\t10.710016,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[50,\t\t1,\t\t77.976145,\t\t15.595229,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[51,\t\t1,\t\t101.051994,\t\t20.210399,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\
t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[52,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000196,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[53,\t\t1,\t\t153.330216,\t\t30.666043,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[54,\t\t1,\t\t77.900677,\t\t15.580135,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[55,\t\t1,\t\t76.397799,\t\t15.27956,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[56,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99972,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[57,\t\t1,\t\t91.195106,\t\t18.239021,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[58,\t\t1,\t\t208.896262,\t\t41.779252,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[59,\t\t1,\t\t59.662049,\t\t11.93241,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[60,\t\t1,\t\t31.455488,\t\t6.291098,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[61,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000046,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[62,\t\t1,\t\t239.809696,\t\t47.961939,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[63,\t\t1,\t\t141.55761,\t\t28.311522,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[64,\t\t1,\t\t1502.213121,\t\t300.442624,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[65,\t\t1,\t\t5.005399,\t\t1.00108,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[66,\t\t1,\t\t158.815613,\t\t31.763123,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[67,\t\t1,\t\t340.686242,\t\t68.137248,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[68,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998789,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[69,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000576,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[70,\t\t1,\t\t644.500664,\t\t128.900133,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[71,\t\t1,\t\t149.773653,\t\t29.954731,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[72,\t\t1,\t\t245.308691,\t\t49.061738,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[73,\t\t1,\t\t78.532556,\t\t15.706511,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[74,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001343,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[75,\t\t1,\t\t97.879205,\t\t19.575841,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[76,\t\t1,\t\t94.474908,\t\t18.894982,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[77,\t\t1,\t\t91.505404,\t\t18.301081,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[78,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.996827,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[79,\t\t1,\t\t94.486382,\t\t18.897276,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[80,\t\t1,\t\t100.359117,\t\t20.071823,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[81,\t\t1,\t\t113.291775,\t\t22.658355,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[82,\t\t1,\t\t3.770417,\t\t0.754083,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[83,\t\t1,\t\t252.268688,\t\t50.453738,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220
.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[84,\t\t1,\t\t24.834296,\t\t4.966859,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[85,\t\t1,\t\t86.120517,\t\t17.224103,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[86,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999994,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[87,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999215,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[88,\t\t1,\t\t69.510671,\t\t13.902134,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[89,\t\t1,\t\t86.238627,\t\t17.247725,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[90,\t\t1,\t\t99.601806,\t\t19.920361,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[91,\t\t1,\t\t34.596708,\t\t6.919342,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[92,\t\t1,\t\t37.757146,\t\t7.551429,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[93,\t\t1,\t\t37.032196,\t\t7.406439,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[94,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00075,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[95,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001027,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[96,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[97,\t\t1,\t\t5.208302,\t\t1.04166,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[98,\t\t1,\t\t95.75972,\t\t19.151944,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[99,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00099,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[100,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001421,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[101,\t\t1,\t\t67.807647,\t\t13.561529,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[102,\t\t1,\t\t131.244861,\t\t26.248972,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[103,\t\t1,\t\t153.450639,\t\t30.690128,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[104,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999894,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[105,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99987,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[106,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999742,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[107,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999994,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[108,\t\t1,\t\t108.240718,\t\t21.648144,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[109,\t\t1,\t\t43.824819,\t\t8.764964,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[110,\t\t1,\t\t56.886372,\t\t11.377274,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[111,\t\t1,\t\t100.249159,\t\t20.049832,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[112,\t\t1,\t\t50.738711,\t\t10.147742,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[113,\t\t1,\t\t79.98259,\t\t15.996518,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[114,\t\t1,\t\t117.794796,\t\t23.558959,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[115,\t\t1,\t\t75.93538,\t\t15.187076,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[116,\t\t1,\t\t12
7.067416,\t\t25.413483,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[117,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000579,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[118,\t\t1,\t\t196.745711,\t\t39.349142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[119,\t\t1,\t\t38.137398,\t\t7.62748,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[120,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0013,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[121,\t\t1,\t\t51.790604,\t\t10.358121,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[122,\t\t1,\t\t45.342148,\t\t9.06843,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[123,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00021,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[124,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000004,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[125,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999745,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[126,\t\t1,\t\t237.730007,\t\t47.546001,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[127,\t\t1,\t\t183.790305,\t\t36.758061,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[128,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00135,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[129,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[130,\t\t1,\t\t253.413397,\t\t50.682679,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[131,\t\t1,\t\t55.953459,\t\t11.190692,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[132,\t\t1,\t\t145.694347,\t\t29.138869,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[133,\t\t1,\t\t48.801899,\t\t9.76038,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[134,\t\t1,\t\t48.602056,\t\t9.720411,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[135,\t\t1,\t\t48.666494,\t\t9.733299,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[136,\t\t1,\t\t47.144668,\t\t9.428934,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[137,\t\t1,\t\t37.711395,\t\t7.542279,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[138,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999198,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[139,\t\t1,\t\t73.872802,\t\t14.77456,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[140,\t\t1,\t\t51.086205,\t\t10.217241,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[141,\t\t1,\t\t60.528136,\t\t12.105627,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[142,\t\t1,\t\t66.602557,\t\t13.320511,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[143,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999974,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[144,\t\t1,\t\t60.668043,\t\t12.133609,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[145,\t\t1,\t\t176.484943,\t\t35.296989,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[146,\t\t1,\t\t227.522291,\t\t45.504458,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[147,\t\t1,\t\t139.457767,\t\t27.891553,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[148,\t\t1,\t\t196.800511,\t\
t39.360102,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[149,\t\t1,\t\t126.875865,\t\t25.375173,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[150,\t\t1,\t\t165.649616,\t\t33.129923,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[151,\t\t1,\t\t39.035079,\t\t7.807016,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[152,\t\t1,\t\t81.032776,\t\t16.206555,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[153,\t\t1,\t\t144.575651,\t\t28.91513,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[154,\t\t1,\t\t148.507884,\t\t29.701577,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[155,\t\t1,\t\t154.684086,\t\t30.936817,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[156,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999991,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[157,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001051,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[158,\t\t1,\t\t40.754105,\t\t8.150821,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[159,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001208,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[160,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[161,\t\t1,\t\t126.518159,\t\t25.303632,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[162,\t\t1,\t\t189.107152,\t\t37.82143,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[163,\t\t1,\t\t37.819645,\t\t7.563929,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[164,\t\t1,\t\t37.97174,\t\t7.594348,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[165,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999954,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[166,\t\t1,\t\t44.395107,\t\t8.879021,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[167,\t\t1,\t\t62.452741,\t\t12.490548,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[168,\t\t1,\t\t42.623199,\t\t8.52464,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[169,\t\t1,\t\t145.911498,\t\t29.1823,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[170,\t\t1,\t\t109.640187,\t\t21.928037,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[171,\t\t1,\t\t93.577858,\t\t18.715572,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[172,\t\t1,\t\t45.925463,\t\t9.185093,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[173,\t\t1,\t\t43.872411,\t\t8.774482,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[174,\t\t1,\t\t65.836768,\t\t13.167354,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[175,\t\t1,\t\t43.843656,\t\t8.768731,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[176,\t\t1,\t\t152.778864,\t\t30.555773,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[177,\t\t1,\t\t24.91282,\t\t4.982564,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[178,\t\t1,\t\t131.944404,\t\t26.388881,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[179,\t\t1,\t\t48.616959,\t\t9.723392,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[180,\t\t1,\t\t42.735551,\t\t8.54711,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[181,\t\t1,\t\t32.255563,\t\t6.451113,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[182,\t\t1,\t\t1.461192,\t\t0.292238,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[183,\t\t1,\t\t437.380751,\t\t87.47615,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[184,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999909,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[185,\t\t1,\t\t93.531344,\t\t18.706269,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[186,\t\t1,\t\t50.366143,\t\t10.073229,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[187,\t\t1,\t\t29.459065,\t\t5.891813,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[188,\t\t1,\t\t43.843656,\t\t8.768731,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[189,\t\t1,\t\t160.878738,\t\t32.175748,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[190,\t\t1,\t\t212.792231,\t\t42.558446,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[191,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[192,\t\t1,\t\t51.246815,\t\t10.249363,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[193,\t\t1,\t\t43.772933,\t\t8.754587,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[194,\t\t1,\t\t30.217157,\t\t6.043431,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[195,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[196,\t\t1,\t\t42.392909,\t\t8.478582,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[197,\t\t1,\t\t67.165938,\t\t13.433188,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[198,\t\t1,\t\t39.745205,\t\t7.949041,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[199,\t\t1,\t\t51.170629,\t\t10.234126,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[200,\t\t1,\t\t43.844674,\t\t8.768935,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[201,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998327,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[202,\t\t1,\t\t44.928345,\t\t8.985669,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[203,\t\t1,\t\t5.919712,\t\t1.183942,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[204,\t\t1,\t\t173.50558,\t\t34.701116,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[205,\t\t1,\t\t86.760601,\t\t17.35212,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[206,\t\t1,\t\t41.639025,\t\t8.327805,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[207,\t\t1,\t\t123.816527,\t\t24.763305,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[208,\t\t1,\t\t36.459084,\t\t7.291817,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[209,\t\t1,\t\t50.665387,\t\t10.133077,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[210,\t\t1,\t\t58.205047,\t\t11.641009,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[211,\t\t1,\t\t204.545581,\t\t40.909116,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,
\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[212,\t\t1,\t\t51.266465,\t\t10.253293,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[213,\t\t1,\t\t240.325727,\t\t48.065145,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[214,\t\t1,\t\t161.708751,\t\t32.34175,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[215,\t\t1,\t\t341.941224,\t\t68.388245,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[216,\t\t1,\t\t115.298044,\t\t23.059609,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[217,\t\t1,\t\t36.945588,\t\t7.389118,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[218,\t\t1,\t\t112.55602,\t\t22.511204,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[219,\t\t1,\t\t180.891242,\t\t36.178248,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[220,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999661,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[221,\t\t1,\t\t103.189972,\t\t20.637994,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[222,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[223,\t\t1,\t\t102.267649,\t\t20.45353,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[224,\t\t1,\t\t118.923188,\t\t23.784638,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[225,\t\t1,\t\t213.533406,\t\t42.706681,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[226,\t\t1,\t\t74.593817,\t\t14.918763,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[227,\t\t1,\t\t92.928768,\t\t18.585754,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[228,\t\t1,\t\t91.113818,\t\t18.222764,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[229,\t\t1,\t\t201.61934,\t\t40.323868,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[230,\t\t1,\t\t48.359832,\t\t9.671966,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[231,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000754,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[232,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999972,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[233,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999804,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[234,\t\t1,\t\t172.263099,\t\t34.45262,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[235,\t\t1,\t\t56.017664,\t\t11.203533,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[236,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999976,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[237,\t\t1,\t\t0.46361,\t\t0.092722,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[238,\t\t1,\t\t63.385006,\t\t12.677001,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[239,\t\t1,\t\t87.574333,\t\t17.514867,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[240,\t\t1,\t\t552.4021,\t\t110.48042,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[241,\t\t1,\t\t408.758364,\t\t81.751673,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[242,\t\t1,\t\t148.836318,\t\t29.767264,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[243,\t\t1,\t\t120.081229,\t\t24.016246,\t\t0,\t\t0,
\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[244,\t\t1,\t\t143.067864,\t\t28.613573,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[245,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0015,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[246,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999894,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[247,\t\t1,\t\t28.391009,\t\t5.678202,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[248,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[249,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[250,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999995,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[251,\t\t1,\t\t70.460045,\t\t14.092009,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[252,\t\t1,\t\t180.697782,\t\t36.139556,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[253,\t\t1,\t\t79.333221,\t\t15.866644,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[254,\t\t1,\t\t25.329781,\t\t5.065956,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[255,\t\t1,\t\t124.569753,\t\t24.913951,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[256,\t\t1,\t\t142.859831,\t\t28.571966,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[257,\t\t1,\t\t68.947314,\t\t13.789463,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[258,\t\t1,\t\t224.690971,\t\t44.938194,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[259,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999452,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[260,\t\t1,\t\t139.838834,\t\t27.967767,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[261,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002014,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[262,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999838,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[263,\t\t1,\t\t200.598625,\t\t40.119725,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[264,\t\t1,\t\t259.685739,\t\t51.937148,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[265,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000008,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[266,\t\t1,\t\t125.151228,\t\t25.030246,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[267,\t\t1,\t\t158.28915,\t\t31.65783,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[268,\t\t1,\t\t55.043845,\t\t11.008769,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[269,\t\t1,\t\t44.202271,\t\t8.840454,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[270,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[271,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[272,\t\t1,\t\t0.901889,\t\t0.180378,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[273,\t\t1,\t\t123.333764,\t\t24.666753,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[274,\t\t1,\t\t239.74459,\t\t47.948918,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[275,\t\t1,\t\t44.881496,\t\t8.976299,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\
t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[276,\t\t1,\t\t174.959481,\t\t34.991896,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[277,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998871,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[278,\t\t1,\t\t136.584478,\t\t27.316896,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[279,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998969,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[280,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999412,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[281,\t\t1,\t\t180.411738,\t\t36.082348,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[282,\t\t1,\t\t255.130137,\t\t51.026027,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[283,\t\t1,\t\t102.267237,\t\t20.453447,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[284,\t\t1,\t\t155.144135,\t\t31.028827,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[285,\t\t1,\t\t69.188842,\t\t13.837768,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[286,\t\t1,\t\t145.008636,\t\t29.001727,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[287,\t\t1,\t\t89.125494,\t\t17.825099,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[288,\t\t1,\t\t57.324897,\t\t11.464979,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[289,\t\t1,\t\t90.155437,\t\t18.031087,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[290,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.004642,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[291,\t\t1,\t\t59.330228,\t\t11.866046,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[292,\t\t1,\t\t116.966826,\t\t23.393365,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[293,\t\t1,\t\t103.087287,\t\t20.617457,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[294,\t\t1,\t\t27.471204,\t\t5.494241,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[295,\t\t1,\t\t57.479328,\t\t11.495866,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[296,\t\t1,\t\t163.183946,\t\t32.636789,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[297,\t\t1,\t\t171.508158,\t\t34.301632,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[298,\t\t1,\t\t90.559717,\t\t18.111943,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[299,\t\t1,\t\t87.706484,\t\t17.541297,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[300,\t\t1,\t\t238.93621,\t\t47.787242,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[301,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999316,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[302,\t\t1,\t\t201.274528,\t\t40.254906,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[303,\t\t1,\t\t103.380435,\t\t20.676087,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[304,\t\t1,\t\t88.772852,\t\t17.75457,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[305,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999676,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[306,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001449,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[307,\t\t1,\t\t105.292852,\t\t21.05857,\t\t0,\t\t0,\t\t0,
\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[308,\t\t1,\t\t129.812058,\t\t25.962412,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[309,\t\t1,\t\t212.390782,\t\t42.478156,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[310,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000076,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[311,\t\t1,\t\t180.406637,\t\t36.081327,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[312,\t\t1,\t\t81.133884,\t\t16.226777,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[313,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000482,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[314,\t\t1,\t\t251.301129,\t\t50.260226,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[315,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001493,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[316,\t\t1,\t\t98.463049,\t\t19.69261,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[317,\t\t1,\t\t132.576889,\t\t26.515378,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[318,\t\t1,\t\t217.872772,\t\t43.574554,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[319,\t\t1,\t\t7.805074,\t\t1.561015,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[320,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[321,\t\t1,\t\t184.632027,\t\t36.926405,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[322,\t\t1,\t\t23.504846,\t\t4.700969,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[323,\t\t1,\t\t2.445479,\t\t0.489096,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[324,\t\t1,\t\t432.30154,\t\t86.460308,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[325,\t\t1,\t\t140.82409,\t\t28.164818,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[326,\t\t1,\t\t11.41758,\t\t2.283516,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[327,\t\t1,\t\t98.256073,\t\t19.651215,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[328,\t\t1,\t\t167.443449,\t\t33.48869,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[329,\t\t1,\t\t251.849874,\t\t50.369975,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[330,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001743,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[331,\t\t1,\t\t19.996023,\t\t3.999205,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[332,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.996402,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[333,\t\t1,\t\t210.103514,\t\t42.020703,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[334,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999458,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[335,\t\t1,\t\t214.426487,\t\t42.885297,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[336,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998173,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[337,\t\t1,\t\t85.292569,\t\t17.058514,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[338,\t\t1,\t\t231.496153,\t\t46.299231,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[339,\t\t1,\t\t143.177169,\t\t28.635434,\t\t0,\t\t
0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[340,\t\t1,\t\t121.053403,\t\t24.210681,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[341,\t\t1,\t\t109.434694,\t\t21.886939,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[342,\t\t1,\t\t189.833185,\t\t37.966637,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[343,\t\t1,\t\t104.145253,\t\t20.829051,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[344,\t\t1,\t\t261.117094,\t\t52.223419,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[345,\t\t1,\t\t285.521261,\t\t57.104252,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[346,\t\t1,\t\t283.449821,\t\t56.689964,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[347,\t\t1,\t\t99.127322,\t\t19.825464,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[348,\t\t1,\t\t259.125349,\t\t51.82507,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[349,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001289,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[350,\t\t1,\t\t135.94094,\t\t27.188188,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[351,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001099,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[352,\t\t1,\t\t899.833088,\t\t179.966618,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[353,\t\t1,\t\t2.705199,\t\t0.54104,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[354,\t\t1,\t\t18.378887,\t\t3.675777,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[355,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[356,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[357,\t\t1,\t\t0.04607,\t\t0.009214,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[358,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001384,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[359,\t\t1,\t\t2.689868,\t\t0.537974,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[360,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00074,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[361,\t\t1,\t\t68.844751,\t\t13.76895,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[362,\t\t1,\t\t196.243172,\t\t39.248634,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[363,\t\t1,\t\t288.933548,\t\t57.78671,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[364,\t\t1,\t\t68.169893,\t\t13.633979,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[365,\t\t1,\t\t61.186099,\t\t12.23722,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[366,\t\t1,\t\t121.270652,\t\t24.25413,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[367,\t\t1,\t\t58.617196,\t\t11.723439,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[368,\t\t1,\t\t28.864071,\t\t5.772814,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[369,\t\t1,\t\t23.718575,\t\t4.743715,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[370,\t\t1,\t\t69.828164,\t\t13.965633,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[371,\t\t1,\t\t351.344575,\t
\t70.268915,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[372,\t\t1,\t\t203.749767,\t\t40.749953,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[373,\t\t1,\t\t137.49049,\t\t27.498098,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[374,\t\t1,\t\t70.502796,\t\t14.100559,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[375,\t\t1,\t\t231.273649,\t\t46.25473,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[376,\t\t1,\t\t253.663635,\t\t50.732727,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[377,\t\t1,\t\t181.517779,\t\t36.303556,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[378,\t\t1,\t\t181.168395,\t\t36.233679,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[379,\t\t1,\t\t62.440986,\t\t12.488197,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[380,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001368,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[381,\t\t1,\t\t208.806464,\t\t41.761293,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[382,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000453,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[383,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999306,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[384,\t\t1,\t\t73.682614,\t\t14.736523,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[385,\t\t1,\t\t93.00192,\t\t18.600384,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[386,\t\t1,\t\t74.724254,\t\t14.944851,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[387,\t\t1,\t\t152.178997,\t\t30.435799,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[388,\t\t1,\t\t817.198986,\t\t163.439797,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[389,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999933,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[390,\t\t1,\t\t67.474209,\t\t13.494842,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[391,\t\t1,\t\t76.858878,\t\t15.371776,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[392,\t\t1,\t\t147.491415,\t\t29.498283,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[393,\t\t1,\t\t184.189182,\t\t36.837836,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[394,\t\t1,\t\t66.247553,\t\t13.249511,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[395,\t\t1,\t\t91.815016,\t\t18.363003,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[396,\t\t1,\t\t65.031636,\t\t13.006327,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[397,\t\t1,\t\t521.482087,\t\t104.296417,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[398,\t\t1,\t\t225.865156,\t\t45.173031,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[399,\t\t1,\t\t96.235007,\t\t19.247001,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[400,\t\t1,\t\t51.272399,\t\t10.25448,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[401,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000629,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[402,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000421,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\
t\t],\n\t\t[403,\t\t1,\t\t25.457939,\t\t5.091588,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[404,\t\t1,\t\t89.689893,\t\t17.937979,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[405,\t\t1,\t\t676.173123,\t\t135.234625,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[406,\t\t1,\t\t51.231806,\t\t10.246361,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[407,\t\t1,\t\t101.414483,\t\t20.282897,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[408,\t\t1,\t\t293.233814,\t\t58.646763,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[409,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999958,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[410,\t\t1,\t\t37.964953,\t\t7.592991,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[411,\t\t1,\t\t35.897418,\t\t7.179484,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[412,\t\t1,\t\t2.521401,\t\t0.50428,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[413,\t\t1,\t\t125.872872,\t\t25.174574,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[414,\t\t1,\t\t10.687969,\t\t2.137594,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[415,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000332,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[416,\t\t1,\t\t152.207919,\t\t30.441584,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[417,\t\t1,\t\t5.955605,\t\t1.191121,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[418,\t\t1,\t\t124.111229,\t\t24.822246,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[419,\t\t1,\t\t66.33657,\t\t13.267314,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[420,\t\t1,\t\t66.787445,\t\t13.357489,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[421,\t\t1,\t\t96.205612,\t\t19.241122,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[422,\t\t1,\t\t70.483455,\t\t14.096691,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[423,\t\t1,\t\t148.030832,\t\t29.606166,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[424,\t\t1,\t\t10.672642,\t\t2.134528,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[425,\t\t1,\t\t87.649316,\t\t17.529863,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[426,\t\t1,\t\t7.262015,\t\t1.452403,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[427,\t\t1,\t\t61.030098,\t\t12.20602,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[428,\t\t1,\t\t27.364002,\t\t5.4728,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[429,\t\t1,\t\t308.79627,\t\t61.759254,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[430,\t\t1,\t\t164.485152,\t\t32.89703,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[431,\t\t1,\t\t109.993747,\t\t21.998749,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[432,\t\t1,\t\t128.575943,\t\t25.715189,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[433,\t\t1,\t\t65.724594,\t\t13.144919,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[434,\t\t1,\t\t34.20628,\t\t6.841256,\t
\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[435,\t\t1,\t\t136.803586,\t\t27.360717,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[436,\t\t1,\t\t73.037139,\t\t14.607428,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[437,\t\t1,\t\t16.633442,\t\t3.326688,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[438,\t\t1,\t\t44.639604,\t\t8.927921,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[439,\t\t1,\t\t83.113172,\t\t16.622634,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[440,\t\t1,\t\t70.239123,\t\t14.047825,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[441,\t\t1,\t\t53.847699,\t\t10.76954,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[442,\t\t1,\t\t71.258734,\t\t14.251747,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[443,\t\t1,\t\t154.495642,\t\t30.899128,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[444,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999996,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[445,\t\t1,\t\t70.201034,\t\t14.040207,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[446,\t\t1,\t\t32.55159,\t\t6.510318,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[447,\t\t1,\t\t61.886933,\t\t12.377387,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[448,\t\t1,\t\t45.480611,\t\t9.096122,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[449,\t\t1,\t\t229.32864,\t\t45.865728,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[450,\t\t1,\t\t140.338185,\t\t28.067637,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[451,\t\t1,\t\t59.967198,\t\t11.99344,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[452,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[453,\t\t1,\t\t40.189657,\t\t8.037931,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[454,\t\t1,\t\t28.038956,\t\t5.607791,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[455,\t\t1,\t\t45.715158,\t\t9.143032,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[456,\t\t1,\t\t45.715158,\t\t9.143032,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[457,\t\t1,\t\t140.196927,\t\t28.039385,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[458,\t\t1,\t\t133.344955,\t\t26.668991,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[459,\t\t1,\t\t162.285771,\t\t32.457154,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[460,\t\t1,\t\t213.276939,\t\t42.655388,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[461,\t\t1,\t\t221.854265,\t\t44.370853,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[462,\t\t1,\t\t67.86637,\t\t13.573274,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[463,\t\t1,\t\t34.775153,\t\t6.955031,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[464,\t\t1,\t\t34.817188,\t\t6.963438,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[465,\t\t1,\t\t56.239274,\t\t11.247855,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\
t1.1,\t\t0.9\t\t],\n\t\t[466,\t\t1,\t\t45.659176,\t\t9.131835,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[467,\t\t1,\t\t42.135859,\t\t8.427172,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[468,\t\t1,\t\t69.086154,\t\t13.817231,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[469,\t\t1,\t\t42.811305,\t\t8.562261,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[470,\t\t1,\t\t109.023964,\t\t21.804793,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[471,\t\t1,\t\t107.343924,\t\t21.468785,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[472,\t\t1,\t\t37.545669,\t\t7.509134,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[473,\t\t1,\t\t68.942801,\t\t13.78856,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[474,\t\t1,\t\t35.608236,\t\t7.121647,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[475,\t\t1,\t\t34.944086,\t\t6.988817,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[476,\t\t1,\t\t39.492566,\t\t7.898513,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[477,\t\t1,\t\t63.73246,\t\t12.746492,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[478,\t\t1,\t\t80.059585,\t\t16.011917,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[479,\t\t1,\t\t145.085748,\t\t29.01715,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[480,\t\t1,\t\t63.593712,\t\t12.718742,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[481,\t\t1,\t\t55.227724,\t\t11.045545,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[482,\t\t1,\t\t62.708704,\t\t12.541741,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[483,\t\t1,\t\t53.329157,\t\t10.665831,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[484,\t\t1,\t\t41.807465,\t\t8.361493,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[485,\t\t1,\t\t62.449288,\t\t12.489858,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[486,\t\t1,\t\t574.502942,\t\t114.900588,\t\t0,\t\t0,\t\t0,\t\t0.999608,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[487,\t\t1,\t\t145.57639,\t\t29.115278,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[488,\t\t1,\t\t419.471487,\t\t83.894297,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[489,\t\t1,\t\t110.403702,\t\t22.08074,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[490,\t\t1,\t\t34.353515,\t\t6.870703,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[491,\t\t1,\t\t47.236524,\t\t9.447305,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[492,\t\t1,\t\t73.661127,\t\t14.732225,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[493,\t\t1,\t\t94.940376,\t\t18.988075,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[494,\t\t1,\t\t129.757449,\t\t25.95149,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[495,\t\t1,\t\t102.142303,\t\t20.428461,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[496,\t\t1,\t\t7.23491,\t\t1.446982,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[
497,\t\t1,\t\t904.723205,\t\t180.944641,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[498,\t\t1,\t\t42.430703,\t\t8.486141,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[499,\t\t1,\t\t59.226309,\t\t11.845262,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[500,\t\t1,\t\t32.425708,\t\t6.485142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[501,\t\t1,\t\t54.858706,\t\t10.971741,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[502,\t\t1,\t\t216.515953,\t\t43.303191,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[503,\t\t1,\t\t66.31039,\t\t13.262078,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[504,\t\t1,\t\t43.423158,\t\t8.684632,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[505,\t\t1,\t\t307.99073,\t\t61.598146,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[506,\t\t1,\t\t96.6745,\t\t19.3349,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[507,\t\t1,\t\t91.957908,\t\t18.391582,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[508,\t\t1,\t\t133.686672,\t\t26.737334,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[509,\t\t1,\t\t176.172518,\t\t35.234504,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[510,\t\t1,\t\t111.298704,\t\t22.259741,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[511,\t\t1,\t\t97.086474,\t\t19.417295,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[512,\t\t1,\t\t64.13161,\t\t12.826322,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[513,\t\t1,\t\t35.329674,\t\t7.065935,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[514,\t\t1,\t\t87.932138,\t\t17.586428,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[515,\t\t1,\t\t78.440691,\t\t15.688138,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[516,\t\t1,\t\t87.756676,\t\t17.551335,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[517,\t\t1,\t\t41.221411,\t\t8.244282,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[518,\t\t1,\t\t232.161599,\t\t46.43232,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[519,\t\t1,\t\t22.848952,\t\t4.56979,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[520,\t\t1,\t\t92.250063,\t\t18.450013,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[521,\t\t1,\t\t83.333134,\t\t16.666627,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[522,\t\t1,\t\t71.350503,\t\t14.270101,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[523,\t\t1,\t\t38.407165,\t\t7.681433,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[524,\t\t1,\t\t111.476459,\t\t22.295292,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[525,\t\t1,\t\t132.806221,\t\t26.561244,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[526,\t\t1,\t\t40.264348,\t\t8.05287,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[527,\t\t1,\t\t44.207425,\t\t8.841485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[528,\t\t1,\t\t96.486839,\t\t19
.297368,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[529,\t\t1,\t\t123.681839,\t\t24.736368,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[530,\t\t1,\t\t52.411312,\t\t10.482262,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[531,\t\t1,\t\t53.288456,\t\t10.657691,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[532,\t\t1,\t\t51.147629,\t\t10.229526,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[533,\t\t1,\t\t45.834447,\t\t9.166889,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[534,\t\t1,\t\t126.437057,\t\t25.287411,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[535,\t\t1,\t\t158.29108,\t\t31.658216,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[536,\t\t1,\t\t124.767483,\t\t24.953497,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[537,\t\t1,\t\t41.505,\t\t8.301,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[538,\t\t1,\t\t31.026306,\t\t6.205261,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[539,\t\t1,\t\t32.920819,\t\t6.584164,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[540,\t\t1,\t\t29.643751,\t\t5.92875,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[541,\t\t1,\t\t76.572367,\t\t15.314473,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[542,\t\t1,\t\t105.186765,\t\t21.037353,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[543,\t\t1,\t\t57.452494,\t\t11.490499,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[544,\t\t1,\t\t107.006076,\t\t21.401215,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[545,\t\t1,\t\t230.40163,\t\t46.080326,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[546,\t\t1,\t\t115.480777,\t\t23.096155,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[547,\t\t1,\t\t149.266492,\t\t29.853298,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[548,\t\t1,\t\t48.318181,\t\t9.663636,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[549,\t\t1,\t\t41.316176,\t\t8.263235,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[550,\t\t1,\t\t34.092871,\t\t6.818574,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[551,\t\t1,\t\t32.864705,\t\t6.572941,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[552,\t\t1,\t\t163.202426,\t\t32.640485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[553,\t\t1,\t\t1.129108,\t\t0.225822,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[554,\t\t1,\t\t165.341096,\t\t33.068219,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[555,\t\t1,\t\t62.996788,\t\t12.599358,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[556,\t\t1,\t\t97.458127,\t\t19.491625,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[557,\t\t1,\t\t207.063459,\t\t41.412692,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[558,\t\t1,\t\t122.096769,\t\t24.419354,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[559,\t\t1,\t\t65.345015,\t\t13.069003,\t\t0,\t\t0,\t\t0,\t
\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[560,\t\t1,\t\t102.084372,\t\t20.416874,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[561,\t\t1,\t\t55.98009,\t\t11.196018,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[562,\t\t1,\t\t152.933411,\t\t30.586682,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[563,\t\t1,\t\t107.524652,\t\t21.50493,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[564,\t\t1,\t\t212.307724,\t\t42.461545,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[565,\t\t1,\t\t160.196698,\t\t32.03934,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[566,\t\t1,\t\t0.25731,\t\t0.051462,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[567,\t\t1,\t\t260.406917,\t\t52.081383,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[568,\t\t1,\t\t240.813392,\t\t48.162678,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[569,\t\t1,\t\t169.437994,\t\t33.887599,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[570,\t\t1,\t\t264.52322,\t\t52.904644,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[571,\t\t1,\t\t194.762124,\t\t38.952425,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[572,\t\t1,\t\t343.527869,\t\t68.705574,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[573,\t\t1,\t\t99.996459,\t\t19.999292,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[574,\t\t1,\t\t190.53144,\t\t38.106288,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[575,\t\t1,\t\t3.580427,\t\t0.716085,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[576,\t\t1,\t\t231.684953,\t\t46.336991,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[577,\t\t1,\t\t255.408508,\t\t51.081702,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[578,\t\t1,\t\t243.855491,\t\t48.771098,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[579,\t\t1,\t\t88.965138,\t\t17.793028,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[580,\t\t1,\t\t18.521218,\t\t3.704244,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[581,\t\t1,\t\t0.106425,\t\t0.021285,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[582,\t\t1,\t\t67.009861,\t\t13.401972,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[583,\t\t1,\t\t76.857848,\t\t15.37157,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[584,\t\t1,\t\t44.097352,\t\t8.81947,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[585,\t\t1,\t\t76.55843,\t\t15.311686,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t]\n\t])\n\tppc[\"gen\"] = array([\n\t\t[586,\t\t272.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t272.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[589,\t\t63.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[590,\t\t38.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[593,\t\t11.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[594,\t\t19.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[595,\t\t1431.104431,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4730.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[598,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[599,\t\t9.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[601,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[602,\t\t24.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[603,\t\t1300.808419,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3455.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[607,\t\t1800.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1800.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[608,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[609,\t\t36.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[612,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[613,\t\t85.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[614,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[616,\t\t29.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[617,\t\t137.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[618,\t\t33.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[619,\t\t118.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t118.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[621,\t\t765.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t765.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[624,\t\t27.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[629,\t\t75.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[632,\t\t45.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[637,\t\t53.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[638,\t\t128.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t128.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[640,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[641,\t\t12.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[642,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[643,\t\t857.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t857.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[647,\t\t14.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[650,\t\t1324.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1324.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[652,\t\t46.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t46.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[655,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[661,\t\t14.69768,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[663,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[666,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[668,\t\t766.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t766.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[670,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[672,\t\t33.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[676,\t\t370.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t370.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[681,\t\t40.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[683,\t\t27.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[687,\t\t1329.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1329.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[691,\t\t26.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[693,\t\t194.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t194.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[694,\t\t16.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[695,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[696,\t\t721.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t721.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[697,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[698,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[702,\t\t73.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[704,\t\t508.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t508.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[705,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[707,\t\t34.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[713,\t\t13.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[714,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[716,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[717,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[719,\t\t1283.090107,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1958.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[722,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[724,\t\t12.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[727,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[728,\t\t510.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t510.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[730,\t\t633.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[732,\t\t14.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[735,\t\t84.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[738,\t\t138.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[741,\t\t214.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[742,\t\t9.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[743,\t\t735.855589,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[746,\t\t100.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t100.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[747,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[748,\t\t110.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t110.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[749,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[750,\t\t90.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[753,\t\t311.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t311.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[758,\t\t18.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[760,\t\t316.385985,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t794.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[762,\t\t867.633296,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1105.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[763,\t\t20.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[765,\t\t59.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[767,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[769,\t\t43.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[771,\t\t690.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t690.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[772,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[774,\t\t33.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[777,\t\t79.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[778,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[781,\t\t958.893296,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1310.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[784,\t\t774.642898,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1275.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[785,\t\t3.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[787,\t\t778.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t778.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[788,\t\t875.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[789,\t\t77.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[791,\t\t10.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[792,\t\t62.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t62.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[795,\t\t13.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[800,\t\t36.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[801,\t\t50.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[802,\t\t500.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t500.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[805,\t\t731.931728,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[806,\t\t35.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[808,\t\t217.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[809,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[811,\t\t25.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[814,\t\t89.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[816,\t\t80.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t80.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[817,\t\t54.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[821,\t\t82.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[822,\t\t134.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[826,\t\t58.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[830,\t\t89.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[834,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[835,\t\t63.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[836,\t\t25.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[837,\t\t472.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t472.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[839,\t\t73.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[841,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[843,\t\t333.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t333.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[844,\t\t40.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[845,\t\t318.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t318.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[849,\t\t779.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t779.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[850,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[851,\t\t79.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[853,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[855,\t\t688.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t688.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[856,\t\t36.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[857,\t\t1402.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1402.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[858,\t\t56.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[860,\t\t25.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[862,\t\t199.767782,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t725.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[863,\t\t0.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[864,\t\t875.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[865,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[867,\t\t769.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t769.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[869,\t\t1360.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1360.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[870,\t\t58.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[872,\t\t22.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[874,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[875,\t\t24.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[877,\t\t24.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[882,\t\t17.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[883,\t\t18.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[885,\t\t76.988036,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t490.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[886,\t\t2572.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2572.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[889,\t\t9.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[890,\t\t48.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[893,\t\t60.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[894,\t\t158.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t158.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[895,\t\t19.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[896,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[898,\t\t84.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[900,\t\t112.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[902,\t\t19.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[903,\t\t20.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[905,\t\t137.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[906,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[907,\t\t67.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[909,\t\t36.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[913,\t\t74.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t74.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[915,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[917,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[918,\t\t38.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[920,\t\t12.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[921,\t\t124.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t124.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[922,\t\t164.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[923,\t\t146.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t146.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[925,\t\t26.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[928,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[931,\t\t217.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[935,\t\t23.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[936,\t\t104.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t104.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[937,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[939,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[940,\t\t29.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[942,\t\t51.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t51.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[944,\t\t25.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[945,\t\t35.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[950,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[952,\t\t31.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t31.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[958,\t\t66.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[959,\t\t45.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[960,\t\t26.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[963,\t\t730.05781,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[965,\t\t352.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t352.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[966,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[967,\t\t37.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[968,\t\t54.0,\t\t0,\t\t9999,\t\t-9999,\t\t0.999608,\t\t100,\t\t1,\t\t54.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[969,\t\t56.9,\t\t0,\t\t9999,\t\t-9999,\t\t0.999608,\t\t100,\t\t1,\t\t56.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[971,\t\t20.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[973,\t\t1347.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1347.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[976,\t\t26.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[978,\t\t4.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[980,\t\t32.205943,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t350.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[981,\t\t119.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[982,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[983,\t\t44.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t44.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[984,\t\t465.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t465.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[985,\t\t22.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[986,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[987,\t\t164.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[988,\t\t5.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[993,\t\t392.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[994,\t\t33.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[995,\t\t4.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[997,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[999,\t\t15.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1000,\t\t49.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1002,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1003,\t\t900.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t900.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1007,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1008,\t\t49.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1010,\t\t750.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1011,\t\t18.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1012,\t\t2835.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2835.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1014,\t\t220.979725,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1026,\t\t655.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t655.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1027,\t\t31.860296,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1028,\t\t341.006126,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t400.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1029,\t\t45.540676,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1030,\t\t539.568802,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1018.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1031,\t\t1412.186038,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1447.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1032,\t\t34.056314,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.510391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1033,\t\t8.158047,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.164506,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1034,\t\t19.818663,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.262779,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1035,\t\t13.816202,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.886469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1036,\t\t17.822731,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.223077,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1037,\t\t13.038638,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t94.684044,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1038,\t\t12.553569,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.798525,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1039,\t\t18.197695,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.724114,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1040,\t\t0.000137,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.064179,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1041,\t\t44.396391,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t204.187624,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1042,\t\t6.229792,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.70053,\t\t0.0,\t\t0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0],\n\t\t[1043,\t\t0.354773,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.035538,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1044,\t\t3.997792,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.163532,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1045,\t\t7.778758,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.836204,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1046,\t\t21.931194,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t106.787063,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1047,\t\t1.568219,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.029581,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1048,\t\t9.776271,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.656883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1049,\t\t70.594976,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t293.755375,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1050,\t\t5.129003,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.781606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1051,\t\t44.772044,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t304.42978,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1052,\t\t5.244562,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.66869,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1053,\t\t4.332494,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.368087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1054,\t\t75.353956,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t273.855776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1055,\t\t0.374839,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.856069,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1056,\t\t110.965718,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t603.943953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1057,\t\t110.345358,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t426.979979,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1058,\t\t254.849592,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1055.735174,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1059,\t\t91.279101,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.871332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1060,\t\t1.068693,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.351632,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1061,\t\t24.022448,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t161.862597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1062,\t\t0.316925,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.878561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1063,\t\t0.939671,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.670916,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1064,\t\t52.316245,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t209.786524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1065,\t\t100.4109,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.421643,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1066,\t\t14.12043,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.399019,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1067,\t\t6.869879,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.653526,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1068,\t\t0.706446,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.009022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1069,\t\t0.342094,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.190759,\t\t0.0,\t\t0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0],\n\t\t[1070,\t\t0.16876,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.788599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1071,\t\t1.309754,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.328696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1072,\t\t27.160176,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.606433,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1073,\t\t19.387617,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.81765,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1074,\t\t34.910446,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.592986,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1075,\t\t5.295374,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.783448,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1076,\t\t0.072968,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.29551,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1077,\t\t4.763159,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.120041,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1078,\t\t3.337408,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.413246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1079,\t\t16.625888,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t72.327992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1080,\t\t5.192951,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.149983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1081,\t\t127.10693,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t405.642115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1082,\t\t160.232667,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t510.054159,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1083,\t\t133.317311,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.681488,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1084,\t\t146.400981,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t602.719371,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1085,\t\t19.917295,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t113.714399,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1086,\t\t32.470059,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t225.59917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1087,\t\t42.915516,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t116.66597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1088,\t\t18.333754,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.782492,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1089,\t\t50.480642,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t384.449592,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1090,\t\t24.233183,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.140897,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1091,\t\t7.601782,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.7939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1092,\t\t18.22163,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.002032,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1093,\t\t35.033617,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.605298,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1094,\t\t0.609167,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.759038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1095,\t\t0.032268,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.204951,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1096,\t\t17.659113,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.50612,\t\t0.0,\t\t0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0],\n\t\t[1097,\t\t1.101648,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.601122,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1098,\t\t18.793463,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.025499,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1099,\t\t89.169488,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t290.937198,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1100,\t\t0.001343,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1101,\t\t26.125158,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.930665,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1102,\t\t102.506547,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t350.979988,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1103,\t\t60.497467,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t245.381701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1104,\t\t0.027212,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.206918,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1105,\t\t0.419851,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.178593,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1106,\t\t0.350241,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.289793,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1107,\t\t11.883237,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.221615,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1108,\t\t45.850264,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t320.422751,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1109,\t\t0.13189,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.77821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1110,\t\t0.283157,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.654557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1111,\t\t10.436995,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.637993,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1112,\t\t11.143033,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t69.53429,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1113,\t\t0.537142,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.536361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1114,\t\t1.573243,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.446889,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1115,\t\t9.774387,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.575278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1116,\t\t6.483651,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.601142,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1117,\t\t20.497973,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.792541,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1118,\t\t1.04063,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.725012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1119,\t\t8.601536,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.254023,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1120,\t\t0.302076,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.416001,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1121,\t\t0.057077,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.540589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1122,\t\t0.182243,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.462883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1123,\t\t0.125522,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.464336,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1124,\t\t0.156067,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.288283,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1125,\t\t4.603673,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.818899,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1126,\t\t5.304305,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.154893,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1127,\t\t23.470867,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.296621,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1128,\t\t0.544843,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.06139,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1129,\t\t0.820999,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.738747,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1130,\t\t0.169258,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.025754,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1131,\t\t0.501311,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.897078,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1132,\t\t0.058714,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.359497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1133,\t\t0.075977,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.719597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1134,\t\t0.053684,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.508453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1135,\t\t1.099594,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.117819,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1136,\t\t0.043002,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.4027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1137,\t\t0.29536,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.669012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1138,\t\t0.121487,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.254278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1139,\t\t3.150265,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.822769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1140,\t\t3.84557,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.389457,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1141,\t\t23.879534,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.46456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1142,\t\t0.12545,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1143,\t\t4.08877,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.239356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1144,\t\t10.032202,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.527382,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1145,\t\t44.191144,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t175.889627,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1146,\t\t0.09094,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.861317,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1147,\t\t7.934964,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.703707,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1148,\t\t3.383079,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.645529,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1149,\t\t1.467096,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.556784,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1150,\t\t0.562729,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.62256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1151,\t\t2.625947,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.036113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1152,\t\t0.020938,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.116518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1153,\t\t0.007727,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.068788,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1154,\t\t0.018044,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.160625,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1155,\t\t0.120181,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.609451,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1156,\t\t2.566653,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.022334,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1157,\t\t0.875325,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.354147,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1158,\t\t0.167997,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.04304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1159,\t\t2.172411,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.498087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1160,\t\t98.284281,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.377761,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1161,\t\t1.165963,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.263391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1162,\t\t89.908237,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t502.409178,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1163,\t\t52.513443,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t330.03194,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1164,\t\t68.248512,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t285.625412,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1165,\t\t12.22226,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.188579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1166,\t\t15.079399,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.277163,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1167,\t\t0.819103,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.05378,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1168,\t\t0.245367,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.345774,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1169,\t\t0.545579,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.721845,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1170,\t\t0.042191,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.26599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1171,\t\t0.721447,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.029885,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1172,\t\t0.095682,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.584043,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1173,\t\t64.905646,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t254.253327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1174,\t\t0.203985,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.260082,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1175,\t\t0.170482,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.855454,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1176,\t\t0.045445,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.23222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1177,\t\t5.896307,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.87401,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1178,\t\t0.567892,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.167999,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1179,\t\t0.220259,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.306293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1180,\t\t0.109874,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.688545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1181,\t\t21.738205,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.739557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1182,\t\t29.161106,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.319579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1183,\t\t4.583971,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.222575,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1184,\t\t0.581526,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.219005,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1185,\t\t1.811511,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.343971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1186,\t\t7.78122,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.916368,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1187,\t\t1.75504,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.814574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1188,\t\t40.216365,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t179.712741,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1189,\t\t3.375902,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.261805,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1190,\t\t41.008871,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t220.533673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1191,\t\t7.616813,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.079413,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1192,\t\t1.08803,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.454569,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1193,\t\t0.121303,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.399953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1194,\t\t0.435885,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.986036,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1195,\t\t0.030667,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.202359,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1196,\t\t48.964674,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.697956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1197,\t\t32.15246,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.592266,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1198,\t\t8.50883,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.819157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1199,\t\t55.039676,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.421956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1200,\t\t19.150277,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.012408,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1201,\t\t4.594234,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.166667,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1202,\t\t3.068612,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.89238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1203,\t\t20.585681,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t182.623256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1204,\t\t8.72922,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.541821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1205,\t\t0.027035,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.548843,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1206,\t\t0.195396,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1207,\t\t0.180717,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.575453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1208,\t\t0.264959,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.242031,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1209,\t\t0.000769,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.268261,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1210,\t\t0.01203,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.02599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1211,\t\t2.212413,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.005229,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1212,\t\t9.319203,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.171888,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1213,\t\t6.391875,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.342704,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1214,\t\t0.221517,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.505907,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1215,\t\t0.112584,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.252965,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1216,\t\t2.663233,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.754469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1217,\t\t2.07975,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.871617,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1218,\t\t0.054216,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.980482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1219,\t\t2.259213,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.33953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1220,\t\t4.677261,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.597849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1221,\t\t16.790544,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t593.230436,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1222,\t\t21.780368,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t211.057769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1223,\t\t0.679764,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806101,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1224,\t\t6.317899,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.523778,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1225,\t\t2.275419,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.931481,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1226,\t\t0.385137,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.982858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1227,\t\t6.014633,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.482807,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1228,\t\t0.477433,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.021367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1229,\t\t13.159991,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t51.244222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1230,\t\t0.010813,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.681276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1231,\t\t1.418744,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.55478,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1232,\t\t4.009549,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.075088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1233,\t\t191.837075,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t575.36828,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1235,\t\t6.953981,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.03734,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1236,\t\t62.06292,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.225035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1237,\t\t1.679968,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.605409,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1238,\t\t7.956983,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.691049,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1239,\t\t0.575417,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.267706,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1240,\t\t93.338546,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.51051,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1241,\t\t121.89205,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t385.361595,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1242,\t\t1.691698,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.074038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1243,\t\t7.667532,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.079842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1244,\t\t84.606464,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t323.472536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1245,\t\t0.869455,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.080896,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1246,\t\t16.39395,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.127825,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1247,\t\t4.152013,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.833396,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1248,\t\t20.422775,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.958275,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1250,\t\t14.517088,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.830519,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1251,\t\t7.4752,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.404345,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1252,\t\t3.160954,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.887727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1253,\t\t17.039208,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.502694,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1254,\t\t18.33537,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.278695,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1255,\t\t1.149931,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.818419,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1256,\t\t4.644607,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.091842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1257,\t\t24.971473,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.95288,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1258,\t\t56.945747,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t235.487329,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1259,\t\t31.895302,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.288719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1260,\t\t2.185335,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.168717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1261,\t\t11.919691,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.699555,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1262,\t\t0.120238,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.524108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1263,\t\t0.092782,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.352421,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1264,\t\t12.475376,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.035361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1265,\t\t1.4349,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.654727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1266,\t\t31.444784,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.710849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1267,\t\t19.701283,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.469006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1268,\t\t0.098796,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.4295,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1269,\t\t0.167664,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.105829,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1270,\t\t4.173165,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.950511,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1271,\t\t3.37547,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.371792,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1272,\t\t0.116349,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.23166,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1273,\t\t0.250267,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.169201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1274,\t\t27.678774,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.095629,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1277,\t\t20.769872,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.611252,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1278,\t\t58.974502,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t170.437781,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1280,\t\t0.00321,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.626494,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1281,\t\t0.097784,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.51246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1282,\t\t0.052895,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.363037,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1283,\t\t879.819682,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1297.764428,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1284,\t\t2.357147,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.426322,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1285,\t\t0.076928,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.937048,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1286,\t\t1.55096,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.872201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1287,\t\t32.447061,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t93.199628,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1288,\t\t66.934239,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t148.402692,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1289,\t\t20.139717,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t184.149235,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1290,\t\t0.512186,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.901974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1291,\t\t42.300128,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.293351,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1292,\t\t13.119012,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.682074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1293,\t\t0.349861,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402107,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1294,\t\t0.602846,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.39743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1295,\t\t0.630469,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.873666,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1296,\t\t2.718991,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.356489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1297,\t\t20.629249,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t177.778742,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1298,\t\t0.112693,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.014603,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1299,\t\t0.005313,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.158207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1300,\t\t3.51109,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.74405,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1301,\t\t8.715888,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.863304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1302,\t\t0.566852,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.877299,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1303,\t\t0.484662,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.335516,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1304,\t\t1.427777,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.594319,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1305,\t\t0.002208,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004567,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1306,\t\t0.293775,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.827014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1307,\t\t0.031535,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.29894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1308,\t\t0.482707,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.278321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1309,\t\t0.457528,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.34909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1310,\t\t0.2248,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.64589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1311,\t\t0.414279,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.854004,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1312,\t\t60.831761,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t262.264924,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1313,\t\t4.53936,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.836748,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1314,\t\t1.865074,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.003987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1315,\t\t1.407182,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.879027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1316,\t\t0.19579,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.757497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1317,\t\t4.89723,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.958574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1318,\t\t0.28057,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.956332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1319,\t\t3.607103,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.708276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1320,\t\t2.548622,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.75859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1321,\t\t0.016604,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.161123,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1322,\t\t0.196077,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.929763,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1323,\t\t49.356062,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t199.111909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1324,\t\t1.654827,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.063258,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1325,\t\t4.004326,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.497559,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1326,\t\t9.56894,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.928865,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1327,\t\t9.098545,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.796895,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1328,\t\t3.2605,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.063343,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1329,\t\t24.712738,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.675424,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1330,\t\t5.159392,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.131028,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1331,\t\t0.047239,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.289238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1332,\t\t1.510518,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.293088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1333,\t\t8.378229,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.650254,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1334,\t\t0.008389,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215341,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1335,\t\t0.188721,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.306939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1336,\t\t2.116941,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.773035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1337,\t\t30.958678,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t121.31241,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1338,\t\t0.126166,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.832524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1339,\t\t1.883313,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.086482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1340,\t\t25.193611,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.098327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1341,\t\t38.903014,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t205.513321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1342,\t\t0.019749,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.734589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1344,\t\t0.034214,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.226057,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1345,\t\t0.306959,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.971188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1346,\t\t37.542519,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.719215,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1347,\t\t98.72214,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.115976,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1348,\t\t2.849716,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.707927,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1349,\t\t6.815717,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t42.352342,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1350,\t\t0.009973,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.094971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1351,\t\t9.2e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.015958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1352,\t\t0.009841,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.83726,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1355,\t\t0.244701,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.688324,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1356,\t\t22.439716,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.486231,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1357,\t\t15.388789,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.459913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1358,\t\t0.036115,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.247293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1359,\t\t21.262915,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.633589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1360,\t\t3.394704,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.135983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1361,\t\t17.732495,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.207173,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1362,\t\t16.073297,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.107216,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1363,\t\t0.00602,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.036158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1364,\t\t0.008457,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.061068,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1365,\t\t7.4e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1366,\t\t0.133289,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.229992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1367,\t\t3.720232,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.863891,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1368,\t\t0.001248,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.298243,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1369,\t\t2.796777,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.968859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1370,\t\t0.073568,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.343308,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1371,\t\t13.958061,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t81.767208,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1372,\t\t58.068147,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t192.966588,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1373,\t\t9.001398,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.200257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1374,\t\t38.168009,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t108.220146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1375,\t\t20.120835,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.223816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1376,\t\t38.415648,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t176.213655,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1377,\t\t68.061848,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t234.376272,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1378,\t\t55.349369,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t246.029906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1379,\t\t0.111508,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.805984,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1380,\t\t0.19817,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.213356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1381,\t\t0.144105,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.01257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1382,\t\t29.448599,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.839906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1383,\t\t28.8955,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.821439,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1384,\t\t0.745611,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.669135,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1385,\t\t0.014025,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.124455,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1386,\t\t0.107733,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.673858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1387,\t\t0.605268,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.493561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1388,\t\t0.151595,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.928188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1389,\t\t0.034875,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.213536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1390,\t\t0.646719,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.732816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1391,\t\t0.088853,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.521719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1392,\t\t3.313362,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.306386,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1393,\t\t0.113612,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.376509,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1394,\t\t0.093541,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.077886,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1395,\t\t0.006965,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.073776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1396,\t\t0.001937,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026112,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1397,\t\t4.870087,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.084545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1398,\t\t0.546287,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.779641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1399,\t\t3.298148,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.868157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1400,\t\t0.106677,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.297197,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1401,\t\t16.684143,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.339497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1402,\t\t5.055552,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.328902,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1403,\t\t30.321741,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.651672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1404,\t\t33.118783,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.800518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1405,\t\t6.021286,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.550802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1406,\t\t2.414843,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.763987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1407,\t\t0.033041,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.211614,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1408,\t\t10.584741,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.078698,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1409,\t\t2.238485,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.019786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1410,\t\t5.976191,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.466518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1411,\t\t11.591789,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.395367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1412,\t\t0.593497,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.987601,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1413,\t\t0.49108,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.679791,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1414,\t\t1.592339,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.992489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1415,\t\t0.536069,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.454501,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1416,\t\t0.587497,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.958002,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1417,\t\t6.9e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.001311,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1418,\t\t13.912878,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.264613,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1419,\t\t4.662062,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.260903,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1420,\t\t0.099947,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.399757,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1421,\t\t0.353192,\t\t0,\t\t9999,\t\t-9999,\t\t0.999608,\t\t100,\t\t1,\t\t6.972369,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1422,\t\t0.253275,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.730495,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1423,\t\t0.096661,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.931017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1424,\t\t135.574548,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t219.092115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1425,\t\t4.748571,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.366402,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1426,\t\t20.054725,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.762602,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1427,\t\t74.662807,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t480.698671,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1428,\t\t39.828118,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t334.885743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1429,\t\t0.535089,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.279826,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1430,\t\t6e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.034248,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1431,\t\t91.17549,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t227.662022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1432,\t\t1.348558,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.058931,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1433,\t\t453.2655,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1289.241188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1434,\t\t32.759997,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.440014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1435,\t\t15.496544,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t86.713217,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1436,\t\t14.396063,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.434116,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1437,\t\t44.163256,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.321958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1438,\t\t99.441767,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.815158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1439,\t\t24.890704,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.103164,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1440,\t\t0.091821,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.833609,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1441,\t\t0.034275,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.171578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1442,\t\t0.047578,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.715522,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1443,\t\t21.317809,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t103.005076,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1444,\t\t0.487562,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.981696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1445,\t\t1.094903,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.036799,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1446,\t\t161.324387,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t758.547933,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1447,\t\t24.627996,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.477411,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1448,\t\t1.797663,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.523578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1449,\t\t19.387572,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t95.437673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1450,\t\t14.706152,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.256809,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1451,\t\t17.01524,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.198838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1452,\t\t5.607337,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.068921,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1453,\t\t12.11857,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.93775,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1454,\t\t22.601402,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.126607,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1455,\t\t0.129074,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.654438,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1456,\t\t11.049496,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.054822,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1457,\t\t0.327084,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.002672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1458,\t\t0.04021,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.246199,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1459,\t\t0.436561,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.309059,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1460,\t\t3.811911,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t101.498473,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1461,\t\t2.858467,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.951737,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1462,\t\t0.383683,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402686,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1463,\t\t0.074978,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.711207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1464,\t\t13.388959,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.884211,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1465,\t\t0.855786,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.299939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1466,\t\t1.145169,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.685017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1467,\t\t0.353676,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.096155,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1468,\t\t3.720876,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.789171,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1469,\t\t11.381529,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.007467,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1470,\t\t18.500305,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t78.965265,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1471,\t\t36.36694,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t159.165074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1472,\t\t2.182928,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.980182,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1473,\t\t1.357384,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.362608,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1474,\t\t0.280168,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.398948,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1475,\t\t0.077898,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.39088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1476,\t\t56.723376,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t250.480113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1477,\t\t2.086897,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.122974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1479,\t\t0.309757,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.592606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1480,\t\t0.96417,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.681964,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1481,\t\t0.008038,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.053146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1482,\t\t2.322416,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.51083,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1483,\t\t0.606951,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.599649,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1484,\t\t0.005476,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02991,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1485,\t\t0.103178,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.563547,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1486,\t\t0.530833,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.89934,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1487,\t\t0.168631,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.142917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1488,\t\t0.004706,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.569856,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1489,\t\t0.012558,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.118938,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1490,\t\t298.613902,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t782.463701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1491,\t\t40.218141,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.622838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1492,\t\t92.172551,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t229.927503,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1493,\t\t43.467116,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.557175,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1494,\t\t117.130613,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t404.486733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1495,\t\t7.844757,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.920717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1496,\t\t1.8e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000282,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1497,\t\t16.551791,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.070006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1498,\t\t31.194747,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.800802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1499,\t\t0.161841,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.286676,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1500,\t\t0.049612,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.154817,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1501,\t\t1.12898,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.165333,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1502,\t\t0.011233,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.938928,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1503,\t\t6.060497,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.972187,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1504,\t\t37.419159,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.822836,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1505,\t\t2.502294,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.765913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1506,\t\t5.256361,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.406717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1507,\t\t1.352479,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.438042,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1508,\t\t0.011655,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.065259,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1510,\t\t11.151628,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t107.008141,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1511,\t\t33.261342,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.22192,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1512,\t\t3.833361,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.130052,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1513,\t\t1.554979,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.051786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1516,\t\t0.002696,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02881,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1517,\t\t0.19223,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.286804,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1518,\t\t0.092424,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.670542,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1519,\t\t0.006415,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.04654,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n\t])\n\tppc[\"branch\"] = array([\n\t\t[586,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[589,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[590,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[593,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[594,\t\t114,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[595,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[598,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[599,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[601,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[602,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[603,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[607,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[608,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[609,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[612,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[613,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[614,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[616,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[617,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[618,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[619,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[621,\t\t136,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[624,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[629,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[632,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[637,\t\t148,\t\t0,\t\t1e-05,\t\t0,\
t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[638,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[640,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[641,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[642,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[643,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[647,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[650,\t\t166,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[652,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[655,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[661,\t\t177,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[663,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[666,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[668,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[670,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[672,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[676,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[681,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[683,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[687,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[691,\t\t209,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[693,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[694,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[695,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[696,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[697,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[698,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[702,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[704,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[705,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[707,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[713,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[714,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[716,\t\t226,\t\t0,\t\t1e-05,\
t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[717,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[719,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[722,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[724,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[727,\t\t243,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[728,\t\t244,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[730,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[732,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[735,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[738,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[741,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[742,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[743,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[746,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[747,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[748,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[749,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[750,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[753,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[758,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[760,\t\t287,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[762,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[763,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[765,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[767,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[769,\t\t293,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[771,\t\t297,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[772,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[774,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[777,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[778,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[781,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[784,\t\t563,\t\t0,\t\t1
e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[785,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[787,\t\t308,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[788,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[789,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[791,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[792,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[795,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[800,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[801,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[802,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[805,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[806,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[808,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[809,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[811,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[814,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[816,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[817,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[821,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[822,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[826,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[830,\t\t345,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[834,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[835,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[836,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[837,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[839,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[841,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[843,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[844,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[845,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[849,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[850,\t\t574,\t
\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[851,\t\t575,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[853,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[855,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[856,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[857,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[858,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[860,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[862,\t\t372,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[863,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[864,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[865,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[867,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[869,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[870,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[872,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[874,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[875,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[877,\t\t578,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[882,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[883,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[885,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[886,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[889,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[890,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[893,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[894,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[895,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[896,\t\t581,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[898,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[900,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[902,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[903,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[905,\t
\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[906,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[907,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[909,\t\t417,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[913,\t\t422,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[915,\t\t423,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[917,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[918,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[920,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[921,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[922,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[923,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[925,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[928,\t\t435,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[931,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[935,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[936,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[937,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[939,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[940,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[942,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[944,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[945,\t\t459,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[950,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[952,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[958,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[959,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[960,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[963,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[965,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[966,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[967,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[968,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[969,
\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[971,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[973,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[976,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[978,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[980,\t\t508,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[981,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[982,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[983,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[984,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[985,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[986,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[987,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[988,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[993,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[994,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[995,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[997,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[999,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1000,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1002,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1003,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1007,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1008,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1010,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1011,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1012,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1014,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1026,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1027,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1028,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1029,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1030,\t\t269,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[10
31,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1032,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1033,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1034,\t\t4,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1035,\t\t6,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1036,\t\t7,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1037,\t\t8,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1038,\t\t9,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1039,\t\t11,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1040,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1041,\t\t16,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1042,\t\t17,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1043,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1044,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1045,\t\t23,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1046,\t\t25,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1047,\t\t27,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1048,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1049,\t\t29,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1050,\t\t31,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1051,\t\t33,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1052,\t\t34,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1053,\t\t35,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1054,\t\t36,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1055,\t\t38,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1056,\t\t39,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1057,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1058,\t\t41,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1059,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1060,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1061,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1062,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1063,\t\t48,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t
[1064,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1065,\t\t50,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1066,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1067,\t\t53,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1068,\t\t54,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1069,\t\t55,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1070,\t\t57,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1071,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1072,\t\t59,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1073,\t\t60,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1074,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1075,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1076,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1077,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1078,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1079,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1080,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1081,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1082,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1083,\t\t73,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1084,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1085,\t\t76,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1086,\t\t77,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1087,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1088,\t\t80,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1089,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1090,\t\t82,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1091,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1092,\t\t84,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1093,\t\t85,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1094,\t\t88,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1095,\t\t89,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1096,\t\t90,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\
t],\n\t\t[1097,\t\t91,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1098,\t\t92,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1099,\t\t93,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1100,\t\t97,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1101,\t\t98,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1102,\t\t101,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1103,\t\t102,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1104,\t\t103,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1105,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1106,\t\t109,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1107,\t\t110,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1108,\t\t111,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1109,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1110,\t\t113,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1111,\t\t114,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1112,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1113,\t\t116,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1114,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1115,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1116,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1117,\t\t122,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1118,\t\t126,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1119,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1120,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1121,\t\t131,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1122,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1123,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1124,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1125,\t\t135,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1126,\t\t136,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1127,\t\t137,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1128,\t\t139,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1129,\t\t140,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,
\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1130,\t\t141,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1131,\t\t142,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1132,\t\t144,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1133,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1134,\t\t146,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1135,\t\t147,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1136,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1137,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1138,\t\t150,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1139,\t\t151,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1140,\t\t152,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1141,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1142,\t\t154,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1143,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1144,\t\t158,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1145,\t\t161,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1146,\t\t162,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1147,\t\t163,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1148,\t\t164,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1149,\t\t166,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1150,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1151,\t\t168,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1152,\t\t169,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1153,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1154,\t\t171,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1155,\t\t172,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1156,\t\t173,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1157,\t\t174,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1158,\t\t175,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1159,\t\t176,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1160,\t\t177,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1161,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1162,\t\t179,\t\t0,\
t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1163,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1164,\t\t181,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1165,\t\t182,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1166,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1167,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1168,\t\t186,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1169,\t\t187,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1170,\t\t188,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1171,\t\t189,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1172,\t\t190,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1173,\t\t192,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1174,\t\t193,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1175,\t\t194,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1176,\t\t196,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1177,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1178,\t\t198,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1179,\t\t199,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1180,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1181,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1182,\t\t203,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1183,\t\t204,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1184,\t\t205,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1185,\t\t206,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1186,\t\t207,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1187,\t\t208,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1188,\t\t209,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1189,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1190,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1191,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1192,\t\t213,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1193,\t\t214,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1194,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-36
0,\t\t360\t\t],\n\t\t[1195,\t\t216,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1196,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1197,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1198,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1199,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1200,\t\t222,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1201,\t\t223,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1202,\t\t224,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1203,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1204,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1205,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1206,\t\t228,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1207,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1208,\t\t230,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1209,\t\t234,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1210,\t\t235,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1211,\t\t237,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1212,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1213,\t\t239,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1214,\t\t240,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1215,\t\t241,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1216,\t\t242,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1217,\t\t243,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1218,\t\t244,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1219,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1220,\t\t251,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1221,\t\t252,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1222,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1223,\t\t254,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1224,\t\t255,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1225,\t\t256,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1226,\t\t257,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1227,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\
t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1228,\t\t260,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1229,\t\t263,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1230,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1231,\t\t266,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1232,\t\t267,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1233,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1235,\t\t271,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1236,\t\t272,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1237,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1238,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1239,\t\t275,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1240,\t\t276,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1241,\t\t278,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1242,\t\t281,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1243,\t\t282,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1244,\t\t283,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1245,\t\t284,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1246,\t\t285,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1247,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1248,\t\t287,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1250,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1251,\t\t291,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1252,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1253,\t\t293,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1254,\t\t294,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1255,\t\t295,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1256,\t\t296,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1257,\t\t297,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1258,\t\t298,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1259,\t\t299,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1260,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1261,\t\t302,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[126
2,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1263,\t\t304,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1264,\t\t307,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1265,\t\t308,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1266,\t\t309,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1267,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1268,\t\t312,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1269,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1270,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1271,\t\t317,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1272,\t\t318,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1273,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1274,\t\t321,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1277,\t\t324,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1278,\t\t325,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1280,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1281,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1282,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1283,\t\t331,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1284,\t\t333,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1285,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1286,\t\t337,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1287,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1288,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1289,\t\t340,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1290,\t\t341,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1291,\t\t342,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1292,\t\t343,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1293,\t\t344,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1294,\t\t345,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1295,\t\t346,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1296,\t\t347,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1297,\t\t348,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t
\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1298,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1299,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1300,\t\t353,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1301,\t\t354,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1302,\t\t355,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1303,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1304,\t\t357,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1305,\t\t359,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1306,\t\t361,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1307,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1308,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1309,\t\t364,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1310,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1311,\t\t366,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1312,\t\t367,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1313,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1314,\t\t369,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1315,\t\t370,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1316,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1317,\t\t372,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1318,\t\t373,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1319,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1320,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1321,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1322,\t\t377,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1323,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1324,\t\t379,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1325,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1326,\t\t384,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1327,\t\t385,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1328,\t\t386,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1329,\t\t387,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1330,\t\t388,\t\t0,\t\t1e-05
,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1331,\t\t390,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1332,\t\t391,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1333,\t\t392,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1334,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1335,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1336,\t\t395,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1337,\t\t396,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1338,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1339,\t\t398,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1340,\t\t399,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1341,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1342,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1344,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1345,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1346,\t\t407,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1347,\t\t408,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1348,\t\t410,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1349,\t\t411,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1350,\t\t412,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1351,\t\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1352,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1355,\t\t418,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1356,\t\t419,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1357,\t\t420,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1358,\t\t421,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1359,\t\t422,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1360,\t\t423,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1361,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1362,\t\t425,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1363,\t\t426,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1364,\t\t427,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1365,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t36
0\t\t],\n\t\t[1366,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1367,\t\t430,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1368,\t\t431,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1369,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1370,\t\t433,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1371,\t\t434,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1372,\t\t435,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1373,\t\t436,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1374,\t\t437,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1375,\t\t438,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1376,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1377,\t\t440,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1378,\t\t441,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1379,\t\t442,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1380,\t\t443,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1381,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1382,\t\t446,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1383,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1384,\t\t448,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1385,\t\t449,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1386,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1387,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1388,\t\t453,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1389,\t\t454,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1390,\t\t455,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1391,\t\t456,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1392,\t\t457,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1393,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1394,\t\t459,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1395,\t\t460,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1396,\t\t461,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1397,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1398,\t\t463,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,
\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1399,\t\t464,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1400,\t\t465,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1401,\t\t466,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1402,\t\t467,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1403,\t\t468,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1404,\t\t469,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1405,\t\t470,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1406,\t\t471,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1407,\t\t472,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1408,\t\t473,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1409,\t\t474,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1410,\t\t475,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1411,\t\t476,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1412,\t\t477,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1413,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1414,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1415,\t\t480,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1416,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1417,\t\t482,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1418,\t\t483,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1419,\t\t484,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1420,\t\t485,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1421,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1422,\t\t487,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1423,\t\t488,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1424,\t\t489,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1425,\t\t490,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1426,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1427,\t\t492,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1428,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1429,\t\t494,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1430,\t\t495,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1431,\t\t49
6,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1432,\t\t497,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1433,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1434,\t\t499,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1435,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1436,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1437,\t\t502,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1438,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1439,\t\t504,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1440,\t\t505,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1441,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1442,\t\t507,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1443,\t\t508,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1444,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1445,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1446,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1447,\t\t512,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1448,\t\t513,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1449,\t\t514,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1450,\t\t515,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1451,\t\t516,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1452,\t\t517,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1453,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1454,\t\t519,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1455,\t\t520,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1456,\t\t521,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1457,\t\t522,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1458,\t\t523,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1459,\t\t524,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1460,\t\t525,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1461,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1462,\t\t527,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1463,\t\t528,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t
1,\t\t-360,\t\t360\t\t],\n\t\t[1464,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1465,\t\t530,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1466,\t\t531,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1467,\t\t532,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1468,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1469,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1470,\t\t535,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1471,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1472,\t\t537,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1473,\t\t538,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1474,\t\t539,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1475,\t\t540,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1476,\t\t541,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1477,\t\t542,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1479,\t\t544,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1480,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1481,\t\t546,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1482,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1483,\t\t548,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1484,\t\t549,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1485,\t\t550,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1486,\t\t551,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1487,\t\t552,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1488,\t\t554,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1489,\t\t555,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1490,\t\t556,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1491,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1492,\t\t558,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1493,\t\t559,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1494,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1495,\t\t561,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1496,\t\t562,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1497,\t\t563,\t\t0,\t\t1e-05,\t\t0,\
t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1498,\t\t564,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1499,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1500,\t\t566,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1501,\t\t567,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1502,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1503,\t\t569,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1504,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1505,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1506,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1507,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1508,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1510,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1511,\t\t577,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1512,\t\t578,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1513,\t\t579,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1516,\t\t582,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1517,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1518,\t\t584,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1519,\t\t585,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1,\t\t490,\t\t0,\t\t0.01433884297520661,\t\t0.151691958358336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.375\t\t],\n\t\t[3,\t\t4,\t\t0,\t\t0.006291637811634348,\t\t0.903417549506624,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t72.681\t\t],\n\t\t[491,\t\t6,\t\t0,\t\t0.011200661157024791,\t\t0.118492839955776,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.882\t\t],\n\t\t[7,\t\t5,\t\t0,\t\t0.005794840720221606,\t\t0.20802058859584005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.471\t\t],\n\t\t[8,\t\t9,\t\t0,\t\t0.0024379328254847646,\t\t0.350063268897336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.163\t\t],\n\t\t[492,\t\t11,\t\t0,\t\t0.018224793388429753,\t\t0.0482004476327704,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.565\t\t],\n\t\t[11,\t\t493,\t\t0,\t\t0.030286942148760328,\t\t0.08010209706571599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.809\t\t],\n\t\t[492,\t\t493,\t\t0,\t\t0.04521652892561983,\t\t0.11958747011094399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t68.39\t\t],\n\t\t[494,\t\t14,\t\t0,\t\t0.012990743801652892,\t\t0.137430291356512,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.297\t\t],\n\t\t[13,\t\t15,\t\t0,\t\t0.007681959833795014,\t\t0.27576354266704156,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\
t\t1,\t\t-360,\t\t44.371\t\t],\n\t\t[16,\t\t5,\t\t0,\t\t0.006275623268698061,\t\t0.22527950450957998,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.248000000000005\t\t],\n\t\t[17,\t\t18,\t\t0,\t\t0.04623522622347646,\t\t0.9335989000302801,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t200.291\t\t],\n\t\t[17,\t\t12,\t\t0,\t\t0.0056020313942728535,\t\t0.113118303398186,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.268\t\t],\n\t\t[14,\t\t495,\t\t0,\t\t0.0017957024793388433,\t\t0.018996904156819597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.432\t\t],\n\t\t[494,\t\t19,\t\t0,\t\t0.010246611570247935,\t\t0.10839986031771602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.996\t\t],\n\t\t[20,\t\t21,\t\t0,\t\t0.005415685595567867,\t\t0.19440984828307922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t31.281\t\t],\n\t\t[20,\t\t22,\t\t0,\t\t0.0049706544321329645,\t\t0.713737278110032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.42100000000001\t\t],\n\t\t[497,\t\t23,\t\t0,\t\t0.002190413223140496,\t\t0.005793146490362,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.313\t\t],\n\t\t[23,\t\t499,\t\t0,\t\t0.020799669421487598,\t\t0.22004164444829602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.919\t\t],\n\t\t[25,\t\t26,\t\t0,\t\t0.00141845567867036,\t\t0.050919084651523595,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.193\t\t],\n\t\t[25,\t\t22,\t\t0,\t\t0.0035578254847645433,\t\t0.0319293051869808,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.275\t\t],\n\t\t[23,\t\t27,\t\t0,\t\t0.027738181818181818,\t\t0.073361203699828,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.95399999999999\t\t],\n\t\t[28,\t\t23,\t\t0,\t\t0.012841652892561981,\t\t0.0339632611780132,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.423\t\t],\n\t\t[8,\t\t21,\t\t0,\t\t0.004948753462603878,\t\t0.17764812836304802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.584\t\t],\n\t\t[9,\t\t29,\t\t0,\t\t0.002212863573407202,\t\t0.31774552934092004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.563000000000002\t\t],\n\t\t[30,\t\t25,\t\t0,\t\t0.019958795013850415,\t\t0.17911796401827998,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.641000000000005\t\t],\n\t\t[31,\t\t32,\t\t0,\t\t0.0299776084949446,\t\t0.605319030583196,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t129.863\t\t],\n\t\t[32,\t\t33,\t\t0,\t\t0.016762234533725762,\t\t0.33846927983213604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.61399999999999\t\t],\n\t\t[34,\t\t35,\t\t0,\t\t0.001931900826446281,\t\t0.020437759184893597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.843999999999999\t\t],\n\t\t[35,\t\t36,\t\t0,\t\t0.0008730578512396695,\t\t0.0092361605077588,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.641\t\t],\n\t\t[490,\t\t6,\t\t0,\t\t0.049352066115702475,\t\t0.130525028606764,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.645\t\t],\n\t\t[37,\t\t10,\t\t0,\t\t0.02404639889196676,\t\t0.485553838251812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.169\t\t],\n\t\t[10,\t\t38,\t\t0,\t\t0.006848799630657894,\t\t0.13829351176534158,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.669\t\t],\n\t\t[37,\t\t38,\t\t0,\t\t0.01437834718372576,\t\t1.1613317560186958,\t\t2567.0,\t\t2567
.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t124.574\t\t],\n\t\t[39,\t\t40,\t\t0,\t\t0.04521629732222991,\t\t0.913024308337812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t195.877\t\t],\n\t\t[39,\t\t41,\t\t0,\t\t0.017466989843005543,\t\t0.35269996139852006,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.667\t\t],\n\t\t[42,\t\t41,\t\t0,\t\t0.031145429362880884,\t\t0.6289001042979919,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t134.922\t\t],\n\t\t[18,\t\t42,\t\t0,\t\t0.03439750692520776,\t\t0.6945672650962679,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t149.01\t\t],\n\t\t[492,\t\t43,\t\t0,\t\t0.01819173553719008,\t\t0.192452068436848,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.03\t\t],\n\t\t[44,\t\t45,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t505,\t\t0,\t\t0.006061487603305785,\t\t0.0160312607980052,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[46,\t\t12,\t\t0,\t\t0.0014741170360110802,\t\t0.2116687641962416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.029\t\t],\n\t\t[47,\t\t48,\t\t0,\t\t0.005344182825484765,\t\t0.01199019212302604,\t\t428.0,\t\t428.0,\t\t428.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.7170000000000005\t\t],\n\t\t[49,\t\t50,\t\t0,\t\t0.0019151662049861494,\t\t0.0171874439892256,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.531000000000001\t\t],\n\t\t[31,\t\t33,\t\t0,\t\t0.013475992613088641,\t\t0.27211225959163604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.378\t\t],\n\t\t[31,\t\t51,\t\t0,\t\t0.003518611495844875,\t\t0.5052381383693519,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.647\t\t],\n\t\t[52,\t\t53,\t\t0,\t\t0.010464421745152355,\t\t1.5025884408875438,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t120.885\t\t],\n\t\t[52,\t\t54,\t\t0,\t\t0.0076126500461911354,\t\t0.1537174637168,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.978\t\t],\n\t\t[506,\t\t55,\t\t0,\t\t0.012634380165289257,\t\t0.133660287181212,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.219\t\t],\n\t\t[506,\t\t507,\t\t0,\t\t0.044157355371900825,\t\t0.11678619613628,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.788\t\t],\n\t\t[57,\t\t506,\t\t0,\t\t0.004687272727272727,\t\t0.049587095736244,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.179\t\t],\n\t\t[57,\t\t58,\t\t0,\t\t0.014436363636363634,\t\t0.0381809096340232,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.835\t\t],\n\t\t[58,\t\t506,\t\t0,\t\t0.019797685950413223,\t\t0.052360391943288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.944000000000003\t\t],\n\t\t[59,\t\t60,\t\t0,\t\t0.019407548476454296,\t\t0.174170863885556,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.049\t\t],\n\t\t[508,\t\t62,\t\t0,\t\t0.051111404958677685,\t\t0.03379452026753001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.653\t\t],\n\t\t[30,\t\t61,\t\t0,\t\t0.03143698060941828,\t\t0.28212765137935203,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.79\t\t],\n\t\t[63,\t\t506,\t\t0,\t\t0.027457190082644623,\t\t0.072618044249872,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.528999999999996\t\t],\n\t\t[13,\t\t64,\t\t0,\t\t0.0014816481994459833,\t\t0.2127501654814608,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\
t\t2,\t\t1,\t\t-360,\t\t17.116\t\t],\n\t\t[65,\t\t66,\t\t0,\t\t0.03778185595567867,\t\t0.7629053006222161,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t163.671\t\t],\n\t\t[59,\t\t67,\t\t0,\t\t0.0051880193905817175,\t\t0.046559297286324804,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.982999999999999\t\t],\n\t\t[61,\t\t67,\t\t0,\t\t0.012931440443213295,\t\t0.1160517597580644,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.346\t\t],\n\t\t[68,\t\t69,\t\t0,\t\t0.011149584487534626,\t\t0.4002427745096039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.4\t\t],\n\t\t[70,\t\t69,\t\t0,\t\t0.009625346260387812,\t\t0.345526355460808,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.596000000000004\t\t],\n\t\t[71,\t\t72,\t\t0,\t\t0.008878635734072021,\t\t0.318721276477736,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.283\t\t],\n\t\t[73,\t\t74,\t\t0,\t\t0.012529547553116345,\t\t0.253001288604392,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t54.278\t\t],\n\t\t[37,\t\t75,\t\t0,\t\t0.027459141274238225,\t\t0.5544652029066119,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t118.95299999999999\t\t],\n\t\t[72,\t\t75,\t\t0,\t\t0.006688711911357341,\t\t0.240108375006292,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.634\t\t],\n\t\t[37,\t\t72,\t\t0,\t\t0.036222068328739615,\t\t0.7314094881920841,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t156.914\t\t],\n\t\t[76,\t\t77,\t\t0,\t\t0.004683777700831025,\t\t0.6725445900750401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t54.107\t\t],\n\t\t[77,\t\t51,\t\t0,\t\t0.00363183864265928,\t\t0.5214964473447999,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.955\t\t],\n\t\t[73,\t\t72,\t\t0,\t\t0.025475069252077563,\t\t0.514402082018968,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.35799999999999\t\t],\n\t\t[18,\t\t40,\t\t0,\t\t0.01302770083102493,\t\t0.26306018504072,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.43600000000001\t\t],\n\t\t[492,\t\t45,\t\t0,\t\t0.0308703030303719,\t\t0.18370114733484796,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.03699999999999\t\t],\n\t\t[10,\t\t74,\t\t0,\t\t0.030167359187465374,\t\t0.609150547206812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t130.685\t\t],\n\t\t[45,\t\t511,\t\t0,\t\t0.08203371900826446,\t\t0.05424014819960001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.038000000000004\t\t],\n\t\t[78,\t\t32,\t\t0,\t\t0.013458795013850415,\t\t0.48313777647302397,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.738\t\t],\n\t\t[79,\t\t80,\t\t0,\t\t0.0038086911357340715,\t\t0.1367226831743568,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.999000000000002\t\t],\n\t\t[81,\t\t79,\t\t0,\t\t0.010767832409972299,\t\t0.3865388099484561,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t62.195\t\t],\n\t\t[34,\t\t82,\t\t0,\t\t0.0015497520661157025,\t\t0.00409874294399768,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.344\t\t],\n\t\t[83,\t\t84,\t\t0,\t\t0.00902611570247934,\t\t0.0238720301499152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.652000000000001\t\t],\n\t\t[83,\t\t499,\t\t0,\t\t0.04179570247933885,\t\t0.0276350398834796,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.608\t\t],\n\t\t[85,\t\t86,\t\t0,\t\t0.00802354570637119,\t\t0.2880
2563884886,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.343999999999994\t\t],\n\t\t[87,\t\t86,\t\t0,\t\t0.01904968836565097,\t\t0.683837154069184,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.031\t\t],\n\t\t[88,\t\t89,\t\t0,\t\t0.00380297520661157,\t\t0.010058007429140002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.752000000000001\t\t],\n\t\t[90,\t\t86,\t\t0,\t\t0.012097818559556786,\t\t0.434282055192244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t69.877\t\t],\n\t\t[91,\t\t86,\t\t0,\t\t9.26246537396122e-05,\t\t0.013299992817559201,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t92,\t\t0,\t\t0.0001852493074792244,\t\t0.0066499964087796005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t93,\t\t0,\t\t0.008152181440443215,\t\t0.292643346635492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.086999999999996\t\t],\n\t\t[94,\t\t86,\t\t0,\t\t0.012883829639889197,\t\t0.46249792780547194,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.417\t\t],\n\t\t[86,\t\t95,\t\t0,\t\t0.010421052631578947,\t\t0.37409026526870803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t60.192\t\t],\n\t\t[513,\t\t517,\t\t0,\t\t0.0008733884297520661,\t\t0.0023099144321748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.321\t\t],\n\t\t[97,\t\t66,\t\t0,\t\t0.03812777008310249,\t\t0.34217338998058805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.113\t\t],\n\t\t[42,\t\t98,\t\t0,\t\t0.003091759002770083,\t\t0.44394630230884,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t35.716\t\t],\n\t\t[99,\t\t100,\t\t0,\t\t0.016371537396121884,\t\t0.587698093837988,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t94.56200000000001\t\t],\n\t\t[42,\t\t101,\t\t0,\t\t0.008165339335180054,\t\t0.29311568282888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.163000000000004\t\t],\n\t\t[102,\t\t42,\t\t0,\t\t0.012403047091412742,\t\t0.44523901189173193,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t71.64\t\t],\n\t\t[103,\t\t87,\t\t0,\t\t0.007073060941828254,\t\t0.25390556381756,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.854\t\t],\n\t\t[104,\t\t103,\t\t0,\t\t0.0028852146814404432,\t\t0.1035721403291428,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.665\t\t],\n\t\t[105,\t\t87,\t\t0,\t\t0.006406682825484765,\t\t0.22998422159488002,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.005\t\t],\n\t\t[106,\t\t107,\t\t0,\t\t0.005714219759923823,\t\t0.11538365264216799,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.754\t\t],\n\t\t[108,\t\t107,\t\t0,\t\t0.0025427631578947367,\t\t0.09127896939786201,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.687000000000001\t\t],\n\t\t[109,\t\t106,\t\t0,\t\t0.003030470914127424,\t\t0.10878648330773438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.504\t\t],\n\t\t[110,\t\t111,\t\t0,\t\t0.019821849030470913,\t\t0.7115558306889919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.491\t\t],\n\t\t[87,\t\t112,\t\t0,\t\t0.006135907202216068,\t\t0.220264039928212,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.441\t\t],\n\t\t[113,\t\t87,\t\t0,\t\t0.003981648199445983,\t\t0.14293141813921081,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.998\t\t],\n\t
\t[87,\t\t85,\t\t0,\t\t0.011046225761772853,\t\t0.3965324494097,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.803000000000004\t\t],\n\t\t[110,\t\t114,\t\t0,\t\t0.011665339335180056,\t\t0.418757110306188,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.37899999999999\t\t],\n\t\t[115,\t\t116,\t\t0,\t\t0.007048925619834712,\t\t0.07457124214588401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.323\t\t],\n\t\t[117,\t\t118,\t\t0,\t\t0.005987534626038782,\t\t0.21493782785077598,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.584\t\t],\n\t\t[117,\t\t119,\t\t0,\t\t0.0038738746537396117,\t\t0.5562504472696961,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.751000000000005\t\t],\n\t\t[117,\t\t120,\t\t0,\t\t0.005886686288088643,\t\t0.8452704781039522,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t68.003\t\t],\n\t\t[121,\t\t122,\t\t0,\t\t0.0021170360110803325,\t\t0.0759964075574972,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.228\t\t],\n\t\t[123,\t\t124,\t\t0,\t\t0.0018386426592797783,\t\t0.0660027680945204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.62\t\t],\n\t\t[125,\t\t126,\t\t0,\t\t0.004941135734072022,\t\t0.17737467056702802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.54\t\t],\n\t\t[127,\t\t119,\t\t0,\t\t0.0029027008310249305,\t\t0.1041998502705648,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.766\t\t],\n\t\t[118,\t\t128,\t\t0,\t\t0.007397160664819945,\t\t0.265539950057812,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.726000000000006\t\t],\n\t\t[121,\t\t119,\t\t0,\t\t0.002552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[530,\t\t527,\t\t0,\t\t0.022726611570247933,\t\t0.060106736329903994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.374\t\t],\n\t\t[125,\t\t130,\t\t0,\t\t0.002931440443213297,\t\t0.105231531956442,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.932000000000002\t\t],\n\t\t[125,\t\t123,\t\t0,\t\t0.0019078081717451524,\t\t0.2739425623421336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.039\t\t],\n\t\t[131,\t\t132,\t\t0,\t\t0.0035744459833795014,\t\t0.12831385593973843,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.646\t\t],\n\t\t[133,\t\t123,\t\t0,\t\t0.003864439058171745,\t\t0.13872389704704202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.320999999999998\t\t],\n\t\t[524,\t\t134,\t\t0,\t\t0.008092231404958678,\t\t0.08560847143881999,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.479\t\t],\n\t\t[135,\t\t136,\t\t0,\t\t0.005242901662049862,\t\t0.1882073282678,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.283\t\t],\n\t\t[123,\t\t131,\t\t0,\t\t0.003138331024930748,\t\t0.1126583971045252,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.127\t\t],\n\t\t[117,\t\t128,\t\t0,\t\t0.010800034626038782,\t\t0.38769479063117196,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.381\t\t],\n\t\t[137,\t\t521,\t\t0,\t\t0.013832396694214875,\t\t0.14633421587532003,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.843\t\t],\n\t\t[531,\t\t514,\t\t0,\t\t0.0059504132231404955,\t\t0.035409362037522,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.5\t\t],\n\t\t[139,\t\t521,\t\t0,\t\t0.021257520661157023,\t\t0.05622132386323199,\
t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.152\t\t],\n\t\t[140,\t\t514,\t\t0,\t\t0.018527603305785127,\t\t0.04900131122836401,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.023000000000003\t\t],\n\t\t[522,\t\t141,\t\t0,\t\t0.012168595041322314,\t\t0.032183175718526795,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.405\t\t],\n\t\t[142,\t\t523,\t\t0,\t\t0.007060165289256198,\t\t0.0746901476577608,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.357\t\t],\n\t\t[530,\t\t526,\t\t0,\t\t0.020281652892561983,\t\t0.053640374808152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.676\t\t],\n\t\t[140,\t\t532,\t\t0,\t\t0.004669090909090909,\t\t0.0123486871461184,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.062\t\t],\n\t\t[142,\t\t144,\t\t0,\t\t0.006678126721756199,\t\t0.0397397958689204,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.151\t\t],\n\t\t[140,\t\t522,\t\t0,\t\t0.020450247933884298,\t\t0.05408627047793199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.930999999999997\t\t],\n\t\t[145,\t\t146,\t\t0,\t\t0.028527603305785125,\t\t0.07544904460236,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.148\t\t],\n\t\t[147,\t\t523,\t\t0,\t\t0.02461289256198347,\t\t0.0650955220034416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t37.227\t\t],\n\t\t[144,\t\t523,\t\t0,\t\t0.008479338842975206,\t\t0.0224259292904064,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.825\t\t],\n\t\t[139,\t\t523,\t\t0,\t\t0.029245619834710742,\t\t0.0193370088934308,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.116999999999997\t\t],\n\t\t[140,\t\t141,\t\t0,\t\t0.008362975206611572,\t\t0.022118173847506,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.649000000000001\t\t],\n\t\t[528,\t\t526,\t\t0,\t\t0.015389090909090908,\t\t0.0407006573227188,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.276\t\t],\n\t\t[528,\t\t148,\t\t0,\t\t0.014306115702479338,\t\t0.0378364333712244,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.638\t\t],\n\t\t[149,\t\t150,\t\t0,\t\t0.013604628099173552,\t\t0.035981157661543604,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.576999999999998\t\t],\n\t\t[145,\t\t528,\t\t0,\t\t0.00320595041322314,\t\t0.0084790121737992,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.849\t\t],\n\t\t[530,\t\t151,\t\t0,\t\t0.013144462809917355,\t\t0.0347641247737036,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.881\t\t],\n\t\t[524,\t\t152,\t\t0,\t\t0.014598347107438016,\t\t0.03860931919944,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.08\t\t],\n\t\t[149,\t\t525,\t\t0,\t\t0.016897190082644627,\t\t0.17875695122823998,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t51.114\t\t],\n\t\t[139,\t\t514,\t\t0,\t\t0.007824132231404959,\t\t0.020693056313687997,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.834000000000001\t\t],\n\t\t[126,\t\t120,\t\t0,\t\t0.012780297783933518,\t\t0.458781387757004,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.819\t\t],\n\t\t[530,\t\t153,\t\t0,\t\t0.02254545454545455,\t\t0.059627617060924,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.1\t\t],\n\t\t[528,\t\t147,\t\t0,\t\t0.15786710743801652,\t\t0.104380679149868,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t119.387\t\t],\n\t\t[528,\t\t154,\t\t0,\t\t0.006528264462809917,\t\t0.01
7265779790547203,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.874\t\t],\n\t\t[130,\t\t120,\t\t0,\t\t0.01450502077562327,\t\t0.5206947188067639,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t83.781\t\t],\n\t\t[528,\t\t155,\t\t0,\t\t0.16064132231404957,\t\t0.1062149715341,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t121.485\t\t],\n\t\t[524,\t\t533,\t\t0,\t\t0.004432727272727273,\t\t0.0468942356109744,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.409\t\t],\n\t\t[524,\t\t149,\t\t0,\t\t0.0056413223140495865,\t\t0.05968007537478799,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.065\t\t],\n\t\t[154,\t\t150,\t\t0,\t\t0.007539173553719007,\t\t0.0199394052006688,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t11.402999999999999\t\t],\n\t\t[157,\t\t110,\t\t0,\t\t0.009962084487534625,\t\t0.357614433044424,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.541000000000004\t\t],\n\t\t[119,\t\t158,\t\t0,\t\t0.0002490189289012004,\t\t0.08045252664623159,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t3,\t\t1,\t\t-360,\t\t4.315\t\t],\n\t\t[159,\t\t60,\t\t0,\t\t0.010967451523545706,\t\t0.0984261617997728,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.674\t\t],\n\t\t[536,\t\t161,\t\t0,\t\t0.021314380165289255,\t\t0.056371704363524,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.238\t\t],\n\t\t[115,\t\t151,\t\t0,\t\t0.00379404958677686,\t\t0.0401376047510724,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.477\t\t],\n\t\t[162,\t\t134,\t\t0,\t\t0.0015910743801652895,\t\t0.016832124393744,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t4.813\t\t],\n\t\t[115,\t\t526,\t\t0,\t\t0.0037884297520661154,\t\t0.010019537998747198,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.73\t\t],\n\t\t[138,\t\t87,\t\t0,\t\t0.0011838642659279777,\t\t0.16999131006813442,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t13.675999999999998\t\t],\n\t\t[123,\t\t163,\t\t0,\t\t0.0022778739612188364,\t\t0.08177009602828919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.157\t\t],\n\t\t[112,\t\t164,\t\t0,\t\t0.0008672957063711912,\t\t0.12453516639176802,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.019\t\t],\n\t\t[112,\t\t165,\t\t0,\t\t0.005989439058171744,\t\t0.21500619230086396,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.595\t\t],\n\t\t[166,\t\t165,\t\t0,\t\t0.002632790858725762,\t\t0.09451074335350361,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.207\t\t],\n\t\t[167,\t\t537,\t\t0,\t\t0.00832595041322314,\t\t0.08808100664460242,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.186\t\t],\n\t\t[168,\t\t104,\t\t0,\t\t0.002552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[531,\t\t520,\t\t0,\t\t0.016156694214876033,\t\t0.042730794079516396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.436999999999998\t\t],\n\t\t[139,\t\t520,\t\t0,\t\t0.010682314049586776,\t\t0.0282522993797748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.157\t\t],\n\t\t[520,\t\t169,\t\t0,\t\t0.0011328925619834712,\t\t0.0119849761681232,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t3.427\t\t],\n\t\t[168,\t\t105,\t\t0,\t\t0.007340893351800554,\t\t0.26352009133553606,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.401\t\t],\n\t\t[520,\t\t170,\t\t0,
\t\t0.005842644628099174,\t\t0.015452470732151198,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t8.837\t\t],\n\t\t[171,\t\t89,\t\t0,\t\t0.005505454545454546,\t\t0.058242717567848004,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.654\t\t],\n\t\t[521,\t\t172,\t\t0,\t\t0.006304793388429752,\t\t0.06669899780522001,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.072\t\t],\n\t\t[123,\t\t173,\t\t0,\t\t0.005247403047091413,\t\t0.18836891696656402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.309\t\t],\n\t\t[521,\t\t174,\t\t0,\t\t0.013300495867768597,\t\t0.035176796844864404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.117\t\t],\n\t\t[37,\t\t39,\t\t0,\t\t0.004338873499549862,\t\t0.35044859579205606,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t37.592\t\t],\n\t\t[530,\t\t175,\t\t0,\t\t0.013128595041322313,\t\t0.0347221581224188,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.857\t\t],\n\t\t[530,\t\t176,\t\t0,\t\t0.005685289256198347,\t\t0.01503630144005,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.599\t\t],\n\t\t[88,\t\t530,\t\t0,\t\t0.006015867768595041,\t\t0.0159106066755372,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.099\t\t],\n\t\t[177,\t\t496,\t\t0,\t\t0.018632066115702478,\t\t0.19711036673178398,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.361999999999995\t\t],\n\t\t[178,\t\t525,\t\t0,\t\t0.03106842975206612,\t\t0.08216895464241199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.99100000000001\t\t],\n\t\t[179,\t\t493,\t\t0,\t\t0.057079669421487594,\t\t0.15096278779194802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.333\t\t],\n\t\t[180,\t\t181,\t\t0,\t\t0.041027438016528923,\t\t0.10850827416682,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.053999999999995\t\t],\n\t\t[182,\t\t180,\t\t0,\t\t0.00866314049586777,\t\t0.09164817200545601,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.206\t\t],\n\t\t[179,\t\t181,\t\t0,\t\t0.01957223140495868,\t\t0.051764115772731996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.603\t\t],\n\t\t[180,\t\t493,\t\t0,\t\t0.06676561983471074,\t\t0.17657993119175203,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t100.98299999999999\t\t],\n\t\t[183,\t\t30,\t\t0,\t\t0.0024804362880886427,\t\t0.356166349712776,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.654\t\t],\n\t\t[183,\t\t21,\t\t0,\t\t0.0025647506925207757,\t\t0.36827307214930394,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t29.628\t\t],\n\t\t[538,\t\t185,\t\t0,\t\t0.018631404958677687,\t\t0.0123189607681008,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.09\t\t],\n\t\t[538,\t\t89,\t\t0,\t\t0.014509752066115702,\t\t0.038375005396288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.945999999999998\t\t],\n\t\t[184,\t\t186,\t\t0,\t\t0.0016554709141274237,\t\t0.059427351084826,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.562000000000001\t\t],\n\t\t[184,\t\t187,\t\t0,\t\t0.002698753462603878,\t\t0.09687863927102919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.588\t\t],\n\t\t[520,\t\t172,\t\t0,\t\t0.0034188429752066113,\t\t0.0361682589818792,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.342\t\t],\n\t\t[89,\t\t175,\t\t0,\t\t0.0037309090909090903,\t\t0.0098674088877672,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t
5.643\t\t],\n\t\t[185,\t\t89,\t\t0,\t\t0.005812892561983471,\t\t0.0153737832609196,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.792\t\t],\n\t\t[89,\t\t188,\t\t0,\t\t0.003108760330578513,\t\t0.008221966434607202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.702\t\t],\n\t\t[189,\t\t190,\t\t0,\t\t0.008599492151454294,\t\t0.17364414688031998,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.253\t\t],\n\t\t[539,\t\t172,\t\t0,\t\t0.0021570247933884296,\t\t0.022819366646419197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t6.525\t\t],\n\t\t[504,\t\t192,\t\t0,\t\t0.0003084297520661157,\t\t0.00326290713886456,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.9329999999999999\t\t],\n\t\t[105,\t\t186,\t\t0,\t\t0.003273372576177285,\t\t0.1175060580379876,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.907\t\t],\n\t\t[105,\t\t187,\t\t0,\t\t0.0021712257617728533,\t\t0.0779416868808324,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.540999999999999\t\t],\n\t\t[539,\t\t193,\t\t0,\t\t0.005608595041322314,\t\t0.01483346262541,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.482999999999999\t\t],\n\t\t[187,\t\t194,\t\t0,\t\t4.8649584487534626e-05,\t\t0.0069856037041576,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.562\t\t],\n\t\t[539,\t\t540,\t\t0,\t\t0.004394710743801653,\t\t0.0116230138006708,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.647\t\t],\n\t\t[539,\t\t196,\t\t0,\t\t0.00332297520661157,\t\t0.008788516227194,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.026\t\t],\n\t\t[197,\t\t540,\t\t0,\t\t0.004737190082644629,\t\t0.012528794024621601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.165\t\t],\n\t\t[110,\t\t198,\t\t0,\t\t0.00018724030470914128,\t\t0.02688587333118328,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.1630000000000003\t\t],\n\t\t[197,\t\t539,\t\t0,\t\t0.009172231404958677,\t\t0.024258473063998802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.873\t\t],\n\t\t[199,\t\t537,\t\t0,\t\t0.03612826446280991,\t\t0.0238877676441712,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.322\t\t],\n\t\t[134,\t\t526,\t\t0,\t\t0.007771239669421488,\t\t0.020553167475975197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.754000000000001\t\t],\n\t\t[200,\t\t193,\t\t0,\t\t0.0009322314049586776,\t\t0.009862163056380801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.82\t\t],\n\t\t[4,\t\t201,\t\t0,\t\t0.013726108033240996,\t\t0.49273365914097605,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t79.282\t\t],\n\t\t[202,\t\t86,\t\t0,\t\t0.00013365650969529087,\t\t0.00479794133417816,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.772\t\t],\n\t\t[85,\t\t203,\t\t0,\t\t0.0019011426592797783,\t\t0.2729854600553416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.962\t\t],\n\t\t[147,\t\t204,\t\t0,\t\t0.0073874380165289254,\t\t0.0781523963903056,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.346999999999998\t\t],\n\t\t[147,\t\t205,\t\t0,\t\t0.005959669421487603,\t\t0.00394049369636956,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.507\t\t],\n\t\t[123,\t\t206,\t\t0,\t\t0.0005753116343490305,\t\t0.0826091142668064,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t6.646\t\t],\n\t\t[537,\t\t207,\t\t0,\t\t0.018456198347107437,\t\t0.048812461297776,\t\t495.
0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.915\t\t],\n\t\t[165,\t\t208,\t\t0,\t\t0.00414612188365651,\t\t0.14883562055771601,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.948\t\t],\n\t\t[4,\t\t94,\t\t0,\t\t0.013687673130193905,\t\t0.49135394025941603,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t79.06\t\t],\n\t\t[4,\t\t2,\t\t0,\t\t5.2054478301015697e-05,\t\t0.016817654469309,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t3,\t\t1,\t\t-360,\t\t0.902\t\t],\n\t\t[209,\t\t4,\t\t0,\t\t0.0022369286703601107,\t\t0.32120104149338397,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.840999999999998\t\t],\n\t\t[119,\t\t163,\t\t0,\t\t0.003535145429362881,\t\t0.12690306230914922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.419\t\t],\n\t\t[210,\t\t3,\t\t0,\t\t0.0003150969529085873,\t\t0.011311208844832242,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.82\t\t],\n\t\t[99,\t\t211,\t\t0,\t\t0.0035045013850415513,\t\t0.1258030161741948,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.242\t\t],\n\t\t[99,\t\t69,\t\t0,\t\t0.021717970914127423,\t\t0.7796219621557,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t125.443\t\t],\n\t\t[212,\t\t99,\t\t0,\t\t0.008453774238227147,\t\t0.30346978938770003,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.82899999999999\t\t],\n\t\t[213,\t\t214,\t\t0,\t\t0.01490115702479339,\t\t0.15764073118032798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t45.076\t\t],\n\t\t[510,\t\t215,\t\t0,\t\t0.002174710743801653,\t\t0.09202587186721281,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t13.157\t\t],\n\t\t[128,\t\t69,\t\t0,\t\t0.010711651662049862,\t\t1.538088234801848,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t123.741\t\t],\n\t\t[216,\t\t69,\t\t0,\t\t0.009628462603878117,\t\t1.3825528982351443,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t111.228\t\t],\n\t\t[217,\t\t98,\t\t0,\t\t0.0012787396121883656,\t\t0.045903620070299994,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.386\t\t],\n\t\t[504,\t\t218,\t\t0,\t\t0.027480991735537193,\t\t0.072680994226412,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.565\t\t],\n\t\t[177,\t\t504,\t\t0,\t\t0.07054809917355372,\t\t0.18658373169634002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t106.704\t\t],\n\t\t[219,\t\t209,\t\t0,\t\t0.003938798476454294,\t\t0.5655728721401839,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t45.501000000000005\t\t],\n\t\t[219,\t\t220,\t\t0,\t\t0.0013026315789473684,\t\t0.1870451326342096,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t15.048\t\t],\n\t\t[94,\t\t95,\t\t0,\t\t0.01070740997229917,\t\t0.38436979242743197,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.846000000000004\t\t],\n\t\t[159,\t\t221,\t\t0,\t\t0.009937153739612188,\t\t0.356719480257712,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.397\t\t],\n\t\t[34,\t\t161,\t\t0,\t\t0.010965289256198347,\t\t0.116002818645824,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.17\t\t],\n\t\t[222,\t\t221,\t\t0,\t\t0.0046457756232686975,\t\t0.16677196601221997,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.834\t\t],\n\t\t[211,\t\t52,\t\t0,\t\t0.05267313019390582,\t\t0.472709090515552,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t152.12\t\t],\n\t\t[215,\t\t223,\t\t0,\t\t0.0487319008264462
8,\t\t0.128884831985184,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.707\t\t],\n\t\t[224,\t\t215,\t\t0,\t\t0.019086280991735535,\t\t0.050478887076288004,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.868000000000002\t\t],\n\t\t[225,\t\t224,\t\t0,\t\t0.04200925619834711,\t\t0.11110496071615601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.538999999999994\t\t],\n\t\t[224,\t\t223,\t\t0,\t\t0.031061818181818183,\t\t0.082151468537468,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.981\t\t],\n\t\t[226,\t\t6,\t\t0,\t\t0.06420099173553719,\t\t0.0424492677936932,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.552\t\t],\n\t\t[7,\t\t3,\t\t0,\t\t0.009332929362880887,\t\t0.335029305054692,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t53.907\t\t],\n\t\t[216,\t\t227,\t\t0,\t\t0.01989941135734072,\t\t0.7143401282507,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.939\t\t],\n\t\t[228,\t\t229,\t\t0,\t\t0.010545454545454545,\t\t0.027890337012274,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.95\t\t],\n\t\t[227,\t\t230,\t\t0,\t\t0.003993074792243767,\t\t0.573366419334696,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t46.128\t\t],\n\t\t[231,\t\t53,\t\t0,\t\t0.007193213296398893,\t\t1.0328749562310842,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t83.096\t\t],\n\t\t[544,\t\t545,\t\t0,\t\t0.013061818181818181,\t\t0.034545548464856,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.756\t\t],\n\t\t[234,\t\t235,\t\t0,\t\t0.04608859504132231,\t\t0.121893887321888,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t69.709\t\t],\n\t\t[546,\t\t214,\t\t0,\t\t0.057025454545454546,\t\t0.15081940173295602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.251\t\t],\n\t\t[233,\t\t227,\t\t0,\t\t0.0029001038781163438,\t\t0.1041066260218888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.750999999999998\t\t],\n\t\t[237,\t\t238,\t\t0,\t\t0.026324628099173554,\t\t0.06962267451304,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.816\t\t],\n\t\t[212,\t\t100,\t\t0,\t\t0.007955505540166205,\t\t0.285583163531816,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.951\t\t],\n\t\t[519,\t\t239,\t\t0,\t\t0.01740429752066116,\t\t0.046030422038308406,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.324\t\t],\n\t\t[238,\t\t519,\t\t0,\t\t0.015166280991735538,\t\t0.040111375593995205,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.939\t\t],\n\t\t[213,\t\t240,\t\t0,\t\t0.01665388429752066,\t\t0.04404574915373599,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.189\t\t],\n\t\t[241,\t\t242,\t\t0,\t\t0.009862015235457064,\t\t0.3540221919932281,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.963\t\t],\n\t\t[70,\t\t241,\t\t0,\t\t0.003819858033240997,\t\t0.5484941897752321,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.126999999999995\t\t],\n\t\t[509,\t\t213,\t\t0,\t\t0.011363636363636364,\t\t0.120216969880216,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t34.375\t\t],\n\t\t[68,\t\t243,\t\t0,\t\t0.003611668975069252,\t\t0.1296500701715312,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.861\t\t],\n\t\t[243,\t\t244,\t\t0,\t\t0.0007699099722991691,\t\t0.027637882270859202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.447\t\t],\n\t\t[68,\t\t244,\t\t0,\t\t0.00410
4051246537396,\t\t0.147325387728876,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.705\t\t],\n\t\t[544,\t\t547,\t\t0,\t\t0.02418776859504132,\t\t0.255884661882476,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.168\t\t],\n\t\t[245,\t\t227,\t\t0,\t\t0.012676419667590028,\t\t0.45505241780707606,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.219\t\t],\n\t\t[246,\t\t208,\t\t0,\t\t0.0010155817174515235,\t\t0.0364568961999408,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.8660000000000005\t\t],\n\t\t[112,\t\t208,\t\t0,\t\t0.0017927631578947367,\t\t0.0643558063672372,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.355\t\t],\n\t\t[165,\t\t247,\t\t0,\t\t0.0002113919667590028,\t\t0.0075884538459086,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.2209999999999999\t\t],\n\t\t[537,\t\t549,\t\t0,\t\t0.00032066115702479337,\t\t0.00084807607842936,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.485\t\t],\n\t\t[537,\t\t550,\t\t0,\t\t0.00032198347107438016,\t\t0.0008515732993697601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.48700000000000004\t\t],\n\t\t[537,\t\t551,\t\t0,\t\t0.0002651239669421488,\t\t0.0007011927988648,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.401\t\t],\n\t\t[110,\t\t251,\t\t0,\t\t0.00023857340720221602,\t\t0.008564200982522441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.3780000000000001\t\t],\n\t\t[510,\t\t252,\t\t0,\t\t0.08467702479338843,\t\t0.055987884365424005,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.03699999999999\t\t],\n\t\t[529,\t\t253,\t\t0,\t\t0.04859504132231405,\t\t0.12852286961777998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.5\t\t],\n\t\t[237,\t\t239,\t\t0,\t\t0.03309421487603306,\t\t0.08752669712542799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.055\t\t],\n\t\t[254,\t\t238,\t\t0,\t\t0.07815008264462811,\t\t0.05167231372274401,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t59.101000000000006\t\t],\n\t\t[69,\t\t255,\t\t0,\t\t0.0009369806094182826,\t\t0.134541235754472,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.824000000000002\t\t],\n\t\t[510,\t\t225,\t\t0,\t\t0.021953719008264466,\t\t0.232250442756508,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.41\t\t],\n\t\t[256,\t\t257,\t\t0,\t\t0.010125619834710746,\t\t0.0267799693631888,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.315\t\t],\n\t\t[258,\t\t190,\t\t0,\t\t0.011717451523545707,\t\t0.10515695255750121,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.84\t\t],\n\t\t[258,\t\t259,\t\t0,\t\t0.015782548476454293,\t\t0.1416387085570408,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.58\t\t],\n\t\t[260,\t\t261,\t\t0,\t\t0.006791031855955679,\t\t0.9751256416231477,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t78.45\t\t],\n\t\t[554,\t\t553,\t\t0,\t\t0.17583338842975205,\t\t0.11625986438453201,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t132.974\t\t],\n\t\t[515,\t\t263,\t\t0,\t\t0.006987107438016529,\t\t0.0739172618295936,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.136\t\t],\n\t\t[14,\t\t264,\t\t0,\t\t0.01700694214876033,\t\t0.17991802858084,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.446000000000005\t\t],\n\t\t[116,\t\t555,\t\t0,\t\t0.0009768595041322315,\t\t0.0103342878835768,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,
\t\t2,\t\t1,\t\t-360,\t\t2.955\t\t],\n\t\t[151,\t\t116,\t\t0,\t\t0.007244958677685951,\t\t0.0191612735410668,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.958\t\t],\n\t\t[111,\t\t114,\t\t0,\t\t0.008806613573407202,\t\t0.3161358573133961,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.867\t\t],\n\t\t[77,\t\t111,\t\t0,\t\t0.00288452216066482,\t\t0.41418912211817605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.321999999999996\t\t],\n\t\t[266,\t\t525,\t\t0,\t\t0.01042909090909091,\t\t0.027582581569373602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.774000000000001\t\t],\n\t\t[267,\t\t120,\t\t0,\t\t0.013136945983379503,\t\t0.471584184581432,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.87899999999999\t\t],\n\t\t[268,\t\t269,\t\t0,\t\t0.0010327272727272726,\t\t0.0027313295556817604,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.5619999999999998\t\t],\n\t\t[556,\t\t271,\t\t0,\t\t0.052289586776859506,\t\t0.0345735262323792,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.544000000000004\t\t],\n\t\t[556,\t\t272,\t\t0,\t\t0.04685355371900827,\t\t0.030979257409249603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.433\t\t],\n\t\t[529,\t\t273,\t\t0,\t\t0.0034604958677685953,\t\t0.009152227205140799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.234\t\t],\n\t\t[128,\t\t274,\t\t0,\t\t0.0029350761772853184,\t\t0.1053620459045884,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.953\t\t],\n\t\t[34,\t\t275,\t\t0,\t\t0.0008290909090909092,\t\t0.00054818938265696,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.627\t\t],\n\t\t[503,\t\t276,\t\t0,\t\t0.006707438016528925,\t\t0.07095861291266,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t20.29\t\t],\n\t\t[503,\t\t504,\t\t0,\t\t0.06432727272727272,\t\t0.680524223098808,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t194.59\t\t],\n\t\t[177,\t\t218,\t\t0,\t\t0.04330380165289256,\t\t0.114528740018308,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t65.497\t\t],\n\t\t[277,\t\t278,\t\t0,\t\t0.007191135734072023,\t\t1.032576638635032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t83.072\t\t],\n\t\t[557,\t\t558,\t\t0,\t\t0.04341289256198347,\t\t0.258338836678648,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t98.493\t\t],\n\t\t[557,\t\t559,\t\t0,\t\t0.03415867768595042,\t\t0.09034195998366001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.665\t\t],\n\t\t[559,\t\t558,\t\t0,\t\t0.04474314049586777,\t\t0.11833546501370001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.67399999999999\t\t],\n\t\t[277,\t\t78,\t\t0,\t\t0.03585768698060942,\t\t0.32180078416049196,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t103.557\t\t],\n\t\t[277,\t\t279,\t\t0,\t\t0.021390927977839334,\t\t0.191970480441328,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.777\t\t],\n\t\t[78,\t\t279,\t\t0,\t\t0.015811980609418283,\t\t0.1419028439283376,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.665\t\t],\n\t\t[281,\t\t282,\t\t0,\t\t0.0023178670360110803,\t\t0.08320574945862161,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.388\t\t],\n\t\t[283,\t\t161,\t\t0,\t\t0.036741157024793386,\t\t0.09717203248350399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.571000000000005\t\t],\n\t\t[268,\t\t161,\t\t0,\t\t0.018883636363636366,\t\t0.1997717
51868832,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.123000000000005\t\t],\n\t\t[256,\t\t284,\t\t0,\t\t0.010755371900826446,\t\t0.113782083346976,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t32.535\t\t],\n\t\t[515,\t\t516,\t\t0,\t\t0.04071140495867769,\t\t0.107672438361532,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.576\t\t],\n\t\t[263,\t\t516,\t\t0,\t\t0.0030355371900826445,\t\t0.128452925198488,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t18.365\t\t],\n\t\t[516,\t\t285,\t\t0,\t\t0.006908429752066116,\t\t0.018271230811372,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.449000000000002\t\t],\n\t\t[63,\t\t286,\t\t0,\t\t0.019088925619834708,\t\t0.050485881518556,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.872\t\t],\n\t\t[287,\t\t516,\t\t0,\t\t0.01732892561983471,\t\t0.011457770111127998,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.105\t\t],\n\t\t[8,\t\t102,\t\t0,\t\t0.015100069252077563,\t\t0.542055501663692,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t87.21799999999999\t\t],\n\t\t[8,\t\t101,\t\t0,\t\t0.019246883656509697,\t\t0.69091598202144,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t111.17\t\t],\n\t\t[80,\t\t288,\t\t0,\t\t0.007984072022160666,\t\t0.2866086302684072,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t46.11600000000001\t\t],\n\t\t[80,\t\t289,\t\t0,\t\t0.0003782317636201524,\t\t0.122198345223416,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t6.553999999999999\t\t],\n\t\t[276,\t\t560,\t\t0,\t\t0.01778314049586777,\t\t0.047032375838192794,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.897\t\t],\n\t\t[37,\t\t290,\t\t0,\t\t0.005629501385041551,\t\t0.4546919507138321,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t48.773999999999994\t\t],\n\t\t[290,\t\t74,\t\t0,\t\t0.02071595106187673,\t\t1.673216783321968,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t179.483\t\t],\n\t\t[512,\t\t291,\t\t0,\t\t0.0053299173553719,\t\t0.056385693247479204,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t16.123\t\t],\n\t\t[78,\t\t292,\t\t0,\t\t0.0058149815327908595,\t\t0.469673087481408,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t50.381\t\t],\n\t\t[199,\t\t548,\t\t0,\t\t0.0015530578512396695,\t\t0.00410748599634868,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.349\t\t],\n\t\t[491,\t\t293,\t\t0,\t\t0.014176528925619833,\t\t0.009373426429729999,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.720999999999998\t\t],\n\t\t[4,\t\t294,\t\t0,\t\t9.669321329639889e-05,\t\t0.013884198109531681,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.117\t\t],\n\t\t[490,\t\t541,\t\t0,\t\t0.050580495867768596,\t\t0.133773946861896,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.503\t\t],\n\t\t[491,\t\t295,\t\t0,\t\t0.010613553719008264,\t\t0.028070443890777202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.053\t\t],\n\t\t[491,\t\t296,\t\t0,\t\t0.004400661157024794,\t\t0.0116387512948784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.656000000000001\t\t],\n\t\t[295,\t\t297,\t\t0,\t\t0.020297520661157024,\t\t0.053682341459340005,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.7\t\t],\n\t\t[508,\t\t161,\t\t0,\t\t0.023239669421487603,\t\t0.061463658055360006,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.15\t\t],\n\t\t[1
17,\t\t123,\t\t0,\t\t0.005876211911357341,\t\t0.21094161505628,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.941\t\t],\n\t\t[133,\t\t117,\t\t0,\t\t0.004469182825484764,\t\t0.0401081792747688,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.907\t\t],\n\t\t[71,\t\t74,\t\t0,\t\t0.03904524469065097,\t\t0.7884161162841721,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t169.144\t\t],\n\t\t[74,\t\t278,\t\t0,\t\t0.0077122576177285325,\t\t1.10740463560792,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t89.09200000000001\t\t],\n\t\t[298,\t\t515,\t\t0,\t\t0.021701157024793388,\t\t0.05739464148919599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.823\t\t],\n\t\t[5,\t\t299,\t\t0,\t\t0.0016232686980609415,\t\t0.058271370400665996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.376\t\t],\n\t\t[32,\t\t292,\t\t0,\t\t0.009679362880886427,\t\t0.34746541983297996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.908\t\t],\n\t\t[5,\t\t29,\t\t0,\t\t0.00743395083102493,\t\t1.0674425076571843,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t85.87700000000001\t\t],\n\t\t[503,\t\t560,\t\t0,\t\t0.015140495867768593,\t\t0.160172719142436,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.8\t\t],\n\t\t[300,\t\t301,\t\t0,\t\t0.004892053324099723,\t\t0.7024509290644521,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.513000000000005\t\t],\n\t\t[51,\t\t300,\t\t0,\t\t0.002573493767313019,\t\t0.3695284920307039,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.729\t\t],\n\t\t[244,\t\t302,\t\t0,\t\t0.007714508310249307,\t\t1.107727813004004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t89.118\t\t],\n\t\t[31,\t\t302,\t\t0,\t\t0.004369113573407203,\t\t0.6273619041941161,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.472\t\t],\n\t\t[51,\t\t282,\t\t0,\t\t0.006288434903047093,\t\t0.9029576432132521,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.64399999999999\t\t],\n\t\t[303,\t\t304,\t\t0,\t\t8.795013850415512e-05,\t\t0.000789298639172312,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.254\t\t],\n\t\t[305,\t\t304,\t\t0,\t\t0.003881117266849031,\t\t0.0783689646873844,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.813\t\t],\n\t\t[305,\t\t259,\t\t0,\t\t0.0025625,\t\t0.36794989475177603,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t29.601999999999997\t\t],\n\t\t[306,\t\t307,\t\t0,\t\t0.03223268698060942,\t\t0.289268628831688,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t93.088\t\t],\n\t\t[305,\t\t308,\t\t0,\t\t0.0024272853185595567,\t\t0.0217833994511184,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.01\t\t],\n\t\t[305,\t\t309,\t\t0,\t\t0.011014773776523545,\t\t0.22241441259921202,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.716\t\t],\n\t\t[310,\t\t309,\t\t0,\t\t0.009565962603878117,\t\t0.343394627639832,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.253\t\t],\n\t\t[306,\t\t309,\t\t0,\t\t0.035333795013850415,\t\t0.31709917455019604,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t102.044\t\t],\n\t\t[311,\t\t280,\t\t0,\t\t0.003433691135734072,\t\t0.1232611016590444,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.833\t\t],\n\t\t[280,\t\t278,\t\t0,\t\t0.009749769159764544,\t\t0.7874838737974121,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\
t\t-360,\t\t84.47200000000001\t\t],\n\t\t[311,\t\t32,\t\t0,\t\t0.01205909510619806,\t\t0.9740069506375919,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t104.48\t\t],\n\t\t[13,\t\t312,\t\t0,\t\t0.0043324965373961214,\t\t0.622104056565324,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.049\t\t],\n\t\t[313,\t\t314,\t\t0,\t\t0.006092624653739613,\t\t0.218710302449316,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.191\t\t],\n\t\t[312,\t\t313,\t\t0,\t\t0.00893957756232687,\t\t0.32090893884734,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.635\t\t],\n\t\t[547,\t\t566,\t\t0,\t\t0.027035702479338848,\t\t0.286013220297816,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.783\t\t],\n\t\t[245,\t\t315,\t\t0,\t\t0.014162569252077564,\t\t0.508401547875772,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.803\t\t],\n\t\t[312,\t\t316,\t\t0,\t\t8.803670360110802e-05,\t\t0.01264120812658816,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0170000000000001\t\t],\n\t\t[312,\t\t314,\t\t0,\t\t0.005339854570637119,\t\t0.191687700220296,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.843000000000004\t\t],\n\t\t[554,\t\t546,\t\t0,\t\t0.08174743801652892,\t\t0.21620344446439202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t123.64299999999999\t\t],\n\t\t[262,\t\t216,\t\t0,\t\t0.042641966759002774,\t\t0.38268554099981195,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t123.15\t\t],\n\t\t[317,\t\t233,\t\t0,\t\t0.005647276084951523,\t\t0.114031901035644,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.464000000000002\t\t],\n\t\t[318,\t\t317,\t\t0,\t\t0.008311634349030471,\t\t0.16783161497270002,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t36.006\t\t],\n\t\t[231,\t\t52,\t\t0,\t\t0.035263677285318554,\t\t1.2658796434850879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t203.683\t\t],\n\t\t[319,\t\t567,\t\t0,\t\t0.006089586776859504,\t\t0.0644223069721,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.421\t\t],\n\t\t[557,\t\t321,\t\t0,\t\t0.010004628099173555,\t\t0.10583989458750401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.264\t\t],\n\t\t[277,\t\t65,\t\t0,\t\t0.009430170821779778,\t\t0.7616700793261759,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t81.703\t\t],\n\t\t[322,\t\t288,\t\t0,\t\t0.006545013850415513,\t\t0.528637424797136,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.706\t\t],\n\t\t[322,\t\t323,\t\t0,\t\t0.0018503000923372577,\t\t0.14944779312484,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t16.031\t\t],\n\t\t[277,\t\t324,\t\t0,\t\t0.019719529085872576,\t\t0.39818407235049996,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t85.425\t\t],\n\t\t[324,\t\t325,\t\t0,\t\t0.01103508771932133,\t\t0.22282459929396403,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.803999999999995\t\t],\n\t\t[277,\t\t325,\t\t0,\t\t0.008665743305609418,\t\t0.174981914850048,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.54\t\t],\n\t\t[326,\t\t327,\t\t0,\t\t0.007654214876033058,\t\t0.0202436634226288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.577\t\t],\n\t\t[328,\t\t326,\t\t0,\t\t0.10300958677685952,\t\t0.068109252150368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.90100000000001\t\t],\n\t\t[328,\t\t327,\t\t0,\t\t0.09827173553719008,\t\t0.064
976616491468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.318\t\t],\n\t\t[326,\t\t329,\t\t0,\t\t0.028062148760330575,\t\t0.07421802283046801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.443999999999996\t\t],\n\t\t[568,\t\t329,\t\t0,\t\t0.05699900826446282,\t\t0.15074945731414802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.211\t\t],\n\t\t[568,\t\t326,\t\t0,\t\t0.03218644628099173,\t\t0.08512585494846397,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.681999999999995\t\t],\n\t\t[332,\t\t78,\t\t0,\t\t0.006471029547541551,\t\t0.522661750455416,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.065\t\t],\n\t\t[333,\t\t306,\t\t0,\t\t0.008580159279778392,\t\t0.308006702824228,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.559\t\t],\n\t\t[332,\t\t333,\t\t0,\t\t0.007504674515235457,\t\t0.26939943395502003,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.347\t\t],\n\t\t[332,\t\t334,\t\t0,\t\t0.017124653739612188,\t\t0.15368328149175597,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.456\t\t],\n\t\t[66,\t\t334,\t\t0,\t\t0.030625,\t\t0.27484062260471603,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t88.445\t\t],\n\t\t[330,\t\t335,\t\t0,\t\t0.00550536703601108,\t\t0.790516769355108,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.598\t\t],\n\t\t[336,\t\t66,\t\t0,\t\t0.015054362880886425,\t\t0.1351036887216764,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.477\t\t],\n\t\t[330,\t\t336,\t\t0,\t\t0.039036357340720224,\t\t0.350327404269788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t112.73700000000001\t\t],\n\t\t[68,\t\t70,\t\t0,\t\t0.016314058171745152,\t\t0.14640868261713597,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.115\t\t],\n\t\t[509,\t\t337,\t\t0,\t\t0.03494082644628099,\t\t0.09241056617056001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t52.848\t\t],\n\t\t[324,\t\t288,\t\t0,\t\t0.012627423822714683,\t\t0.11332339674541761,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t36.468\t\t],\n\t\t[338,\t\t559,\t\t0,\t\t0.009228099173553718,\t\t0.097624922595552,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t27.915\t\t],\n\t\t[339,\t\t559,\t\t0,\t\t0.03560595041322315,\t\t0.023542417076125203,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.927\t\t],\n\t\t[339,\t\t340,\t\t0,\t\t0.08711537190082644,\t\t0.23040041287850396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t131.762\t\t],\n\t\t[559,\t\t340,\t\t0,\t\t0.20983272727272728,\t\t0.138740000599684,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t158.686\t\t],\n\t\t[341,\t\t292,\t\t0,\t\t0.0009329409048961218,\t\t0.07535316024134399,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.083\t\t],\n\t\t[557,\t\t342,\t\t0,\t\t0.006019834710743802,\t\t0.0636843933534336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t18.21\t\t],\n\t\t[558,\t\t343,\t\t0,\t\t0.010650247933884296,\t\t0.11266996708783199,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.217\t\t],\n\t\t[502,\t\t340,\t\t0,\t\t0.021737520661157025,\t\t0.22996326026071198,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t65.756\t\t],\n\t\t[72,\t\t32,\t\t0,\t\t0.00675502077562327,\t\t0.969954803293024,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t78.03399999999999\t\t],\n\t\t[344,\t\t345,\t\t0,\t\t0.0005762927054480609,\t\t0.0465468
6738645321,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.993\t\t],\n\t\t[346,\t\t47,\t\t0,\t\t0.0011340027700831024,\t\t0.04070792194158799,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.55\t\t],\n\t\t[46,\t\t47,\t\t0,\t\t0.0008975069252077563,\t\t0.0322183003580208,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.184\t\t],\n\t\t[346,\t\t345,\t\t0,\t\t0.0007217797783933517,\t\t0.025910126194627202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.169\t\t],\n\t\t[347,\t\t328,\t\t0,\t\t0.029905454545454544,\t\t0.07909314882361201,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.232\t\t],\n\t\t[347,\t\t348,\t\t0,\t\t0.04883438016528925,\t\t0.129155866607944,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.862\t\t],\n\t\t[571,\t\t348,\t\t0,\t\t0.041548429752066116,\t\t0.10988617921762801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.842\t\t],\n\t\t[347,\t\t572,\t\t0,\t\t0.016052231404958678,\t\t0.04245451362512801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.279\t\t],\n\t\t[571,\t\t570,\t\t0,\t\t0.17379041322314048,\t\t0.11490906279551602,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t131.429\t\t],\n\t\t[14,\t\t350,\t\t0,\t\t0.02166743801652892,\t\t0.05730546235524,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.772\t\t],\n\t\t[350,\t\t573,\t\t0,\t\t0.026277685950413226,\t\t0.06949852316919598,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.745\t\t],\n\t\t[15,\t\t351,\t\t0,\t\t0.02639265927977839,\t\t0.236857956201204,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.222\t\t],\n\t\t[352,\t\t15,\t\t0,\t\t0.0015260560941828254,\t\t0.219126704094076,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.629\t\t],\n\t\t[15,\t\t335,\t\t0,\t\t0.0035338758079432133,\t\t1.1417173740880242,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.235\t\t],\n\t\t[232,\t\t227,\t\t0,\t\t5.5747922437673134e-05,\t\t0.000500303468136644,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.161\t\t],\n\t\t[565,\t\t544,\t\t0,\t\t0.0394803305785124,\t\t0.10441652566461601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t59.714\t\t],\n\t\t[235,\t\t567,\t\t0,\t\t0.02391404958677686,\t\t0.25298896294275997,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.34\t\t],\n\t\t[567,\t\t286,\t\t0,\t\t0.008068760330578512,\t\t0.34144067500694797,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.816\t\t],\n\t\t[353,\t\t519,\t\t0,\t\t0.007621818181818182,\t\t0.080631926038356,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.055999999999997\t\t],\n\t\t[354,\t\t353,\t\t0,\t\t0.0008436363636363636,\t\t0.00892490784392768,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.552\t\t],\n\t\t[355,\t\t354,\t\t0,\t\t0.0068502479338842966,\t\t0.0181173530898976,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.360999999999999\t\t],\n\t\t[354,\t\t356,\t\t0,\t\t0.01855404958677686,\t\t0.049071255647172,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.063000000000002\t\t],\n\t\t[357,\t\t358,\t\t0,\t\t0.0034823407202216067,\t\t0.5000300103406239,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.228\t\t],\n\t\t[574,\t\t359,\t\t0,\t\t0.013352066115702478,\t\t0.0353131884615884,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.195\t\t],\n\t\t[235,\t\t575,\t\t0,\t\t0.007459504132231404,\t\t0
.0789147905557,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.565\t\t],\n\t\t[167,\t\t361,\t\t0,\t\t0.000616198347107438,\t\t0.0065188198358579995,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.864\t\t],\n\t\t[528,\t\t362,\t\t0,\t\t0.0011960330578512398,\t\t0.012652945368078402,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.6180000000000003\t\t],\n\t\t[363,\t\t344,\t\t0,\t\t0.0002662742382271468,\t\t0.009558592968871479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.538\t\t],\n\t\t[259,\t\t364,\t\t0,\t\t0.013069713758102496,\t\t0.26390852570525997,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.618\t\t],\n\t\t[54,\t\t56,\t\t0,\t\t0.007723337950138504,\t\t0.0693122289241068,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.305\t\t],\n\t\t[365,\t\t364,\t\t0,\t\t0.0049974607571537395,\t\t0.10091058802821559,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.649\t\t],\n\t\t[231,\t\t366,\t\t0,\t\t0.0013273891966759002,\t\t0.0476500209962672,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.667000000000001\t\t],\n\t\t[30,\t\t367,\t\t0,\t\t0.01126108033240997,\t\t0.1010613005635992,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.522\t\t],\n\t\t[61,\t\t367,\t\t0,\t\t0.020337603878116343,\t\t0.18251754162067196,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.735\t\t],\n\t\t[254,\t\t368,\t\t0,\t\t0.0004297520661157025,\t\t0.00454638722456732,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.3\t\t],\n\t\t[254,\t\t369,\t\t0,\t\t0.00015999999999999999,\t\t0.00169265493591832,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.484\t\t],\n\t\t[254,\t\t370,\t\t0,\t\t0.0003669421487603306,\t\t0.0038819152455960805,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.11\t\t],\n\t\t[99,\t\t358,\t\t0,\t\t0.0020184383656509696,\t\t0.28982797432374396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.316999999999997\t\t],\n\t\t[354,\t\t519,\t\t0,\t\t0.006762644628099174,\t\t0.07154264880985199,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.457\t\t],\n\t\t[571,\t\t371,\t\t0,\t\t0.023726942148760328,\t\t0.06275238397221199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.887\t\t],\n\t\t[207,\t\t372,\t\t0,\t\t0.002329256198347108,\t\t0.006160354689297601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.523\t\t],\n\t\t[57,\t\t373,\t\t0,\t\t0.0017725619834710745,\t\t0.0046880246727212796,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.681\t\t],\n\t\t[209,\t\t374,\t\t0,\t\t0.0010122922437673131,\t\t0.0363388121515216,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.847\t\t],\n\t\t[375,\t\t376,\t\t0,\t\t0.0045364727608518006,\t\t0.0916021467933684,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.652\t\t],\n\t\t[376,\t\t377,\t\t0,\t\t0.0030886426592797783,\t\t0.062367022394423606,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.38\t\t],\n\t\t[16,\t\t49,\t\t0,\t\t0.002266101108033241,\t\t0.32538991773524,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.178\t\t],\n\t\t[318,\t\t377,\t\t0,\t\t0.004755078485685596,\t\t0.0960163149704152,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.599\t\t],\n\t\t[378,\t\t297,\t\t0,\t\t0.01753917355371901,\t\t0.046387138574374404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.528000000000002\t\t],\n\t\t[562,\t\t3
79,\t\t0,\t\t0.01802314049586777,\t\t0.047667121439141605,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.26\t\t],\n\t\t[576,\t\t563,\t\t0,\t\t0.001808264462809917,\t\t0.004782449638150801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.735\t\t],\n\t\t[576,\t\t381,\t\t0,\t\t0.0034320661157024794,\t\t0.009077036954898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.191\t\t],\n\t\t[577,\t\t576,\t\t0,\t\t0.06004495867768594,\t\t0.15880530575430396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.818\t\t],\n\t\t[244,\t\t383,\t\t0,\t\t0.006845567867036011,\t\t0.1382282547912684,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.655\t\t],\n\t\t[244,\t\t306,\t\t0,\t\t0.02679108956599723,\t\t0.5409756541164079,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t116.059\t\t],\n\t\t[383,\t\t306,\t\t0,\t\t0.0300685595567867,\t\t0.269846910348376,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.838\t\t],\n\t\t[380,\t\t306,\t\t0,\t\t0.00025605955678670365,\t\t0.03676764369572,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.958\t\t],\n\t\t[252,\t\t225,\t\t0,\t\t0.062094545454545444,\t\t0.041056499553586,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.958999999999996\t\t],\n\t\t[220,\t\t76,\t\t0,\t\t0.002772074099722992,\t\t0.398042682239984,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.023\t\t],\n\t\t[542,\t\t384,\t\t0,\t\t0.007939834710743802,\t\t0.020999063146094,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.009\t\t],\n\t\t[385,\t\t384,\t\t0,\t\t0.053734876033057856,\t\t0.035529141854791196,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.637\t\t],\n\t\t[542,\t\t385,\t\t0,\t\t0.011306115702479337,\t\t0.119608453436296,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t34.201\t\t],\n\t\t[386,\t\t385,\t\t0,\t\t0.003668760330578512,\t\t0.0388121580140316,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.097999999999999\t\t],\n\t\t[387,\t\t578,\t\t0,\t\t0.015444628099173553,\t\t0.16339016240905604,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.72\t\t],\n\t\t[332,\t\t388,\t\t0,\t\t0.014036184210526315,\t\t0.5038646344377999,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.07300000000001\t\t],\n\t\t[382,\t\t332,\t\t0,\t\t0.017764369806094183,\t\t0.637697365901468,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t102.60700000000001\t\t],\n\t\t[382,\t\t388,\t\t0,\t\t0.00476159972299169,\t\t0.17092976750548,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.503\t\t],\n\t\t[579,\t\t578,\t\t0,\t\t0.01911074380165289,\t\t0.050543585664,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.905\t\t],\n\t\t[577,\t\t387,\t\t0,\t\t0.07597818181818182,\t\t0.20094506949431204,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.917\t\t],\n\t\t[144,\t\t390,\t\t0,\t\t0.0004277685950413223,\t\t0.0011313509747276,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.647\t\t],\n\t\t[37,\t\t49,\t\t0,\t\t0.008441481994459835,\t\t0.303028527944352,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t48.758\t\t],\n\t\t[391,\t\t233,\t\t0,\t\t0.014211218836565096,\t\t0.1275369872004348,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.042\t\t],\n\t\t[392,\t\t310,\t\t0,\t\t0.007035318559556785,\t\t0.06313767618386361,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.317999999999998\t\t],\
n\t\t[260,\t\t393,\t\t0,\t\t0.006341412742382271,\t\t0.0569102963692744,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.314\t\t],\n\t\t[394,\t\t230,\t\t0,\t\t0.0007590027700831025,\t\t0.00681158510656168,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.1919999999999997\t\t],\n\t\t[395,\t\t282,\t\t0,\t\t0.008762984764542936,\t\t0.314569689934484,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.615\t\t],\n\t\t[395,\t\t244,\t\t0,\t\t0.0034046052631578946,\t\t0.12221699007344,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.665\t\t],\n\t\t[25,\t\t396,\t\t0,\t\t0.008809037396121884,\t\t0.316222866612064,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.881\t\t],\n\t\t[81,\t\t74,\t\t0,\t\t0.0075207756232686974,\t\t0.26997742429652244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.44\t\t],\n\t\t[278,\t\t80,\t\t0,\t\t0.016286011080332407,\t\t0.5846279085788,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t94.068\t\t],\n\t\t[81,\t\t278,\t\t0,\t\t0.021054016620498613,\t\t0.755787629231688,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t121.60799999999999\t\t],\n\t\t[569,\t\t570,\t\t0,\t\t0.03253950413223141,\t\t0.08605961294018,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.216\t\t],\n\t\t[397,\t\t552,\t\t0,\t\t0.006289586776859504,\t\t0.0166345314104904,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.513\t\t],\n\t\t[542,\t\t398,\t\t0,\t\t0.0005580165289256199,\t\t0.0059033089500572,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.6880000000000002\t\t],\n\t\t[398,\t\t385,\t\t0,\t\t0.021893553719008262,\t\t0.05790348713648401,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.114000000000004\t\t],\n\t\t[399,\t\t499,\t\t0,\t\t0.03266380165289256,\t\t0.021597087927192803,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.701999999999998\t\t],\n\t\t[83,\t\t399,\t\t0,\t\t0.025700495867768593,\t\t0.016992996557050798,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.436\t\t],\n\t\t[498,\t\t400,\t\t0,\t\t0.012134214876033058,\t\t0.032092247974028,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.352999999999998\t\t],\n\t\t[518,\t\t239,\t\t0,\t\t0.04685289256198347,\t\t0.123915281026504,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.865\t\t],\n\t\t[575,\t\t543,\t\t0,\t\t0.0030307438016528923,\t\t0.032062521596058796,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[401,\t\t360,\t\t0,\t\t0.007957063711911357,\t\t0.071409774520472,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.98\t\t],\n\t\t[580,\t\t581,\t\t0,\t\t0.007134545454545454,\t\t0.018869255592422397,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.790999999999999\t\t],\n\t\t[401,\t\t402,\t\t0,\t\t0.0033434903047091418,\t\t0.030005778188384805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.656\t\t],\n\t\t[403,\t\t231,\t\t0,\t\t0.009592105263157893,\t\t0.08608327126915,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.701999999999998\t\t],\n\t\t[189,\t\t360,\t\t0,\t\t0.028456024930747923,\t\t0.255375399471348,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t82.181\t\t],\n\t\t[234,\t\t404,\t\t0,\t\t0.008092561983471074,\t\t0.0214029921648796,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.24\t\t],\n\t\t[235,\t\t404,\t\t0,\t\t0.05107504132231405,\t\t0.13508190749437998,\t\t495.0,\t\t495.0,
\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.251\t\t],\n\t\t[235,\t\t580,\t\t0,\t\t0.000580495867768595,\t\t0.00153527999352772,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.878\t\t],\n\t\t[216,\t\t259,\t\t0,\t\t0.0022115650969529088,\t\t0.079389770210892,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.774000000000001\t\t],\n\t\t[405,\t\t259,\t\t0,\t\t0.0052832409972299165,\t\t0.1896554115982928,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.516\t\t],\n\t\t[405,\t\t318,\t\t0,\t\t0.0066348684210526315,\t\t0.23817552558268398,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t38.323\t\t],\n\t\t[406,\t\t230,\t\t0,\t\t8.098164819944598e-05,\t\t0.046512685161986804,\t\t6845.0,\t\t6845.0,\t\t6845.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.871\t\t],\n\t\t[542,\t\t407,\t\t0,\t\t0.025569586776859506,\t\t0.067625761355152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.674\t\t],\n\t\t[23,\t\t408,\t\t0,\t\t0.03224528925619835,\t\t0.08528148128033601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.771\t\t],\n\t\t[577,\t\t348,\t\t0,\t\t0.012999008264462809,\t\t0.13751772188026398,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.321999999999996\t\t],\n\t\t[562,\t\t564,\t\t0,\t\t0.06921520661157024,\t\t0.18305853298686803,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.68799999999999\t\t],\n\t\t[582,\t\t507,\t\t0,\t\t0.006357685950413223,\t\t0.016814638289042002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.616\t\t],\n\t\t[27,\t\t410,\t\t0,\t\t0.0030042975206611565,\t\t0.007945685980170399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.544\t\t],\n\t\t[501,\t\t27,\t\t0,\t\t0.003811570247933884,\t\t0.040322957460962,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.53\t\t],\n\t\t[27,\t\t411,\t\t0,\t\t0.004648595041322314,\t\t0.012294480221518,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.031000000000001\t\t],\n\t\t[411,\t\t410,\t\t0,\t\t0.002054214876033058,\t\t0.0054329327333556,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.1069999999999998\t\t],\n\t\t[403,\t\t360,\t\t0,\t\t0.008191481994459833,\t\t0.07351353506655639,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.656999999999996\t\t],\n\t\t[412,\t\t360,\t\t0,\t\t0.016761772853185596,\t\t0.15042664773666,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.408\t\t],\n\t\t[326,\t\t413,\t\t0,\t\t0.012077024793388432,\t\t0.12776397267356798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.533\t\t],\n\t\t[414,\t\t413,\t\t0,\t\t0.008093223140495867,\t\t0.08561896310149601,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t24.482\t\t],\n\t\t[6,\t\t297,\t\t0,\t\t0.019472396694214876,\t\t0.0128750188978664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.725999999999999\t\t],\n\t\t[554,\t\t580,\t\t0,\t\t0.07435371900826447,\t\t0.196648733567264,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t112.46\t\t],\n\t\t[262,\t\t401,\t\t0,\t\t0.03931232686980609,\t\t0.35280406181043206,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t113.53399999999999\t\t],\n\t\t[499,\t\t556,\t\t0,\t\t0.04185586776859504,\t\t0.11069928308639199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t63.306999999999995\t\t],\n\t\t[224,\t\t229,\t\t0,\t\t0.004135206611570248,\t\t0.0437467367631624,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.509\t\t],\n\t\t[583,\t\t507,\t\t0,\t
\t0.024632727272727268,\t\t0.065147980317596,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.257\t\t],\n\t\t[415,\t\t307,\t\t0,\t\t0.015675554016620498,\t\t0.1406784987952448,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.271\t\t],\n\t\t[416,\t\t507,\t\t0,\t\t0.0010555371900826446,\t\t0.011166626467730801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.193\t\t],\n\t\t[284,\t\t561,\t\t0,\t\t0.015221487603305786,\t\t0.16102953827307598,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.045\t\t],\n\t\t[543,\t\t417,\t\t0,\t\t0.0006614876033057851,\t\t0.027991756419545603,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t4.002\t\t],\n\t\t[418,\t\t506,\t\t0,\t\t0.0009395041322314049,\t\t0.009939101917118,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.842\t\t],\n\t\t[220,\t\t157,\t\t0,\t\t0.004599549861495845,\t\t0.165112574384632,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.566999999999997\t\t],\n\t\t[295,\t\t419,\t\t0,\t\t0.0012023140495867769,\t\t0.012719392565946,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.637\t\t],\n\t\t[295,\t\t420,\t\t0,\t\t0.0008003305785123967,\t\t0.008466771900532,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.421\t\t],\n\t\t[541,\t\t62,\t\t0,\t\t0.05133355371900827,\t\t0.0339414035471236,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.821\t\t],\n\t\t[52,\t\t421,\t\t0,\t\t0.00013885041551246538,\t\t0.004984389831631239,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.802\t\t],\n\t\t[60,\t\t160,\t\t0,\t\t6.128808864265928e-05,\t\t0.000550023067454096,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.177\t\t],\n\t\t[535,\t\t161,\t\t0,\t\t3.735537190082645e-05,\t\t0.00039518596644331203,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.113\t\t],\n\t\t[267,\t\t282,\t\t0,\t\t0.0065652700831024926,\t\t0.235677115717012,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.921\t\t],\n\t\t[52,\t\t365,\t\t0,\t\t0.007655586334279779,\t\t0.15458444922992,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.164\t\t],\n\t\t[28,\t\t27,\t\t0,\t\t0.015726942148760328,\t\t0.041594197273402404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.787\t\t],\n\t\t[30,\t\t201,\t\t0,\t\t0.009128289473684211,\t\t0.327683234253536,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t52.725\t\t],\n\t\t[422,\t\t81,\t\t0,\t\t0.0004226685133887349,\t\t0.13655487952674,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t6,\t\t1,\t\t-360,\t\t7.324\t\t],\n\t\t[119,\t\t425,\t\t0,\t\t0.003579120498614958,\t\t0.1284816595874996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.673000000000002\t\t],\n\t\t[423,\t\t425,\t\t0,\t\t0.0006518351800554017,\t\t0.0233992864289392,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.765\t\t],\n\t\t[424,\t\t425,\t\t0,\t\t0.005922957063711911,\t\t0.21261965153389198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.211\t\t],\n\t\t[426,\t\t428,\t\t0,\t\t0.013948429752066116,\t\t0.14756174042535197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t42.193999999999996\t\t],\n\t\t[427,\t\t428,\t\t0,\t\t0.0002664462809917355,\t\t0.0028187600792304794,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.8059999999999999\t\t],\n\t\t[19,\t\t428,\t\t0,\t\t0.023607603305785128,\t\t0.24974703912892798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t71.413\t\
t],\n\t\t[45,\t\t429,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t429,\t\t0,\t\t5.289256198347107e-05,\t\t0.00013988883767892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08\t\t],\n\t\t[505,\t\t429,\t\t0,\t\t0.006012561983471073,\t\t0.015901863623161996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.094\t\t],\n\t\t[231,\t\t431,\t\t0,\t\t0.011677285318559558,\t\t0.4191859418495199,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.44800000000001\t\t],\n\t\t[190,\t\t431,\t\t0,\t\t0.009600761772853185,\t\t0.34464383257266795,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.45399999999999\t\t],\n\t\t[430,\t\t431,\t\t0,\t\t0.0028100761772853187,\t\t0.1008748520662472,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.230999999999998\t\t],\n\t\t[286,\t\t433,\t\t0,\t\t0.01568694214876033,\t\t0.16595362535967603,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.453\t\t],\n\t\t[432,\t\t433,\t\t0,\t\t0.00010049586776859504,\t\t0.00106315516636076,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.304\t\t],\n\t\t[506,\t\t433,\t\t0,\t\t0.0065904132231404955,\t\t0.06972059669946801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.936\t\t],\n\t\t[23,\t\t434,\t\t0,\t\t0.02613685950413223,\t\t0.069126069139116,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.532\t\t],\n\t\t[400,\t\t434,\t\t0,\t\t0.008155371900826446,\t\t0.021569110159669603,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.335\t\t],\n\t\t[500,\t\t434,\t\t0,\t\t0.006338512396694216,\t\t0.0167639285853336,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.587\t\t],\n\t\t[32,\t\t436,\t\t0,\t\t0.0044813019390581715,\t\t0.16086776359270402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.884\t\t],\n\t\t[435,\t\t436,\t\t0,\t\t0.0006634349030470914,\t\t0.023815688073266,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.832\t\t],\n\t\t[78,\t\t436,\t\t0,\t\t0.00897680055401662,\t\t0.32224515307884394,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.85\t\t],\n\t\t[86,\t\t438,\t\t0,\t\t0.014693213296398892,\t\t0.52745036936438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t84.868\t\t],\n\t\t[437,\t\t438,\t\t0,\t\t1.0387811634349031e-05,\t\t0.0003728969948845,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.06\t\t],\n\t\t[221,\t\t438,\t\t0,\t\t0.002280124653739612,\t\t0.081850890377238,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.17\t\t],\n\t\t[207,\t\t439,\t\t0,\t\t0.055703801652892564,\t\t0.0368309823503996,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.126000000000005\t\t],\n\t\t[516,\t\t439,\t\t0,\t\t0.05448462809917355,\t\t0.03602487292327441,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.20399999999999\t\t],\n\t\t[513,\t\t439,\t\t0,\t\t0.046726611570247926,\t\t0.0308953241066316,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.336999999999996\t\t],\n\t\t[181,\t\t441,\t\t0,\t\t0.040805289256198356,\t\t0.10792074104825197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.718\t\t],\n\t\t[440,\t\t441,\t\t0,\t\t0.0001322314049586777,\t\t0.000349722094197784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.2\t\t],\n\t\t[504,\t\t441,\t\t0,\t\t0.05916099173553719,\t\t0.156467413554364,\t\t495.0,\t\t495.0,\t\t495.0,\t
\t0,\t\t1,\t\t1,\t\t-360,\t\t89.48100000000001\t\t],\n\t\t[135,\t\t442,\t\t0,\t\t0.004956890581717451,\t\t0.177940231009092,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.631\t\t],\n\t\t[109,\t\t442,\t\t0,\t\t0.0015380886426592797,\t\t0.055213615042649204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.884\t\t],\n\t\t[112,\t\t442,\t\t0,\t\t0.0027304362880886425,\t\t0.09801597510545401,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.770999999999999\t\t],\n\t\t[113,\t\t443,\t\t0,\t\t0.0019885734072022164,\t\t0.07138491472072879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.485999999999999\t\t],\n\t\t[132,\t\t443,\t\t0,\t\t0.006788434903047091,\t\t0.24368818615747198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.21\t\t],\n\t\t[107,\t\t443,\t\t0,\t\t2.2333795013850418e-05,\t\t0.000801728539002036,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.129\t\t],\n\t\t[444,\t\t445,\t\t0,\t\t7.877423822714682e-05,\t\t0.00282780221121528,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.455\t\t],\n\t\t[112,\t\t445,\t\t0,\t\t0.002816135734072022,\t\t0.101092375313206,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.266\t\t],\n\t\t[109,\t\t445,\t\t0,\t\t0.0014354224376731304,\t\t0.0515281497432104,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.291\t\t],\n\t\t[119,\t\t447,\t\t0,\t\t0.005212690443213296,\t\t0.74849127803204,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t60.217\t\t],\n\t\t[100,\t\t447,\t\t0,\t\t0.0050695117728531865,\t\t0.7279322237145921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t58.563\t\t],\n\t\t[446,\t\t447,\t\t0,\t\t2.9518698060941832e-05,\t\t0.00423859584186224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.341\t\t],\n\t\t[124,\t\t448,\t\t0,\t\t6.509695290858726e-05,\t\t0.00233682116794768,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.376\t\t],\n\t\t[125,\t\t448,\t\t0,\t\t0.00615148891966759,\t\t0.22082338542026803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.531\t\t],\n\t\t[131,\t\t448,\t\t0,\t\t3.912742382271468e-05,\t\t0.0014045786807313759,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.226\t\t],\n\t\t[449,\t\t450,\t\t0,\t\t0.0023614958448753462,\t\t0.08477191683710039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.64\t\t],\n\t\t[173,\t\t450,\t\t0,\t\t0.002862361495844876,\t\t0.10275176694050518,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.533\t\t],\n\t\t[184,\t\t450,\t\t0,\t\t0.004022853185595568,\t\t0.14441057621844403,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.236\t\t],\n\t\t[144,\t\t451,\t\t0,\t\t0.007672727272727273,\t\t0.020292624515794402,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.605\t\t],\n\t\t[140,\t\t451,\t\t0,\t\t0.006991074380165291,\t\t0.018489807120219602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.574000000000002\t\t],\n\t\t[514,\t\t451,\t\t0,\t\t0.01149289256198347,\t\t0.030396095817207994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.383\t\t],\n\t\t[537,\t\t585,\t\t0,\t\t0.05072595041322314,\t\t0.134158641165824,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.723\t\t],\n\t\t[141,\t\t585,\t\t0,\t\t0.007994710743801653,\t\t0.0211441978151932,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.092\t\t],\n\t\t[584,\t\t585,\t\t0,\t\t9.25619
8347107438e-05,\t\t0.000244805465938352,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.14\t\t],\n\t\t[522,\t\t454,\t\t0,\t\t0.0035008264462809916,\t\t0.0092588924438956,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.295\t\t],\n\t\t[144,\t\t454,\t\t0,\t\t0.00452892561983471,\t\t0.011977981726290799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.85\t\t],\n\t\t[453,\t\t454,\t\t0,\t\t0.001114710743801653,\t\t0.0029481572540882,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.686\t\t],\n\t\t[199,\t\t456,\t\t0,\t\t0.013063140495867768,\t\t0.0086372614214612,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.879\t\t],\n\t\t[140,\t\t456,\t\t0,\t\t0.005061818181818182,\t\t0.013387361765852802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.656000000000001\t\t],\n\t\t[455,\t\t456,\t\t0,\t\t0.0011365289256198346,\t\t0.00300586139962416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.719\t\t],\n\t\t[537,\t\t456,\t\t0,\t\t0.039058512396694216,\t\t0.025825228046024003,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.538\t\t],\n\t\t[538,\t\t457,\t\t0,\t\t0.027927272727272728,\t\t0.0184653265736368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.12\t\t],\n\t\t[153,\t\t457,\t\t0,\t\t0.030093223140495867,\t\t0.019897438549384,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.758000000000003\t\t],\n\t\t[176,\t\t457,\t\t0,\t\t0.004579173553719009,\t\t0.0030277190305137603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.463\t\t],\n\t\t[524,\t\t459,\t\t0,\t\t0.004318677685950414,\t\t0.011421923596476799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.532\t\t],\n\t\t[458,\t\t459,\t\t0,\t\t0.001993388429752066,\t\t0.0052720605700488,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.015\t\t],\n\t\t[134,\t\t459,\t\t0,\t\t0.011813553719008265,\t\t0.031244171895617998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.868\t\t],\n\t\t[460,\t\t461,\t\t0,\t\t6.611570247933885e-05,\t\t0.000174861047098892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.1\t\t],\n\t\t[150,\t\t461,\t\t0,\t\t0.008018512396694214,\t\t0.021207147792120403,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.128\t\t],\n\t\t[149,\t\t461,\t\t0,\t\t0.005586115702479339,\t\t0.0147740098693748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.449\t\t],\n\t\t[521,\t\t463,\t\t0,\t\t0.014348429752066114,\t\t0.009487086110365599,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.850999999999999\t\t],\n\t\t[462,\t\t463,\t\t0,\t\t0.007197355371900825,\t\t0.0047588433967958406,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.443\t\t],\n\t\t[538,\t\t463,\t\t0,\t\t0.012211570247933883,\t\t0.0080742088497664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.235\t\t],\n\t\t[110,\t\t464,\t\t0,\t\t0.0025753116343490306,\t\t0.0924473799817492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.875\t\t],\n\t\t[90,\t\t464,\t\t0,\t\t0.007328947368421053,\t\t0.26309125979076,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.332\t\t],\n\t\t[165,\t\t464,\t\t0,\t\t0.002152527700831025,\t\t0.0772704722900764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.433\t\t],\n\t\t[458,\t\t465,\t\t0,\t\t0.002003305785123967,\t\t0.0052982897270776,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.03\t\t],\n\t\t[134,\t\t465,\t\t0,\t\t0.0118386
77685950413,\t\t0.031310619093534,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.906\t\t],\n\t\t[524,\t\t465,\t\t0,\t\t0.004293553719008264,\t\t0.0113554763986092,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.494\t\t],\n\t\t[466,\t\t467,\t\t0,\t\t0.0023509349030470914,\t\t0.084392804892244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.579\t\t],\n\t\t[110,\t\t467,\t\t0,\t\t0.0025337603878116343,\t\t0.09095579200221118,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.635\t\t],\n\t\t[165,\t\t467,\t\t0,\t\t0.0022891274238227145,\t\t0.08217406777274441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.222000000000001\t\t],\n\t\t[468,\t\t469,\t\t0,\t\t0.0005269421487603305,\t\t0.0013936425453786,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.797\t\t],\n\t\t[541,\t\t469,\t\t0,\t\t0.022390743801652895,\t\t0.05921844221026801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.866\t\t],\n\t\t[490,\t\t469,\t\t0,\t\t0.028243305785123966,\t\t0.07469714209944801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.718\t\t],\n\t\t[263,\t\t471,\t\t0,\t\t0.0371900826446281,\t\t0.0245898347482832,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.125\t\t],\n\t\t[470,\t\t471,\t\t0,\t\t0.001570909090909091,\t\t0.0010386746197682802,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.188\t\t],\n\t\t[534,\t\t471,\t\t0,\t\t0.024497190082644622,\t\t0.0161973787927468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.526\t\t],\n\t\t[136,\t\t472,\t\t0,\t\t0.0007079293628808865,\t\t0.025412930201351602,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.0889999999999995\t\t],\n\t\t[110,\t\t472,\t\t0,\t\t0.00019511772853185596,\t\t0.0070042485539216805,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.127\t\t],\n\t\t[251,\t\t472,\t\t0,\t\t4.207063711911357e-05,\t\t0.00151023282928764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.243\t\t],\n\t\t[226,\t\t474,\t\t0,\t\t0.017639669421487602,\t\t0.011663231841509601,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.34\t\t],\n\t\t[473,\t\t474,\t\t0,\t\t0.003467107438016529,\t\t0.00916971330986216,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.244\t\t],\n\t\t[257,\t\t474,\t\t0,\t\t0.020264462809917356,\t\t0.053594910935781594,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.65\t\t],\n\t\t[6,\t\t474,\t\t0,\t\t0.08066247933884299,\t\t0.05333349367016,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.001000000000005\t\t],\n\t\t[299,\t\t475,\t\t0,\t\t0.013238227146814403,\t\t0.47521993028123993,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.464\t\t],\n\t\t[3,\t\t475,\t\t0,\t\t0.0002794321329639889,\t\t0.010030929162389441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.614\t\t],\n\t\t[210,\t\t475,\t\t0,\t\t0.0001481994459833795,\t\t0.00531999712702368,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.856\t\t],\n\t\t[297,\t\t476,\t\t0,\t\t0.0193500826446281,\t\t0.05117658265464801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.267\t\t],\n\t\t[296,\t\t476,\t\t0,\t\t0.005596694214876033,\t\t0.014801987636898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.465\t\t],\n\t\t[295,\t\t476,\t\t0,\t\t0.0009474380165289256,\t\t0.00250575880492432,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.433\t\t],\n\t\t[313,\t\t47
8,\t\t0,\t\t0.008696849030470914,\t\t0.31219557906752804,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.233000000000004\t\t],\n\t\t[477,\t\t478,\t\t0,\t\t1.5235457063711912e-05,\t\t0.0005469155924977479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08800000000000001\t\t],\n\t\t[245,\t\t478,\t\t0,\t\t0.005264542936288089,\t\t0.188984197007248,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.408\t\t],\n\t\t[479,\t\t481,\t\t0,\t\t0.028420495867768597,\t\t0.07516576970575199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.986000000000004\t\t],\n\t\t[565,\t\t481,\t\t0,\t\t0.024842314049586776,\t\t0.065702289836964,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.574\t\t],\n\t\t[480,\t\t481,\t\t0,\t\t7.735537190082645e-05,\t\t0.000204587425105844,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.11699999999999999\t\t],\n\t\t[415,\t\t482,\t\t0,\t\t0.011021814404432133,\t\t0.0989140353680364,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.831\t\t],\n\t\t[56,\t\t482,\t\t0,\t\t0.002630886426592798,\t\t0.0236105947261788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.598\t\t],\n\t\t[409,\t\t482,\t\t0,\t\t0.0007635041551246537,\t\t0.0068519822810072005,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.205\t\t],\n\t\t[483,\t\t484,\t\t0,\t\t9.037396121883656e-05,\t\t0.000811050963873968,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.261\t\t],\n\t\t[3,\t\t484,\t\t0,\t\t0.010022160664819944,\t\t0.08994275516621358,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.944000000000003\t\t],\n\t\t[301,\t\t484,\t\t0,\t\t0.00966516620498615,\t\t0.08673894848517479,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.913\t\t],\n\t\t[233,\t\t485,\t\t0,\t\t0.01410180055401662,\t\t0.1265550251138996,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.726\t\t],\n\t\t[392,\t\t485,\t\t0,\t\t0.00914819944598338,\t\t0.0820994883738036,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.42\t\t],\n\t\t[391,\t\t485,\t\t0,\t\t8.518005540166207e-05,\t\t0.000764438839512864,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.24600000000000002\t\t],\n\t\t[579,\t\t488,\t\t0,\t\t0.004636473829194215,\t\t0.11036180126571601,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.038\t\t],\n\t\t[486,\t\t488,\t\t0,\t\t0.00016969696969690082,\t\t0.00403929018798184,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.77\t\t],\n\t\t[487,\t\t488,\t\t0,\t\t0.00014567493112954544,\t\t0.00346749456396992,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.6609999999999999\t\t],\n\t\t[270,\t\t489,\t\t0,\t\t0.0001745152354570637,\t\t0.0062646695140596,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.008\t\t],\n\t\t[331,\t\t489,\t\t0,\t\t0.003002943213296399,\t\t0.10779830627119119,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.345\t\t],\n\t\t[396,\t\t489,\t\t0,\t\t0.01124792243767313,\t\t0.40377286606072005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.968\t\t],\n\t\t[519,\t\t253,\t\t0,\t\t0.013353485337561985,\t\t0.141267767926912,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.394293146100004\t\t],\n\t\t[382,\t\t349,\t\t0,\t\t0.009091647380263157,\t\t1.30547149138788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t105.02671053600001\t\t],\n\t\t[349,\t\t351,\t\t0,\t\t0.0005858117819605263,\t\t0.084
1168325920224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.76729770521\t\t],\n\t\t[459,\t\t465,\t\t0,\t\t1.578788789911157e-05,\t\t0.00016702153987596,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.047758360894800005\t\t],\n\t\t[549,\t\t550,\t\t0,\t\t3.680432518409091e-05,\t\t0.000389356391787088,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.111333083682\t\t],\n\t\t[550,\t\t551,\t\t0,\t\t5.755645674710744e-05,\t\t0.0006088951287918401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.17410828165999997\t\t],\n\t\t[194,\t\t195,\t\t0,\t\t1.7560672583171745e-05,\t\t0.00252154053805592,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.202860889681\t\t],\n\t\t[247,\t\t248,\t\t0,\t\t2.1755213937811637e-05,\t\t0.0031238355819477198,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.25131623141\t\t],\n\t\t[2,\t\t294,\t\t0,\t\t2.3531392658518004e-05,\t\t0.003378877444715,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.271834647991\t\t],\n\t\t[549,\t\t551,\t\t0,\t\t9.265809538429751e-05,\t\t0.0009802386406577602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.28029073853799996\t\t],\n\t\t[54,\t\t365,\t\t0,\t\t2.573045189134349e-05,\t\t0.00369464080598484,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.297238180249\t\t],\n\t\t[131,\t\t265,\t\t0,\t\t2.7616389041343487e-05,\t\t0.00396544290388756,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.319024526206\t\t],\n\t\t[91,\t\t92,\t\t0,\t\t2.8945628197853184e-05,\t\t0.0041563086239824396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.33437989694200004\t\t],\n\t\t[247,\t\t249,\t\t0,\t\t3.098840072160664e-05,\t\t0.00444963074500788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.357978005136\t\t],\n\t\t[186,\t\t191,\t\t0,\t\t3.1591661821191135e-05,\t\t0.00453625312865552,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.36494687735799997\t\t],\n\t\t[129,\t\t173,\t\t0,\t\t3.202671277479225e-05,\t\t0.00459872218332188,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.369972585975\t\t],\n\t\t[96,\t\t202,\t\t0,\t\t3.5971247867797784e-05,\t\t0.00516511877739804,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.415539855369\t\t],\n\t\t[53,\t\t320,\t\t0,\t\t3.784209581142659e-05,\t\t0.00543375421308236,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.437151890814\t\t],\n\t\t[24,\t\t396,\t\t0,\t\t4.144748602818559e-05,\t\t0.005951452925597279,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.47880135859800005\t\t],\n\t\t[133,\t\t156,\t\t0,\t\t4.431754564044322e-05,\t\t0.0063635653674415605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.511956287238\t\t],\n\t\t[442,\t\t452,\t\t0,\t\t4.483572190450138e-05,\t\t0.006437970402313801,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.517942259441\t\t],\n\t\t[445,\t\t452,\t\t0,\t\t4.490753296371191e-05,\t\t0.0064482817668697215,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.518771820797\t\t],\n\t\t[247,\t\t250,\t\t0,\t\t4.594910768732687e-05,\t\t0.00659784169268824,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.530804092004\t\t],\n\t\t[187,\t\t195,\t\t0,\t\t4.755760376239612e-05,\t\t0.006828805970367921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.549385438663\t\t],\n\t\t[216,\t\t236,\t\t0,\t\t5.03353075283241e-05,\t\t0.00722765701751724,\t\t3423.0,\t\
t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.581473472567\t\t],\n\t\t[244,\t\t389,\t\t0,\t\t5.1633313019736845e-05,\t\t0.007414037889302401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.596468032004\t\t],\n\t\t[394,\t\t406,\t\t0,\t\t5.6346419007686985e-05,\t\t0.008090793734075721,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.650913832377\t\t],\n\t\t[442,\t\t445,\t\t0,\t\t6.388070648310249e-05,\t\t0.00917264360085512,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.737949921293\t\t],\n\t\t[442,\t\t444,\t\t0,\t\t6.584378362735456e-05,\t\t0.00945452224616264,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.760627388463\t\t],\n\t\t[198,\t\t472,\t\t0,\t\t8.37554210498615e-05,\t\t0.0120264578966664,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.967542623967\t\t],\n\t\t[464,\t\t467,\t\t0,\t\t8.460287496468144e-05,\t\t0.01214814397621276,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.977332411594\t\t],\n\t\t[198,\t\t251,\t\t0,\t\t8.83613182396122e-05,\t\t0.012687819608389479,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0207499483\t\t],\n\t\t[112,\t\t143,\t\t0,\t\t9.049653833033241e-05,\t\t0.012994416294241841,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.04541601079\t\t],\n\t\t[2,\t\t490,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[5,\t\t491,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[10,\t\t492,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[12,\t\t493,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[13,\t\t494,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[15,\t\t495,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[18,\t\t496,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[20,\t\t497,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[22,\t\t498,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[24,\t\t499,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[26,\t\t500,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[30,\t\t501,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[32,\t\t502,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[37,\t\t503,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[42,\t\t504,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[46,\t\t505,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[52,\t\t506,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[56,\t\t507,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[61,\t\t508,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t3
60\t\t],\n\t\t[68,\t\t509,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[69,\t\t510,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[74,\t\t511,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[78,\t\t512,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[86,\t\t513,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[87,\t\t514,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[94,\t\t515,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[95,\t\t516,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[96,\t\t517,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[99,\t\t518,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[100,\t\t519,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[104,\t\t520,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[105,\t\t521,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[106,\t\t522,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[107,\t\t523,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[117,\t\t524,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[120,\t\t525,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[123,\t\t526,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[124,\t\t527,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[125,\t\t528,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[128,\t\t529,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[129,\t\t530,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[138,\t\t531,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[143,\t\t532,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[156,\t\t533,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[157,\t\t534,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[159,\t\t535,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[160,\t\t536,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[165,\t\t537,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[184,\t\t538,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[191,\t\t53
9,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[195,\t\t540,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[201,\t\t541,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[220,\t\t542,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[231,\t\t543,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[232,\t\t544,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[233,\t\t545,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[236,\t\t546,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[245,\t\t547,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[246,\t\t548,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[248,\t\t549,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[249,\t\t550,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[250,\t\t551,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[259,\t\t552,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[261,\t\t553,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[262,\t\t554,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[265,\t\t555,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[270,\t\t556,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[277,\t\t557,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[279,\t\t558,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[280,\t\t559,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[290,\t\t560,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[301,\t\t561,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[305,\t\t562,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[306,\t\t563,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[310,\t\t564,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[313,\t\t565,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[315,\t\t566,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[320,\t\t567,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[330,\t\t568,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[332,\t\t569,\t\t0,\t\t0.0
05,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[334,\t\t570,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[336,\t\t571,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[349,\t\t572,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[351,\t\t573,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[358,\t\t574,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[360,\t\t575,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[380,\t\t576,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[382,\t\t577,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[383,\t\t578,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[389,\t\t579,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[401,\t\t580,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[402,\t\t581,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[409,\t\t582,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[415,\t\t583,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[444,\t\t584,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[452,\t\t585,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t]\n\t])\n\tppc[\"gen_control\"] = array([\n\t\t[586,\t\t1,\t\t0.08658028904199107,\t\t4.329014452099554,\t\t0, 0, 0],\n\t\t[589,\t\t1,\t\t0.010042676909098597,\t\t0.5021338454549299,\t\t0, 0, 0],\n\t\t[590,\t\t1,\t\t0.012095775674984046,\t\t0.6047887837492023,\t\t0, 0, 0],\n\t\t[593,\t\t1,\t\t0.0017666198683200384,\t\t0.08833099341600192,\t\t0, 0, 0],\n\t\t[594,\t\t1,\t\t0.006047887837492023,\t\t0.30239439187460115,\t\t0, 0, 0],\n\t\t[595,\t\t1,\t\t1.50560576164933,\t\t75.2802880824665,\t\t0, 0, 0],\n\t\t[598,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[599,\t\t1,\t\t0.0029602819415092537,\t\t0.1480140970754627,\t\t0, 0, 0],\n\t\t[601,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[602,\t\t1,\t\t0.007830423200121252,\t\t0.39152116000606263,\t\t0, 0, 0],\n\t\t[603,\t\t1,\t\t1.0997606567649967,\t\t54.98803283824984,\t\t0, 0, 0],\n\t\t[607,\t\t1,\t\t0.5729577951308232,\t\t28.64788975654116,\t\t0, 0, 0],\n\t\t[608,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[609,\t\t1,\t\t0.0057932399285449895,\t\t0.2896619964272495,\t\t0, 0, 0],\n\t\t[612,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[613,\t\t1,\t\t0.027056340325622208,\t\t1.3528170162811104,\t\t0, 0, 0],\n\t\t[614,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[616,\t\t1,\t\t0.0046154933496649645,\t\t0.23077466748324824,\t\t0, 0, 0],\n\t\t[617,\t\t1,\t\t0.04360845440717932,\t\t2.1804227203589663,\t\t0, 0, 
0],
		[618,		1,		0.010631550198538607,		0.5315775099269304,		0, 0, 0],
		[619,		1,		0.037560566569687294,		1.8780283284843649,		0, 0, 0],
		# ... several hundred further seven-column rows of the form
		# [id, type, v, 50*v, a, b, c] for ids between 621 and 1518 (with gaps);
		# ids up to 1026 are type 1, ids from 1027 on are types 2 and 3;
		# every row of type 1 or 2 carries (a, b, c) = (0, 0, 0) and every row
		# of type 3 carries (a, b, c) = (2.22, 61.69, 0.004502) ...
		[1519,		3,		2.2252129554213336e-06,		0.00011126064777106669,		2.22, 61.69, 0.004502]
	])
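	# For orientation: ppc follows the pypower/MATPOWER convention of a dict
	# of numpy arrays, so any of these tables can be filtered with plain
	# boolean indexing. Illustrative helper (not part of the original case;
	# the column reading below is an assumption based on the key name):
	def rows_with_state(table, state, col=2):
		"""Return the rows of a case table whose `col` entry equals `state`."""
		return table[table[:, col] == state]
	# e.g. rows_with_state(ppc["branch_switch"], 0) selects the entries whose
	# third column -- the switch state in this reading -- is 0.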
	ppc["branch_switch"] = array([
		[586,		1,		0		],
		[589,		108,		0		],
		# ... further three-column rows [id, index, 0], ids 590-1160 with gaps;
		# the third column is 0 for every entry in this excerpt ...
		[1161,		178,		0		],
		[1
162,\t\t179,\t\t0\t\t],\n\t\t[1163,\t\t180,\t\t0\t\t],\n\t\t[1164,\t\t181,\t\t0\t\t],\n\t\t[1165,\t\t182,\t\t0\t\t],\n\t\t[1166,\t\t183,\t\t0\t\t],\n\t\t[1167,\t\t185,\t\t0\t\t],\n\t\t[1168,\t\t186,\t\t0\t\t],\n\t\t[1169,\t\t187,\t\t0\t\t],\n\t\t[1170,\t\t188,\t\t0\t\t],\n\t\t[1171,\t\t189,\t\t0\t\t],\n\t\t[1172,\t\t190,\t\t0\t\t],\n\t\t[1173,\t\t192,\t\t0\t\t],\n\t\t[1174,\t\t193,\t\t0\t\t],\n\t\t[1175,\t\t194,\t\t0\t\t],\n\t\t[1176,\t\t196,\t\t0\t\t],\n\t\t[1177,\t\t197,\t\t0\t\t],\n\t\t[1178,\t\t198,\t\t0\t\t],\n\t\t[1179,\t\t199,\t\t0\t\t],\n\t\t[1180,\t\t200,\t\t0\t\t],\n\t\t[1181,\t\t202,\t\t0\t\t],\n\t\t[1182,\t\t203,\t\t0\t\t],\n\t\t[1183,\t\t204,\t\t0\t\t],\n\t\t[1184,\t\t205,\t\t0\t\t],\n\t\t[1185,\t\t206,\t\t0\t\t],\n\t\t[1186,\t\t207,\t\t0\t\t],\n\t\t[1187,\t\t208,\t\t0\t\t],\n\t\t[1188,\t\t209,\t\t0\t\t],\n\t\t[1189,\t\t210,\t\t0\t\t],\n\t\t[1190,\t\t211,\t\t0\t\t],\n\t\t[1191,\t\t212,\t\t0\t\t],\n\t\t[1192,\t\t213,\t\t0\t\t],\n\t\t[1193,\t\t214,\t\t0\t\t],\n\t\t[1194,\t\t215,\t\t0\t\t],\n\t\t[1195,\t\t216,\t\t0\t\t],\n\t\t[1196,\t\t217,\t\t0\t\t],\n\t\t[1197,\t\t218,\t\t0\t\t],\n\t\t[1198,\t\t219,\t\t0\t\t],\n\t\t[1199,\t\t221,\t\t0\t\t],\n\t\t[1200,\t\t222,\t\t0\t\t],\n\t\t[1201,\t\t223,\t\t0\t\t],\n\t\t[1202,\t\t224,\t\t0\t\t],\n\t\t[1203,\t\t225,\t\t0\t\t],\n\t\t[1204,\t\t226,\t\t0\t\t],\n\t\t[1205,\t\t227,\t\t0\t\t],\n\t\t[1206,\t\t228,\t\t0\t\t],\n\t\t[1207,\t\t229,\t\t0\t\t],\n\t\t[1208,\t\t230,\t\t0\t\t],\n\t\t[1209,\t\t234,\t\t0\t\t],\n\t\t[1210,\t\t235,\t\t0\t\t],\n\t\t[1211,\t\t237,\t\t0\t\t],\n\t\t[1212,\t\t238,\t\t0\t\t],\n\t\t[1213,\t\t239,\t\t0\t\t],\n\t\t[1214,\t\t240,\t\t0\t\t],\n\t\t[1215,\t\t241,\t\t0\t\t],\n\t\t[1216,\t\t242,\t\t0\t\t],\n\t\t[1217,\t\t243,\t\t0\t\t],\n\t\t[1218,\t\t244,\t\t0\t\t],\n\t\t[1219,\t\t247,\t\t0\t\t],\n\t\t[1220,\t\t251,\t\t0\t\t],\n\t\t[1221,\t\t252,\t\t0\t\t],\n\t\t[1222,\t\t253,\t\t0\t\t],\n\t\t[1223,\t\t254,\t\t0\t\t],\n\t\t[1224,\t\t255,\t\t0\t\t],\n\t\t[1225,\t\t256,\t\t0\t\t],\n\t\t[1226,\t\t257,\t\t0\t\t],\n\t\t[1227,\t\t258,\t\t0\t\t],\n\t\t[1228,\t\t260,\t\t0\t\t],\n\t\t[1229,\t\t263,\t\t0\t\t],\n\t\t[1230,\t\t264,\t\t0\t\t],\n\t\t[1231,\t\t266,\t\t0\t\t],\n\t\t[1232,\t\t267,\t\t0\t\t],\n\t\t[1233,\t\t268,\t\t0\t\t],\n\t\t[1235,\t\t271,\t\t0\t\t],\n\t\t[1236,\t\t272,\t\t0\t\t],\n\t\t[1237,\t\t273,\t\t0\t\t],\n\t\t[1238,\t\t274,\t\t0\t\t],\n\t\t[1239,\t\t275,\t\t0\t\t],\n\t\t[1240,\t\t276,\t\t0\t\t],\n\t\t[1241,\t\t278,\t\t0\t\t],\n\t\t[1242,\t\t281,\t\t0\t\t],\n\t\t[1243,\t\t282,\t\t0\t\t],\n\t\t[1244,\t\t283,\t\t0\t\t],\n\t\t[1245,\t\t284,\t\t0\t\t],\n\t\t[1246,\t\t285,\t\t0\t\t],\n\t\t[1247,\t\t286,\t\t0\t\t],\n\t\t[1248,\t\t287,\t\t0\t\t],\n\t\t[1250,\t\t289,\t\t0\t\t],\n\t\t[1251,\t\t291,\t\t0\t\t],\n\t\t[1252,\t\t292,\t\t0\t\t],\n\t\t[1253,\t\t293,\t\t0\t\t],\n\t\t[1254,\t\t294,\t\t0\t\t],\n\t\t[1255,\t\t295,\t\t0\t\t],\n\t\t[1256,\t\t296,\t\t0\t\t],\n\t\t[1257,\t\t297,\t\t0\t\t],\n\t\t[1258,\t\t298,\t\t0\t\t],\n\t\t[1259,\t\t299,\t\t0\t\t],\n\t\t[1260,\t\t300,\t\t0\t\t],\n\t\t[1261,\t\t302,\t\t0\t\t],\n\t\t[1262,\t\t303,\t\t0\t\t],\n\t\t[1263,\t\t304,\t\t0\t\t],\n\t\t[1264,\t\t307,\t\t0\t\t],\n\t\t[1265,\t\t308,\t\t0\t\t],\n\t\t[1266,\t\t309,\t\t0\t\t],\n\t\t[1267,\t\t311,\t\t0\t\t],\n\t\t[1268,\t\t312,\t\t0\t\t],\n\t\t[1269,\t\t314,\t\t0\t\t],\n\t\t[1270,\t\t316,\t\t0\t\t],\n\t\t[1271,\t\t317,\t\t0\t\t],\n\t\t[1272,\t\t318,\t\t0\t\t],\n\t\t[1273,\t\t319,\t\t0\t\t],\n\t\t[1274,\t\t321,\t\t0\t\t],\n\t\t[1277,\t\t324,\t\t0\t\t],\n\t\t[1278,\t\t325,\t\t0\t\t],\n\t\t[1280,\t\t327,\t\t0\t\t],\n\t\t[1281,\t\t328,\t\t0\t\t
],\n\t\t[1282,\t\t329,\t\t0\t\t],\n\t\t[1283,\t\t331,\t\t0\t\t],\n\t\t[1284,\t\t333,\t\t0\t\t],\n\t\t[1285,\t\t335,\t\t0\t\t],\n\t\t[1286,\t\t337,\t\t0\t\t],\n\t\t[1287,\t\t338,\t\t0\t\t],\n\t\t[1288,\t\t339,\t\t0\t\t],\n\t\t[1289,\t\t340,\t\t0\t\t],\n\t\t[1290,\t\t341,\t\t0\t\t],\n\t\t[1291,\t\t342,\t\t0\t\t],\n\t\t[1292,\t\t343,\t\t0\t\t],\n\t\t[1293,\t\t344,\t\t0\t\t],\n\t\t[1294,\t\t345,\t\t0\t\t],\n\t\t[1295,\t\t346,\t\t0\t\t],\n\t\t[1296,\t\t347,\t\t0\t\t],\n\t\t[1297,\t\t348,\t\t0\t\t],\n\t\t[1298,\t\t350,\t\t0\t\t],\n\t\t[1299,\t\t352,\t\t0\t\t],\n\t\t[1300,\t\t353,\t\t0\t\t],\n\t\t[1301,\t\t354,\t\t0\t\t],\n\t\t[1302,\t\t355,\t\t0\t\t],\n\t\t[1303,\t\t356,\t\t0\t\t],\n\t\t[1304,\t\t357,\t\t0\t\t],\n\t\t[1305,\t\t359,\t\t0\t\t],\n\t\t[1306,\t\t361,\t\t0\t\t],\n\t\t[1307,\t\t362,\t\t0\t\t],\n\t\t[1308,\t\t363,\t\t0\t\t],\n\t\t[1309,\t\t364,\t\t0\t\t],\n\t\t[1310,\t\t365,\t\t0\t\t],\n\t\t[1311,\t\t366,\t\t0\t\t],\n\t\t[1312,\t\t367,\t\t0\t\t],\n\t\t[1313,\t\t368,\t\t0\t\t],\n\t\t[1314,\t\t369,\t\t0\t\t],\n\t\t[1315,\t\t370,\t\t0\t\t],\n\t\t[1316,\t\t371,\t\t0\t\t],\n\t\t[1317,\t\t372,\t\t0\t\t],\n\t\t[1318,\t\t373,\t\t0\t\t],\n\t\t[1319,\t\t374,\t\t0\t\t],\n\t\t[1320,\t\t375,\t\t0\t\t],\n\t\t[1321,\t\t376,\t\t0\t\t],\n\t\t[1322,\t\t377,\t\t0\t\t],\n\t\t[1323,\t\t378,\t\t0\t\t],\n\t\t[1324,\t\t379,\t\t0\t\t],\n\t\t[1325,\t\t381,\t\t0\t\t],\n\t\t[1326,\t\t384,\t\t0\t\t],\n\t\t[1327,\t\t385,\t\t0\t\t],\n\t\t[1328,\t\t386,\t\t0\t\t],\n\t\t[1329,\t\t387,\t\t0\t\t],\n\t\t[1330,\t\t388,\t\t0\t\t],\n\t\t[1331,\t\t390,\t\t0\t\t],\n\t\t[1332,\t\t391,\t\t0\t\t],\n\t\t[1333,\t\t392,\t\t0\t\t],\n\t\t[1334,\t\t393,\t\t0\t\t],\n\t\t[1335,\t\t394,\t\t0\t\t],\n\t\t[1336,\t\t395,\t\t0\t\t],\n\t\t[1337,\t\t396,\t\t0\t\t],\n\t\t[1338,\t\t397,\t\t0\t\t],\n\t\t[1339,\t\t398,\t\t0\t\t],\n\t\t[1340,\t\t399,\t\t0\t\t],\n\t\t[1341,\t\t400,\t\t0\t\t],\n\t\t[1342,\t\t403,\t\t0\t\t],\n\t\t[1344,\t\t405,\t\t0\t\t],\n\t\t[1345,\t\t406,\t\t0\t\t],\n\t\t[1346,\t\t407,\t\t0\t\t],\n\t\t[1347,\t\t408,\t\t0\t\t],\n\t\t[1348,\t\t410,\t\t0\t\t],\n\t\t[1349,\t\t411,\t\t0\t\t],\n\t\t[1350,\t\t412,\t\t0\t\t],\n\t\t[1351,\t\t413,\t\t0\t\t],\n\t\t[1352,\t\t414,\t\t0\t\t],\n\t\t[1355,\t\t418,\t\t0\t\t],\n\t\t[1356,\t\t419,\t\t0\t\t],\n\t\t[1357,\t\t420,\t\t0\t\t],\n\t\t[1358,\t\t421,\t\t0\t\t],\n\t\t[1359,\t\t422,\t\t0\t\t],\n\t\t[1360,\t\t423,\t\t0\t\t],\n\t\t[1361,\t\t424,\t\t0\t\t],\n\t\t[1362,\t\t425,\t\t0\t\t],\n\t\t[1363,\t\t426,\t\t0\t\t],\n\t\t[1364,\t\t427,\t\t0\t\t],\n\t\t[1365,\t\t428,\t\t0\t\t],\n\t\t[1366,\t\t429,\t\t0\t\t],\n\t\t[1367,\t\t430,\t\t0\t\t],\n\t\t[1368,\t\t431,\t\t0\t\t],\n\t\t[1369,\t\t432,\t\t0\t\t],\n\t\t[1370,\t\t433,\t\t0\t\t],\n\t\t[1371,\t\t434,\t\t0\t\t],\n\t\t[1372,\t\t435,\t\t0\t\t],\n\t\t[1373,\t\t436,\t\t0\t\t],\n\t\t[1374,\t\t437,\t\t0\t\t],\n\t\t[1375,\t\t438,\t\t0\t\t],\n\t\t[1376,\t\t439,\t\t0\t\t],\n\t\t[1377,\t\t440,\t\t0\t\t],\n\t\t[1378,\t\t441,\t\t0\t\t],\n\t\t[1379,\t\t442,\t\t0\t\t],\n\t\t[1380,\t\t443,\t\t0\t\t],\n\t\t[1381,\t\t445,\t\t0\t\t],\n\t\t[1382,\t\t446,\t\t0\t\t],\n\t\t[1383,\t\t447,\t\t0\t\t],\n\t\t[1384,\t\t448,\t\t0\t\t],\n\t\t[1385,\t\t449,\t\t0\t\t],\n\t\t[1386,\t\t450,\t\t0\t\t],\n\t\t[1387,\t\t451,\t\t0\t\t],\n\t\t[1388,\t\t453,\t\t0\t\t],\n\t\t[1389,\t\t454,\t\t0\t\t],\n\t\t[1390,\t\t455,\t\t0\t\t],\n\t\t[1391,\t\t456,\t\t0\t\t],\n\t\t[1392,\t\t457,\t\t0\t\t],\n\t\t[1393,\t\t458,\t\t0\t\t],\n\t\t[1394,\t\t459,\t\t0\t\t],\n\t\t[1395,\t\t460,\t\t0\t\t],\n\t\t[1396,\t\t461,\t\t0\t\t],\n\t\t[1397,\t\t462,\t\t0\t\t],\n\t\t[1398,\t\t463,\t\t0\t\t],\n\t\t[1399,\t\t464
,\t\t0\t\t],\n\t\t[1400,\t\t465,\t\t0\t\t],\n\t\t[1401,\t\t466,\t\t0\t\t],\n\t\t[1402,\t\t467,\t\t0\t\t],\n\t\t[1403,\t\t468,\t\t0\t\t],\n\t\t[1404,\t\t469,\t\t0\t\t],\n\t\t[1405,\t\t470,\t\t0\t\t],\n\t\t[1406,\t\t471,\t\t0\t\t],\n\t\t[1407,\t\t472,\t\t0\t\t],\n\t\t[1408,\t\t473,\t\t0\t\t],\n\t\t[1409,\t\t474,\t\t0\t\t],\n\t\t[1410,\t\t475,\t\t0\t\t],\n\t\t[1411,\t\t476,\t\t0\t\t],\n\t\t[1412,\t\t477,\t\t0\t\t],\n\t\t[1413,\t\t478,\t\t0\t\t],\n\t\t[1414,\t\t479,\t\t0\t\t],\n\t\t[1415,\t\t480,\t\t0\t\t],\n\t\t[1416,\t\t481,\t\t0\t\t],\n\t\t[1417,\t\t482,\t\t0\t\t],\n\t\t[1418,\t\t483,\t\t0\t\t],\n\t\t[1419,\t\t484,\t\t0\t\t],\n\t\t[1420,\t\t485,\t\t0\t\t],\n\t\t[1421,\t\t486,\t\t0\t\t],\n\t\t[1422,\t\t487,\t\t0\t\t],\n\t\t[1423,\t\t488,\t\t0\t\t],\n\t\t[1424,\t\t489,\t\t0\t\t],\n\t\t[1425,\t\t490,\t\t0\t\t],\n\t\t[1426,\t\t491,\t\t0\t\t],\n\t\t[1427,\t\t492,\t\t0\t\t],\n\t\t[1428,\t\t493,\t\t0\t\t],\n\t\t[1429,\t\t494,\t\t0\t\t],\n\t\t[1430,\t\t495,\t\t0\t\t],\n\t\t[1431,\t\t496,\t\t0\t\t],\n\t\t[1432,\t\t497,\t\t0\t\t],\n\t\t[1433,\t\t498,\t\t0\t\t],\n\t\t[1434,\t\t499,\t\t0\t\t],\n\t\t[1435,\t\t500,\t\t0\t\t],\n\t\t[1436,\t\t501,\t\t0\t\t],\n\t\t[1437,\t\t502,\t\t0\t\t],\n\t\t[1438,\t\t503,\t\t0\t\t],\n\t\t[1439,\t\t504,\t\t0\t\t],\n\t\t[1440,\t\t505,\t\t0\t\t],\n\t\t[1441,\t\t506,\t\t0\t\t],\n\t\t[1442,\t\t507,\t\t0\t\t],\n\t\t[1443,\t\t508,\t\t0\t\t],\n\t\t[1444,\t\t509,\t\t0\t\t],\n\t\t[1445,\t\t510,\t\t0\t\t],\n\t\t[1446,\t\t511,\t\t0\t\t],\n\t\t[1447,\t\t512,\t\t0\t\t],\n\t\t[1448,\t\t513,\t\t0\t\t],\n\t\t[1449,\t\t514,\t\t0\t\t],\n\t\t[1450,\t\t515,\t\t0\t\t],\n\t\t[1451,\t\t516,\t\t0\t\t],\n\t\t[1452,\t\t517,\t\t0\t\t],\n\t\t[1453,\t\t518,\t\t0\t\t],\n\t\t[1454,\t\t519,\t\t0\t\t],\n\t\t[1455,\t\t520,\t\t0\t\t],\n\t\t[1456,\t\t521,\t\t0\t\t],\n\t\t[1457,\t\t522,\t\t0\t\t],\n\t\t[1458,\t\t523,\t\t0\t\t],\n\t\t[1459,\t\t524,\t\t0\t\t],\n\t\t[1460,\t\t525,\t\t0\t\t],\n\t\t[1461,\t\t526,\t\t0\t\t],\n\t\t[1462,\t\t527,\t\t0\t\t],\n\t\t[1463,\t\t528,\t\t0\t\t],\n\t\t[1464,\t\t529,\t\t0\t\t],\n\t\t[1465,\t\t530,\t\t0\t\t],\n\t\t[1466,\t\t531,\t\t0\t\t],\n\t\t[1467,\t\t532,\t\t0\t\t],\n\t\t[1468,\t\t533,\t\t0\t\t],\n\t\t[1469,\t\t534,\t\t0\t\t],\n\t\t[1470,\t\t535,\t\t0\t\t],\n\t\t[1471,\t\t536,\t\t0\t\t],\n\t\t[1472,\t\t537,\t\t0\t\t],\n\t\t[1473,\t\t538,\t\t0\t\t],\n\t\t[1474,\t\t539,\t\t0\t\t],\n\t\t[1475,\t\t540,\t\t0\t\t],\n\t\t[1476,\t\t541,\t\t0\t\t],\n\t\t[1477,\t\t542,\t\t0\t\t],\n\t\t[1479,\t\t544,\t\t0\t\t],\n\t\t[1480,\t\t545,\t\t0\t\t],\n\t\t[1481,\t\t546,\t\t0\t\t],\n\t\t[1482,\t\t547,\t\t0\t\t],\n\t\t[1483,\t\t548,\t\t0\t\t],\n\t\t[1484,\t\t549,\t\t0\t\t],\n\t\t[1485,\t\t550,\t\t0\t\t],\n\t\t[1486,\t\t551,\t\t0\t\t],\n\t\t[1487,\t\t552,\t\t0\t\t],\n\t\t[1488,\t\t554,\t\t0\t\t],\n\t\t[1489,\t\t555,\t\t0\t\t],\n\t\t[1490,\t\t556,\t\t0\t\t],\n\t\t[1491,\t\t557,\t\t0\t\t],\n\t\t[1492,\t\t558,\t\t0\t\t],\n\t\t[1493,\t\t559,\t\t0\t\t],\n\t\t[1494,\t\t560,\t\t0\t\t],\n\t\t[1495,\t\t561,\t\t0\t\t],\n\t\t[1496,\t\t562,\t\t0\t\t],\n\t\t[1497,\t\t563,\t\t0\t\t],\n\t\t[1498,\t\t564,\t\t0\t\t],\n\t\t[1499,\t\t565,\t\t0\t\t],\n\t\t[1500,\t\t566,\t\t0\t\t],\n\t\t[1501,\t\t567,\t\t0\t\t],\n\t\t[1502,\t\t568,\t\t0\t\t],\n\t\t[1503,\t\t569,\t\t0\t\t],\n\t\t[1504,\t\t570,\t\t0\t\t],\n\t\t[1505,\t\t571,\t\t0\t\t],\n\t\t[1506,\t\t572,\t\t0\t\t],\n\t\t[1507,\t\t573,\t\t0\t\t],\n\t\t[1508,\t\t574,\t\t0\t\t],\n\t\t[1510,\t\t576,\t\t0\t\t],\n\t\t[1511,\t\t577,\t\t0\t\t],\n\t\t[1512,\t\t578,\t\t0\t\t],\n\t\t[1513,\t\t579,\t\t0\t\t],\n\t\t[1516,\t\t582,\t\t0\t\t],\n\t\t[1517,\t\t583,\t\t0\t\t],\n\t\t[15
18,\t\t584,\t\t0\t\t],\n\t\t[1519,\t\t585,\t\t0\t\t],\n\t\t[1,\t\t490,\t\t0\t\t],\n\t\t[3,\t\t4,\t\t1\t\t],\n\t\t[491,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t5,\t\t0\t\t],\n\t\t[8,\t\t9,\t\t0\t\t],\n\t\t[492,\t\t11,\t\t0\t\t],\n\t\t[11,\t\t493,\t\t0\t\t],\n\t\t[492,\t\t493,\t\t1\t\t],\n\t\t[494,\t\t14,\t\t0\t\t],\n\t\t[13,\t\t15,\t\t0\t\t],\n\t\t[16,\t\t5,\t\t0\t\t],\n\t\t[17,\t\t18,\t\t1\t\t],\n\t\t[17,\t\t12,\t\t0\t\t],\n\t\t[14,\t\t495,\t\t0\t\t],\n\t\t[494,\t\t19,\t\t0\t\t],\n\t\t[20,\t\t21,\t\t0\t\t],\n\t\t[20,\t\t22,\t\t1\t\t],\n\t\t[497,\t\t23,\t\t0\t\t],\n\t\t[23,\t\t499,\t\t1\t\t],\n\t\t[25,\t\t26,\t\t0\t\t],\n\t\t[25,\t\t22,\t\t0\t\t],\n\t\t[23,\t\t27,\t\t0\t\t],\n\t\t[28,\t\t23,\t\t0\t\t],\n\t\t[8,\t\t21,\t\t0\t\t],\n\t\t[9,\t\t29,\t\t0\t\t],\n\t\t[30,\t\t25,\t\t1\t\t],\n\t\t[31,\t\t32,\t\t1\t\t],\n\t\t[32,\t\t33,\t\t1\t\t],\n\t\t[34,\t\t35,\t\t0\t\t],\n\t\t[35,\t\t36,\t\t0\t\t],\n\t\t[490,\t\t6,\t\t1\t\t],\n\t\t[37,\t\t10,\t\t1\t\t],\n\t\t[10,\t\t38,\t\t0\t\t],\n\t\t[37,\t\t38,\t\t1\t\t],\n\t\t[39,\t\t40,\t\t1\t\t],\n\t\t[39,\t\t41,\t\t1\t\t],\n\t\t[42,\t\t41,\t\t1\t\t],\n\t\t[18,\t\t42,\t\t1\t\t],\n\t\t[492,\t\t43,\t\t1\t\t],\n\t\t[44,\t\t45,\t\t0\t\t],\n\t\t[44,\t\t505,\t\t0\t\t],\n\t\t[46,\t\t12,\t\t0\t\t],\n\t\t[47,\t\t48,\t\t0\t\t],\n\t\t[49,\t\t50,\t\t0\t\t],\n\t\t[31,\t\t33,\t\t1\t\t],\n\t\t[31,\t\t51,\t\t0\t\t],\n\t\t[52,\t\t53,\t\t1\t\t],\n\t\t[52,\t\t54,\t\t0\t\t],\n\t\t[506,\t\t55,\t\t0\t\t],\n\t\t[506,\t\t507,\t\t1\t\t],\n\t\t[57,\t\t506,\t\t0\t\t],\n\t\t[57,\t\t58,\t\t0\t\t],\n\t\t[58,\t\t506,\t\t0\t\t],\n\t\t[59,\t\t60,\t\t1\t\t],\n\t\t[508,\t\t62,\t\t0\t\t],\n\t\t[30,\t\t61,\t\t1\t\t],\n\t\t[63,\t\t506,\t\t0\t\t],\n\t\t[13,\t\t64,\t\t0\t\t],\n\t\t[65,\t\t66,\t\t1\t\t],\n\t\t[59,\t\t67,\t\t0\t\t],\n\t\t[61,\t\t67,\t\t0\t\t],\n\t\t[68,\t\t69,\t\t1\t\t],\n\t\t[70,\t\t69,\t\t1\t\t],\n\t\t[71,\t\t72,\t\t1\t\t],\n\t\t[73,\t\t74,\t\t1\t\t],\n\t\t[37,\t\t75,\t\t1\t\t],\n\t\t[72,\t\t75,\t\t0\t\t],\n\t\t[37,\t\t72,\t\t1\t\t],\n\t\t[76,\t\t77,\t\t1\t\t],\n\t\t[77,\t\t51,\t\t0\t\t],\n\t\t[73,\t\t72,\t\t1\t\t],\n\t\t[18,\t\t40,\t\t1\t\t],\n\t\t[492,\t\t45,\t\t1\t\t],\n\t\t[10,\t\t74,\t\t1\t\t],\n\t\t[45,\t\t511,\t\t1\t\t],\n\t\t[78,\t\t32,\t\t1\t\t],\n\t\t[79,\t\t80,\t\t0\t\t],\n\t\t[81,\t\t79,\t\t1\t\t],\n\t\t[34,\t\t82,\t\t0\t\t],\n\t\t[83,\t\t84,\t\t0\t\t],\n\t\t[83,\t\t499,\t\t0\t\t],\n\t\t[85,\t\t86,\t\t0\t\t],\n\t\t[87,\t\t86,\t\t1\t\t],\n\t\t[88,\t\t89,\t\t0\t\t],\n\t\t[90,\t\t86,\t\t1\t\t],\n\t\t[91,\t\t86,\t\t0\t\t],\n\t\t[86,\t\t92,\t\t0\t\t],\n\t\t[86,\t\t93,\t\t0\t\t],\n\t\t[94,\t\t86,\t\t1\t\t],\n\t\t[86,\t\t95,\t\t1\t\t],\n\t\t[513,\t\t517,\t\t0\t\t],\n\t\t[97,\t\t66,\t\t1\t\t],\n\t\t[42,\t\t98,\t\t0\t\t],\n\t\t[99,\t\t100,\t\t1\t\t],\n\t\t[42,\t\t101,\t\t0\t\t],\n\t\t[102,\t\t42,\t\t1\t\t],\n\t\t[103,\t\t87,\t\t0\t\t],\n\t\t[104,\t\t103,\t\t0\t\t],\n\t\t[105,\t\t87,\t\t0\t\t],\n\t\t[106,\t\t107,\t\t0\t\t],\n\t\t[108,\t\t107,\t\t0\t\t],\n\t\t[109,\t\t106,\t\t0\t\t],\n\t\t[110,\t\t111,\t\t1\t\t],\n\t\t[87,\t\t112,\t\t0\t\t],\n\t\t[113,\t\t87,\t\t0\t\t],\n\t\t[87,\t\t85,\t\t1\t\t],\n\t\t[110,\t\t114,\t\t1\t\t],\n\t\t[115,\t\t116,\t\t0\t\t],\n\t\t[117,\t\t118,\t\t0\t\t],\n\t\t[117,\t\t119,\t\t0\t\t],\n\t\t[117,\t\t120,\t\t1\t\t],\n\t\t[121,\t\t122,\t\t0\t\t],\n\t\t[123,\t\t124,\t\t0\t\t],\n\t\t[125,\t\t126,\t\t0\t\t],\n\t\t[127,\t\t119,\t\t0\t\t],\n\t\t[118,\t\t128,\t\t0\t\t],\n\t\t[121,\t\t119,\t\t0\t\t],\n\t\t[530,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t130,\t\t0\t\t],\n\t\t[125,\t\t123,\t\t0\t\t],\n\t\t[131,\t\t132,\t\t0\t\t],\n\t\t[133,\t\t123,\t\t0\t\t],\n\t\t[524,\t\t134,\t\t
0\t\t],\n\t\t[135,\t\t136,\t\t0\t\t],\n\t\t[123,\t\t131,\t\t0\t\t],\n\t\t[117,\t\t128,\t\t1\t\t],\n\t\t[137,\t\t521,\t\t0\t\t],\n\t\t[531,\t\t514,\t\t0\t\t],\n\t\t[139,\t\t521,\t\t0\t\t],\n\t\t[140,\t\t514,\t\t0\t\t],\n\t\t[522,\t\t141,\t\t0\t\t],\n\t\t[142,\t\t523,\t\t0\t\t],\n\t\t[530,\t\t526,\t\t0\t\t],\n\t\t[140,\t\t532,\t\t0\t\t],\n\t\t[142,\t\t144,\t\t0\t\t],\n\t\t[140,\t\t522,\t\t0\t\t],\n\t\t[145,\t\t146,\t\t0\t\t],\n\t\t[147,\t\t523,\t\t0\t\t],\n\t\t[144,\t\t523,\t\t0\t\t],\n\t\t[139,\t\t523,\t\t0\t\t],\n\t\t[140,\t\t141,\t\t0\t\t],\n\t\t[528,\t\t526,\t\t0\t\t],\n\t\t[528,\t\t148,\t\t0\t\t],\n\t\t[149,\t\t150,\t\t0\t\t],\n\t\t[145,\t\t528,\t\t0\t\t],\n\t\t[530,\t\t151,\t\t0\t\t],\n\t\t[524,\t\t152,\t\t0\t\t],\n\t\t[149,\t\t525,\t\t1\t\t],\n\t\t[139,\t\t514,\t\t0\t\t],\n\t\t[126,\t\t120,\t\t1\t\t],\n\t\t[530,\t\t153,\t\t0\t\t],\n\t\t[528,\t\t147,\t\t1\t\t],\n\t\t[528,\t\t154,\t\t0\t\t],\n\t\t[130,\t\t120,\t\t1\t\t],\n\t\t[528,\t\t155,\t\t1\t\t],\n\t\t[524,\t\t533,\t\t0\t\t],\n\t\t[524,\t\t149,\t\t0\t\t],\n\t\t[154,\t\t150,\t\t0\t\t],\n\t\t[157,\t\t110,\t\t1\t\t],\n\t\t[119,\t\t158,\t\t0\t\t],\n\t\t[159,\t\t60,\t\t0\t\t],\n\t\t[536,\t\t161,\t\t0\t\t],\n\t\t[115,\t\t151,\t\t0\t\t],\n\t\t[162,\t\t134,\t\t0\t\t],\n\t\t[115,\t\t526,\t\t0\t\t],\n\t\t[138,\t\t87,\t\t0\t\t],\n\t\t[123,\t\t163,\t\t0\t\t],\n\t\t[112,\t\t164,\t\t0\t\t],\n\t\t[112,\t\t165,\t\t0\t\t],\n\t\t[166,\t\t165,\t\t0\t\t],\n\t\t[167,\t\t537,\t\t0\t\t],\n\t\t[168,\t\t104,\t\t0\t\t],\n\t\t[531,\t\t520,\t\t0\t\t],\n\t\t[139,\t\t520,\t\t0\t\t],\n\t\t[520,\t\t169,\t\t0\t\t],\n\t\t[168,\t\t105,\t\t0\t\t],\n\t\t[520,\t\t170,\t\t0\t\t],\n\t\t[171,\t\t89,\t\t0\t\t],\n\t\t[521,\t\t172,\t\t0\t\t],\n\t\t[123,\t\t173,\t\t0\t\t],\n\t\t[521,\t\t174,\t\t0\t\t],\n\t\t[37,\t\t39,\t\t0\t\t],\n\t\t[530,\t\t175,\t\t0\t\t],\n\t\t[530,\t\t176,\t\t0\t\t],\n\t\t[88,\t\t530,\t\t0\t\t],\n\t\t[177,\t\t496,\t\t1\t\t],\n\t\t[178,\t\t525,\t\t0\t\t],\n\t\t[179,\t\t493,\t\t1\t\t],\n\t\t[180,\t\t181,\t\t1\t\t],\n\t\t[182,\t\t180,\t\t0\t\t],\n\t\t[179,\t\t181,\t\t0\t\t],\n\t\t[180,\t\t493,\t\t1\t\t],\n\t\t[183,\t\t30,\t\t0\t\t],\n\t\t[183,\t\t21,\t\t0\t\t],\n\t\t[538,\t\t185,\t\t0\t\t],\n\t\t[538,\t\t89,\t\t0\t\t],\n\t\t[184,\t\t186,\t\t0\t\t],\n\t\t[184,\t\t187,\t\t0\t\t],\n\t\t[520,\t\t172,\t\t0\t\t],\n\t\t[89,\t\t175,\t\t0\t\t],\n\t\t[185,\t\t89,\t\t0\t\t],\n\t\t[89,\t\t188,\t\t0\t\t],\n\t\t[189,\t\t190,\t\t0\t\t],\n\t\t[539,\t\t172,\t\t0\t\t],\n\t\t[504,\t\t192,\t\t0\t\t],\n\t\t[105,\t\t186,\t\t0\t\t],\n\t\t[105,\t\t187,\t\t0\t\t],\n\t\t[539,\t\t193,\t\t0\t\t],\n\t\t[187,\t\t194,\t\t0\t\t],\n\t\t[539,\t\t540,\t\t0\t\t],\n\t\t[539,\t\t196,\t\t0\t\t],\n\t\t[197,\t\t540,\t\t0\t\t],\n\t\t[110,\t\t198,\t\t0\t\t],\n\t\t[197,\t\t539,\t\t0\t\t],\n\t\t[199,\t\t537,\t\t0\t\t],\n\t\t[134,\t\t526,\t\t0\t\t],\n\t\t[200,\t\t193,\t\t0\t\t],\n\t\t[4,\t\t201,\t\t1\t\t],\n\t\t[202,\t\t86,\t\t0\t\t],\n\t\t[85,\t\t203,\t\t0\t\t],\n\t\t[147,\t\t204,\t\t0\t\t],\n\t\t[147,\t\t205,\t\t0\t\t],\n\t\t[123,\t\t206,\t\t0\t\t],\n\t\t[537,\t\t207,\t\t0\t\t],\n\t\t[165,\t\t208,\t\t0\t\t],\n\t\t[4,\t\t94,\t\t1\t\t],\n\t\t[4,\t\t2,\t\t0\t\t],\n\t\t[209,\t\t4,\t\t0\t\t],\n\t\t[119,\t\t163,\t\t0\t\t],\n\t\t[210,\t\t3,\t\t0\t\t],\n\t\t[99,\t\t211,\t\t0\t\t],\n\t\t[99,\t\t69,\t\t1\t\t],\n\t\t[212,\t\t99,\t\t0\t\t],\n\t\t[213,\t\t214,\t\t0\t\t],\n\t\t[510,\t\t215,\t\t0\t\t],\n\t\t[128,\t\t69,\t\t1\t\t],\n\t\t[216,\t\t69,\t\t1\t\t],\n\t\t[217,\t\t98,\t\t0\t\t],\n\t\t[504,\t\t218,\t\t0\t\t],\n\t\t[177,\t\t504,\t\t1\t\t],\n\t\t[219,\t\t209,\t\t0\t\t],\n\t\t[219,\t\t220,\t\t0\t\t],\n\t\t[94,\t
\t95,\t\t1\t\t],\n\t\t[159,\t\t221,\t\t1\t\t],\n\t\t[34,\t\t161,\t\t0\t\t],\n\t\t[222,\t\t221,\t\t0\t\t],\n\t\t[211,\t\t52,\t\t1\t\t],\n\t\t[215,\t\t223,\t\t1\t\t],\n\t\t[224,\t\t215,\t\t0\t\t],\n\t\t[225,\t\t224,\t\t1\t\t],\n\t\t[224,\t\t223,\t\t0\t\t],\n\t\t[226,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t3,\t\t1\t\t],\n\t\t[216,\t\t227,\t\t1\t\t],\n\t\t[228,\t\t229,\t\t0\t\t],\n\t\t[227,\t\t230,\t\t0\t\t],\n\t\t[231,\t\t53,\t\t1\t\t],\n\t\t[544,\t\t545,\t\t0\t\t],\n\t\t[234,\t\t235,\t\t1\t\t],\n\t\t[546,\t\t214,\t\t1\t\t],\n\t\t[233,\t\t227,\t\t0\t\t],\n\t\t[237,\t\t238,\t\t0\t\t],\n\t\t[212,\t\t100,\t\t0\t\t],\n\t\t[519,\t\t239,\t\t0\t\t],\n\t\t[238,\t\t519,\t\t0\t\t],\n\t\t[213,\t\t240,\t\t0\t\t],\n\t\t[241,\t\t242,\t\t1\t\t],\n\t\t[70,\t\t241,\t\t0\t\t],\n\t\t[509,\t\t213,\t\t0\t\t],\n\t\t[68,\t\t243,\t\t0\t\t],\n\t\t[243,\t\t244,\t\t0\t\t],\n\t\t[68,\t\t244,\t\t0\t\t],\n\t\t[544,\t\t547,\t\t1\t\t],\n\t\t[245,\t\t227,\t\t1\t\t],\n\t\t[246,\t\t208,\t\t0\t\t],\n\t\t[112,\t\t208,\t\t0\t\t],\n\t\t[165,\t\t247,\t\t0\t\t],\n\t\t[537,\t\t549,\t\t0\t\t],\n\t\t[537,\t\t550,\t\t0\t\t],\n\t\t[537,\t\t551,\t\t0\t\t],\n\t\t[110,\t\t251,\t\t0\t\t],\n\t\t[510,\t\t252,\t\t1\t\t],\n\t\t[529,\t\t253,\t\t1\t\t],\n\t\t[237,\t\t239,\t\t1\t\t],\n\t\t[254,\t\t238,\t\t1\t\t],\n\t\t[69,\t\t255,\t\t0\t\t],\n\t\t[510,\t\t225,\t\t1\t\t],\n\t\t[256,\t\t257,\t\t0\t\t],\n\t\t[258,\t\t190,\t\t0\t\t],\n\t\t[258,\t\t259,\t\t0\t\t],\n\t\t[260,\t\t261,\t\t1\t\t],\n\t\t[554,\t\t553,\t\t1\t\t],\n\t\t[515,\t\t263,\t\t0\t\t],\n\t\t[14,\t\t264,\t\t1\t\t],\n\t\t[116,\t\t555,\t\t0\t\t],\n\t\t[151,\t\t116,\t\t0\t\t],\n\t\t[111,\t\t114,\t\t1\t\t],\n\t\t[77,\t\t111,\t\t0\t\t],\n\t\t[266,\t\t525,\t\t0\t\t],\n\t\t[267,\t\t120,\t\t1\t\t],\n\t\t[268,\t\t269,\t\t0\t\t],\n\t\t[556,\t\t271,\t\t0\t\t],\n\t\t[556,\t\t272,\t\t0\t\t],\n\t\t[529,\t\t273,\t\t0\t\t],\n\t\t[128,\t\t274,\t\t0\t\t],\n\t\t[34,\t\t275,\t\t0\t\t],\n\t\t[503,\t\t276,\t\t0\t\t],\n\t\t[503,\t\t504,\t\t1\t\t],\n\t\t[177,\t\t218,\t\t1\t\t],\n\t\t[277,\t\t278,\t\t1\t\t],\n\t\t[557,\t\t558,\t\t1\t\t],\n\t\t[557,\t\t559,\t\t1\t\t],\n\t\t[559,\t\t558,\t\t1\t\t],\n\t\t[277,\t\t78,\t\t1\t\t],\n\t\t[277,\t\t279,\t\t1\t\t],\n\t\t[78,\t\t279,\t\t0\t\t],\n\t\t[281,\t\t282,\t\t0\t\t],\n\t\t[283,\t\t161,\t\t1\t\t],\n\t\t[268,\t\t161,\t\t1\t\t],\n\t\t[256,\t\t284,\t\t0\t\t],\n\t\t[515,\t\t516,\t\t1\t\t],\n\t\t[263,\t\t516,\t\t0\t\t],\n\t\t[516,\t\t285,\t\t0\t\t],\n\t\t[63,\t\t286,\t\t0\t\t],\n\t\t[287,\t\t516,\t\t0\t\t],\n\t\t[8,\t\t102,\t\t1\t\t],\n\t\t[8,\t\t101,\t\t1\t\t],\n\t\t[80,\t\t288,\t\t0\t\t],\n\t\t[80,\t\t289,\t\t0\t\t],\n\t\t[276,\t\t560,\t\t0\t\t],\n\t\t[37,\t\t290,\t\t0\t\t],\n\t\t[290,\t\t74,\t\t1\t\t],\n\t\t[512,\t\t291,\t\t0\t\t],\n\t\t[78,\t\t292,\t\t1\t\t],\n\t\t[199,\t\t548,\t\t0\t\t],\n\t\t[491,\t\t293,\t\t0\t\t],\n\t\t[4,\t\t294,\t\t0\t\t],\n\t\t[490,\t\t541,\t\t1\t\t],\n\t\t[491,\t\t295,\t\t0\t\t],\n\t\t[491,\t\t296,\t\t0\t\t],\n\t\t[295,\t\t297,\t\t0\t\t],\n\t\t[508,\t\t161,\t\t0\t\t],\n\t\t[117,\t\t123,\t\t0\t\t],\n\t\t[133,\t\t117,\t\t0\t\t],\n\t\t[71,\t\t74,\t\t1\t\t],\n\t\t[74,\t\t278,\t\t1\t\t],\n\t\t[298,\t\t515,\t\t0\t\t],\n\t\t[5,\t\t299,\t\t0\t\t],\n\t\t[32,\t\t292,\t\t1\t\t],\n\t\t[5,\t\t29,\t\t1\t\t],\n\t\t[503,\t\t560,\t\t0\t\t],\n\t\t[300,\t\t301,\t\t1\t\t],\n\t\t[51,\t\t300,\t\t0\t\t],\n\t\t[244,\t\t302,\t\t1\t\t],\n\t\t[31,\t\t302,\t\t1\t\t],\n\t\t[51,\t\t282,\t\t1\t\t],\n\t\t[303,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t259,\t\t0\t\t],\n\t\t[306,\t\t307,\t\t1\t\t],\n\t\t[305,\t\t308,\t\t0\t\t],\n\t\t[305,\t\t309,\t\t0\t\t],\n\t\t[310,
\t\t309,\t\t1\t\t],\n\t\t[306,\t\t309,\t\t1\t\t],\n\t\t[311,\t\t280,\t\t0\t\t],\n\t\t[280,\t\t278,\t\t1\t\t],\n\t\t[311,\t\t32,\t\t1\t\t],\n\t\t[13,\t\t312,\t\t1\t\t],\n\t\t[313,\t\t314,\t\t0\t\t],\n\t\t[312,\t\t313,\t\t1\t\t],\n\t\t[547,\t\t566,\t\t1\t\t],\n\t\t[245,\t\t315,\t\t1\t\t],\n\t\t[312,\t\t316,\t\t0\t\t],\n\t\t[312,\t\t314,\t\t0\t\t],\n\t\t[554,\t\t546,\t\t1\t\t],\n\t\t[262,\t\t216,\t\t1\t\t],\n\t\t[317,\t\t233,\t\t0\t\t],\n\t\t[318,\t\t317,\t\t0\t\t],\n\t\t[231,\t\t52,\t\t1\t\t],\n\t\t[319,\t\t567,\t\t0\t\t],\n\t\t[557,\t\t321,\t\t0\t\t],\n\t\t[277,\t\t65,\t\t1\t\t],\n\t\t[322,\t\t288,\t\t1\t\t],\n\t\t[322,\t\t323,\t\t0\t\t],\n\t\t[277,\t\t324,\t\t1\t\t],\n\t\t[324,\t\t325,\t\t0\t\t],\n\t\t[277,\t\t325,\t\t0\t\t],\n\t\t[326,\t\t327,\t\t0\t\t],\n\t\t[328,\t\t326,\t\t1\t\t],\n\t\t[328,\t\t327,\t\t1\t\t],\n\t\t[326,\t\t329,\t\t0\t\t],\n\t\t[568,\t\t329,\t\t1\t\t],\n\t\t[568,\t\t326,\t\t0\t\t],\n\t\t[332,\t\t78,\t\t1\t\t],\n\t\t[333,\t\t306,\t\t0\t\t],\n\t\t[332,\t\t333,\t\t0\t\t],\n\t\t[332,\t\t334,\t\t0\t\t],\n\t\t[66,\t\t334,\t\t1\t\t],\n\t\t[330,\t\t335,\t\t1\t\t],\n\t\t[336,\t\t66,\t\t0\t\t],\n\t\t[330,\t\t336,\t\t1\t\t],\n\t\t[68,\t\t70,\t\t0\t\t],\n\t\t[509,\t\t337,\t\t1\t\t],\n\t\t[324,\t\t288,\t\t0\t\t],\n\t\t[338,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t340,\t\t1\t\t],\n\t\t[559,\t\t340,\t\t1\t\t],\n\t\t[341,\t\t292,\t\t0\t\t],\n\t\t[557,\t\t342,\t\t0\t\t],\n\t\t[558,\t\t343,\t\t0\t\t],\n\t\t[502,\t\t340,\t\t1\t\t],\n\t\t[72,\t\t32,\t\t1\t\t],\n\t\t[344,\t\t345,\t\t0\t\t],\n\t\t[346,\t\t47,\t\t0\t\t],\n\t\t[46,\t\t47,\t\t0\t\t],\n\t\t[346,\t\t345,\t\t0\t\t],\n\t\t[347,\t\t328,\t\t0\t\t],\n\t\t[347,\t\t348,\t\t1\t\t],\n\t\t[571,\t\t348,\t\t1\t\t],\n\t\t[347,\t\t572,\t\t0\t\t],\n\t\t[571,\t\t570,\t\t1\t\t],\n\t\t[14,\t\t350,\t\t0\t\t],\n\t\t[350,\t\t573,\t\t0\t\t],\n\t\t[15,\t\t351,\t\t1\t\t],\n\t\t[352,\t\t15,\t\t0\t\t],\n\t\t[15,\t\t335,\t\t1\t\t],\n\t\t[232,\t\t227,\t\t0\t\t],\n\t\t[565,\t\t544,\t\t1\t\t],\n\t\t[235,\t\t567,\t\t1\t\t],\n\t\t[567,\t\t286,\t\t0\t\t],\n\t\t[353,\t\t519,\t\t0\t\t],\n\t\t[354,\t\t353,\t\t0\t\t],\n\t\t[355,\t\t354,\t\t0\t\t],\n\t\t[354,\t\t356,\t\t0\t\t],\n\t\t[357,\t\t358,\t\t0\t\t],\n\t\t[574,\t\t359,\t\t0\t\t],\n\t\t[235,\t\t575,\t\t0\t\t],\n\t\t[167,\t\t361,\t\t0\t\t],\n\t\t[528,\t\t362,\t\t0\t\t],\n\t\t[363,\t\t344,\t\t0\t\t],\n\t\t[259,\t\t364,\t\t1\t\t],\n\t\t[54,\t\t56,\t\t0\t\t],\n\t\t[365,\t\t364,\t\t0\t\t],\n\t\t[231,\t\t366,\t\t0\t\t],\n\t\t[30,\t\t367,\t\t0\t\t],\n\t\t[61,\t\t367,\t\t1\t\t],\n\t\t[254,\t\t368,\t\t0\t\t],\n\t\t[254,\t\t369,\t\t0\t\t],\n\t\t[254,\t\t370,\t\t0\t\t],\n\t\t[99,\t\t358,\t\t0\t\t],\n\t\t[354,\t\t519,\t\t0\t\t],\n\t\t[571,\t\t371,\t\t0\t\t],\n\t\t[207,\t\t372,\t\t0\t\t],\n\t\t[57,\t\t373,\t\t0\t\t],\n\t\t[209,\t\t374,\t\t0\t\t],\n\t\t[375,\t\t376,\t\t0\t\t],\n\t\t[376,\t\t377,\t\t0\t\t],\n\t\t[16,\t\t49,\t\t0\t\t],\n\t\t[318,\t\t377,\t\t0\t\t],\n\t\t[378,\t\t297,\t\t0\t\t],\n\t\t[562,\t\t379,\t\t0\t\t],\n\t\t[576,\t\t563,\t\t0\t\t],\n\t\t[576,\t\t381,\t\t0\t\t],\n\t\t[577,\t\t576,\t\t1\t\t],\n\t\t[244,\t\t383,\t\t0\t\t],\n\t\t[244,\t\t306,\t\t1\t\t],\n\t\t[383,\t\t306,\t\t1\t\t],\n\t\t[380,\t\t306,\t\t0\t\t],\n\t\t[252,\t\t225,\t\t0\t\t],\n\t\t[220,\t\t76,\t\t0\t\t],\n\t\t[542,\t\t384,\t\t0\t\t],\n\t\t[385,\t\t384,\t\t0\t\t],\n\t\t[542,\t\t385,\t\t0\t\t],\n\t\t[386,\t\t385,\t\t0\t\t],\n\t\t[387,\t\t578,\t\t0\t\t],\n\t\t[332,\t\t388,\t\t1\t\t],\n\t\t[382,\t\t332,\t\t1\t\t],\n\t\t[382,\t\t388,\t\t0\t\t],\n\t\t[579,\t\t578,\t\t0\t\t],\n\t\t[577,\t\t387,\t\t1\t\t],\n\t\t[144,\t\t390,\t\t
0\t\t],\n\t\t[37,\t\t49,\t\t0\t\t],\n\t\t[391,\t\t233,\t\t0\t\t],\n\t\t[392,\t\t310,\t\t0\t\t],\n\t\t[260,\t\t393,\t\t0\t\t],\n\t\t[394,\t\t230,\t\t0\t\t],\n\t\t[395,\t\t282,\t\t1\t\t],\n\t\t[395,\t\t244,\t\t0\t\t],\n\t\t[25,\t\t396,\t\t1\t\t],\n\t\t[81,\t\t74,\t\t0\t\t],\n\t\t[278,\t\t80,\t\t1\t\t],\n\t\t[81,\t\t278,\t\t1\t\t],\n\t\t[569,\t\t570,\t\t0\t\t],\n\t\t[397,\t\t552,\t\t0\t\t],\n\t\t[542,\t\t398,\t\t0\t\t],\n\t\t[398,\t\t385,\t\t0\t\t],\n\t\t[399,\t\t499,\t\t0\t\t],\n\t\t[83,\t\t399,\t\t0\t\t],\n\t\t[498,\t\t400,\t\t0\t\t],\n\t\t[518,\t\t239,\t\t1\t\t],\n\t\t[575,\t\t543,\t\t0\t\t],\n\t\t[401,\t\t360,\t\t0\t\t],\n\t\t[580,\t\t581,\t\t0\t\t],\n\t\t[401,\t\t402,\t\t0\t\t],\n\t\t[403,\t\t231,\t\t0\t\t],\n\t\t[189,\t\t360,\t\t1\t\t],\n\t\t[234,\t\t404,\t\t0\t\t],\n\t\t[235,\t\t404,\t\t1\t\t],\n\t\t[235,\t\t580,\t\t0\t\t],\n\t\t[216,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t318,\t\t0\t\t],\n\t\t[406,\t\t230,\t\t0\t\t],\n\t\t[542,\t\t407,\t\t0\t\t],\n\t\t[23,\t\t408,\t\t0\t\t],\n\t\t[577,\t\t348,\t\t0\t\t],\n\t\t[562,\t\t564,\t\t1\t\t],\n\t\t[582,\t\t507,\t\t0\t\t],\n\t\t[27,\t\t410,\t\t0\t\t],\n\t\t[501,\t\t27,\t\t0\t\t],\n\t\t[27,\t\t411,\t\t0\t\t],\n\t\t[411,\t\t410,\t\t0\t\t],\n\t\t[403,\t\t360,\t\t0\t\t],\n\t\t[412,\t\t360,\t\t0\t\t],\n\t\t[326,\t\t413,\t\t0\t\t],\n\t\t[414,\t\t413,\t\t0\t\t],\n\t\t[6,\t\t297,\t\t0\t\t],\n\t\t[554,\t\t580,\t\t1\t\t],\n\t\t[262,\t\t401,\t\t1\t\t],\n\t\t[499,\t\t556,\t\t1\t\t],\n\t\t[224,\t\t229,\t\t0\t\t],\n\t\t[583,\t\t507,\t\t0\t\t],\n\t\t[415,\t\t307,\t\t0\t\t],\n\t\t[416,\t\t507,\t\t0\t\t],\n\t\t[284,\t\t561,\t\t0\t\t],\n\t\t[543,\t\t417,\t\t0\t\t],\n\t\t[418,\t\t506,\t\t0\t\t],\n\t\t[220,\t\t157,\t\t0\t\t],\n\t\t[295,\t\t419,\t\t0\t\t],\n\t\t[295,\t\t420,\t\t0\t\t],\n\t\t[541,\t\t62,\t\t0\t\t],\n\t\t[52,\t\t421,\t\t0\t\t],\n\t\t[60,\t\t160,\t\t0\t\t],\n\t\t[535,\t\t161,\t\t0\t\t],\n\t\t[267,\t\t282,\t\t0\t\t],\n\t\t[52,\t\t365,\t\t0\t\t],\n\t\t[28,\t\t27,\t\t0\t\t],\n\t\t[30,\t\t201,\t\t1\t\t],\n\t\t[422,\t\t81,\t\t0\t\t],\n\t\t[119,\t\t425,\t\t0\t\t],\n\t\t[423,\t\t425,\t\t0\t\t],\n\t\t[424,\t\t425,\t\t0\t\t],\n\t\t[426,\t\t428,\t\t0\t\t],\n\t\t[427,\t\t428,\t\t0\t\t],\n\t\t[19,\t\t428,\t\t1\t\t],\n\t\t[45,\t\t429,\t\t0\t\t],\n\t\t[44,\t\t429,\t\t0\t\t],\n\t\t[505,\t\t429,\t\t0\t\t],\n\t\t[231,\t\t431,\t\t1\t\t],\n\t\t[190,\t\t431,\t\t1\t\t],\n\t\t[430,\t\t431,\t\t0\t\t],\n\t\t[286,\t\t433,\t\t0\t\t],\n\t\t[432,\t\t433,\t\t0\t\t],\n\t\t[506,\t\t433,\t\t0\t\t],\n\t\t[23,\t\t434,\t\t0\t\t],\n\t\t[400,\t\t434,\t\t0\t\t],\n\t\t[500,\t\t434,\t\t0\t\t],\n\t\t[32,\t\t436,\t\t0\t\t],\n\t\t[435,\t\t436,\t\t0\t\t],\n\t\t[78,\t\t436,\t\t1\t\t],\n\t\t[86,\t\t438,\t\t1\t\t],\n\t\t[437,\t\t438,\t\t0\t\t],\n\t\t[221,\t\t438,\t\t0\t\t],\n\t\t[207,\t\t439,\t\t0\t\t],\n\t\t[516,\t\t439,\t\t0\t\t],\n\t\t[513,\t\t439,\t\t0\t\t],\n\t\t[181,\t\t441,\t\t1\t\t],\n\t\t[440,\t\t441,\t\t0\t\t],\n\t\t[504,\t\t441,\t\t1\t\t],\n\t\t[135,\t\t442,\t\t0\t\t],\n\t\t[109,\t\t442,\t\t0\t\t],\n\t\t[112,\t\t442,\t\t0\t\t],\n\t\t[113,\t\t443,\t\t0\t\t],\n\t\t[132,\t\t443,\t\t0\t\t],\n\t\t[107,\t\t443,\t\t0\t\t],\n\t\t[444,\t\t445,\t\t0\t\t],\n\t\t[112,\t\t445,\t\t0\t\t],\n\t\t[109,\t\t445,\t\t0\t\t],\n\t\t[119,\t\t447,\t\t1\t\t],\n\t\t[100,\t\t447,\t\t1\t\t],\n\t\t[446,\t\t447,\t\t0\t\t],\n\t\t[124,\t\t448,\t\t0\t\t],\n\t\t[125,\t\t448,\t\t0\t\t],\n\t\t[131,\t\t448,\t\t0\t\t],\n\t\t[449,\t\t450,\t\t0\t\t],\n\t\t[173,\t\t450,\t\t0\t\t],\n\t\t[184,\t\t450,\t\t0\t\t],\n\t\t[144,\t\t451,\t\t0\t\t],\n\t\t[140,\t\t451,\t\t0\t\t],\n\t\t[514,\t\t451,\t\t0\t\t],\n\t\t[
537,\t\t585,\t\t1\t\t],\n\t\t[141,\t\t585,\t\t0\t\t],\n\t\t[584,\t\t585,\t\t0\t\t],\n\t\t[522,\t\t454,\t\t0\t\t],\n\t\t[144,\t\t454,\t\t0\t\t],\n\t\t[453,\t\t454,\t\t0\t\t],\n\t\t[199,\t\t456,\t\t0\t\t],\n\t\t[140,\t\t456,\t\t0\t\t],\n\t\t[455,\t\t456,\t\t0\t\t],\n\t\t[537,\t\t456,\t\t0\t\t],\n\t\t[538,\t\t457,\t\t0\t\t],\n\t\t[153,\t\t457,\t\t0\t\t],\n\t\t[176,\t\t457,\t\t0\t\t],\n\t\t[524,\t\t459,\t\t0\t\t],\n\t\t[458,\t\t459,\t\t0\t\t],\n\t\t[134,\t\t459,\t\t0\t\t],\n\t\t[460,\t\t461,\t\t0\t\t],\n\t\t[150,\t\t461,\t\t0\t\t],\n\t\t[149,\t\t461,\t\t0\t\t],\n\t\t[521,\t\t463,\t\t0\t\t],\n\t\t[462,\t\t463,\t\t0\t\t],\n\t\t[538,\t\t463,\t\t0\t\t],\n\t\t[110,\t\t464,\t\t0\t\t],\n\t\t[90,\t\t464,\t\t0\t\t],\n\t\t[165,\t\t464,\t\t0\t\t],\n\t\t[458,\t\t465,\t\t0\t\t],\n\t\t[134,\t\t465,\t\t0\t\t],\n\t\t[524,\t\t465,\t\t0\t\t],\n\t\t[466,\t\t467,\t\t0\t\t],\n\t\t[110,\t\t467,\t\t0\t\t],\n\t\t[165,\t\t467,\t\t0\t\t],\n\t\t[468,\t\t469,\t\t0\t\t],\n\t\t[541,\t\t469,\t\t0\t\t],\n\t\t[490,\t\t469,\t\t0\t\t],\n\t\t[263,\t\t471,\t\t0\t\t],\n\t\t[470,\t\t471,\t\t0\t\t],\n\t\t[534,\t\t471,\t\t0\t\t],\n\t\t[136,\t\t472,\t\t0\t\t],\n\t\t[110,\t\t472,\t\t0\t\t],\n\t\t[251,\t\t472,\t\t0\t\t],\n\t\t[226,\t\t474,\t\t0\t\t],\n\t\t[473,\t\t474,\t\t0\t\t],\n\t\t[257,\t\t474,\t\t0\t\t],\n\t\t[6,\t\t474,\t\t1\t\t],\n\t\t[299,\t\t475,\t\t1\t\t],\n\t\t[3,\t\t475,\t\t0\t\t],\n\t\t[210,\t\t475,\t\t0\t\t],\n\t\t[297,\t\t476,\t\t0\t\t],\n\t\t[296,\t\t476,\t\t0\t\t],\n\t\t[295,\t\t476,\t\t0\t\t],\n\t\t[313,\t\t478,\t\t1\t\t],\n\t\t[477,\t\t478,\t\t0\t\t],\n\t\t[245,\t\t478,\t\t0\t\t],\n\t\t[479,\t\t481,\t\t0\t\t],\n\t\t[565,\t\t481,\t\t0\t\t],\n\t\t[480,\t\t481,\t\t0\t\t],\n\t\t[415,\t\t482,\t\t0\t\t],\n\t\t[56,\t\t482,\t\t0\t\t],\n\t\t[409,\t\t482,\t\t0\t\t],\n\t\t[483,\t\t484,\t\t0\t\t],\n\t\t[3,\t\t484,\t\t0\t\t],\n\t\t[301,\t\t484,\t\t0\t\t],\n\t\t[233,\t\t485,\t\t0\t\t],\n\t\t[392,\t\t485,\t\t0\t\t],\n\t\t[391,\t\t485,\t\t0\t\t],\n\t\t[579,\t\t488,\t\t0\t\t],\n\t\t[486,\t\t488,\t\t0\t\t],\n\t\t[487,\t\t488,\t\t0\t\t],\n\t\t[270,\t\t489,\t\t0\t\t],\n\t\t[331,\t\t489,\t\t0\t\t],\n\t\t[396,\t\t489,\t\t1\t\t],\n\t\t[519,\t\t253,\t\t0\t\t],\n\t\t[382,\t\t349,\t\t1\t\t],\n\t\t[349,\t\t351,\t\t0\t\t],\n\t\t[459,\t\t465,\t\t0\t\t],\n\t\t[549,\t\t550,\t\t0\t\t],\n\t\t[550,\t\t551,\t\t0\t\t],\n\t\t[194,\t\t195,\t\t0\t\t],\n\t\t[247,\t\t248,\t\t0\t\t],\n\t\t[2,\t\t294,\t\t0\t\t],\n\t\t[549,\t\t551,\t\t0\t\t],\n\t\t[54,\t\t365,\t\t0\t\t],\n\t\t[131,\t\t265,\t\t0\t\t],\n\t\t[91,\t\t92,\t\t0\t\t],\n\t\t[247,\t\t249,\t\t0\t\t],\n\t\t[186,\t\t191,\t\t0\t\t],\n\t\t[129,\t\t173,\t\t0\t\t],\n\t\t[96,\t\t202,\t\t0\t\t],\n\t\t[53,\t\t320,\t\t0\t\t],\n\t\t[24,\t\t396,\t\t0\t\t],\n\t\t[133,\t\t156,\t\t0\t\t],\n\t\t[442,\t\t452,\t\t0\t\t],\n\t\t[445,\t\t452,\t\t0\t\t],\n\t\t[247,\t\t250,\t\t0\t\t],\n\t\t[187,\t\t195,\t\t0\t\t],\n\t\t[216,\t\t236,\t\t0\t\t],\n\t\t[244,\t\t389,\t\t0\t\t],\n\t\t[394,\t\t406,\t\t0\t\t],\n\t\t[442,\t\t445,\t\t0\t\t],\n\t\t[442,\t\t444,\t\t0\t\t],\n\t\t[198,\t\t472,\t\t0\t\t],\n\t\t[464,\t\t467,\t\t0\t\t],\n\t\t[198,\t\t251,\t\t0\t\t],\n\t\t[112,\t\t143,\t\t0\t\t],\n\t\t[2,\t\t490,\t\t0\t\t],\n\t\t[5,\t\t491,\t\t0\t\t],\n\t\t[10,\t\t492,\t\t0\t\t],\n\t\t[12,\t\t493,\t\t0\t\t],\n\t\t[13,\t\t494,\t\t0\t\t],\n\t\t[15,\t\t495,\t\t0\t\t],\n\t\t[18,\t\t496,\t\t0\t\t],\n\t\t[20,\t\t497,\t\t0\t\t],\n\t\t[22,\t\t498,\t\t0\t\t],\n\t\t[24,\t\t499,\t\t0\t\t],\n\t\t[26,\t\t500,\t\t0\t\t],\n\t\t[30,\t\t501,\t\t0\t\t],\n\t\t[32,\t\t502,\t\t0\t\t],\n\t\t[37,\t\t503,\t\t0\t\t],\n\t\t[42,\t\t504,\t\t0\t\t],\n\t\t[46,\t\t505,\t\t0\t
\t],\n\t\t[52,\t\t506,\t\t0\t\t],\n\t\t[56,\t\t507,\t\t0\t\t],\n\t\t[61,\t\t508,\t\t0\t\t],\n\t\t[68,\t\t509,\t\t0\t\t],\n\t\t[69,\t\t510,\t\t0\t\t],\n\t\t[74,\t\t511,\t\t0\t\t],\n\t\t[78,\t\t512,\t\t0\t\t],\n\t\t[86,\t\t513,\t\t0\t\t],\n\t\t[87,\t\t514,\t\t0\t\t],\n\t\t[94,\t\t515,\t\t0\t\t],\n\t\t[95,\t\t516,\t\t0\t\t],\n\t\t[96,\t\t517,\t\t0\t\t],\n\t\t[99,\t\t518,\t\t0\t\t],\n\t\t[100,\t\t519,\t\t0\t\t],\n\t\t[104,\t\t520,\t\t0\t\t],\n\t\t[105,\t\t521,\t\t0\t\t],\n\t\t[106,\t\t522,\t\t0\t\t],\n\t\t[107,\t\t523,\t\t0\t\t],\n\t\t[117,\t\t524,\t\t0\t\t],\n\t\t[120,\t\t525,\t\t0\t\t],\n\t\t[123,\t\t526,\t\t0\t\t],\n\t\t[124,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t528,\t\t0\t\t],\n\t\t[128,\t\t529,\t\t0\t\t],\n\t\t[129,\t\t530,\t\t0\t\t],\n\t\t[138,\t\t531,\t\t0\t\t],\n\t\t[143,\t\t532,\t\t0\t\t],\n\t\t[156,\t\t533,\t\t0\t\t],\n\t\t[157,\t\t534,\t\t0\t\t],\n\t\t[159,\t\t535,\t\t0\t\t],\n\t\t[160,\t\t536,\t\t0\t\t],\n\t\t[165,\t\t537,\t\t0\t\t],\n\t\t[184,\t\t538,\t\t0\t\t],\n\t\t[191,\t\t539,\t\t0\t\t],\n\t\t[195,\t\t540,\t\t0\t\t],\n\t\t[201,\t\t541,\t\t0\t\t],\n\t\t[220,\t\t542,\t\t0\t\t],\n\t\t[231,\t\t543,\t\t0\t\t],\n\t\t[232,\t\t544,\t\t0\t\t],\n\t\t[233,\t\t545,\t\t0\t\t],\n\t\t[236,\t\t546,\t\t0\t\t],\n\t\t[245,\t\t547,\t\t0\t\t],\n\t\t[246,\t\t548,\t\t0\t\t],\n\t\t[248,\t\t549,\t\t0\t\t],\n\t\t[249,\t\t550,\t\t0\t\t],\n\t\t[250,\t\t551,\t\t0\t\t],\n\t\t[259,\t\t552,\t\t0\t\t],\n\t\t[261,\t\t553,\t\t0\t\t],\n\t\t[262,\t\t554,\t\t0\t\t],\n\t\t[265,\t\t555,\t\t0\t\t],\n\t\t[270,\t\t556,\t\t0\t\t],\n\t\t[277,\t\t557,\t\t0\t\t],\n\t\t[279,\t\t558,\t\t0\t\t],\n\t\t[280,\t\t559,\t\t0\t\t],\n\t\t[290,\t\t560,\t\t0\t\t],\n\t\t[301,\t\t561,\t\t0\t\t],\n\t\t[305,\t\t562,\t\t0\t\t],\n\t\t[306,\t\t563,\t\t0\t\t],\n\t\t[310,\t\t564,\t\t0\t\t],\n\t\t[313,\t\t565,\t\t0\t\t],\n\t\t[315,\t\t566,\t\t0\t\t],\n\t\t[320,\t\t567,\t\t0\t\t],\n\t\t[330,\t\t568,\t\t0\t\t],\n\t\t[332,\t\t569,\t\t0\t\t],\n\t\t[334,\t\t570,\t\t0\t\t],\n\t\t[336,\t\t571,\t\t0\t\t],\n\t\t[349,\t\t572,\t\t0\t\t],\n\t\t[351,\t\t573,\t\t0\t\t],\n\t\t[358,\t\t574,\t\t0\t\t],\n\t\t[360,\t\t575,\t\t0\t\t],\n\t\t[380,\t\t576,\t\t0\t\t],\n\t\t[382,\t\t577,\t\t0\t\t],\n\t\t[383,\t\t578,\t\t0\t\t],\n\t\t[389,\t\t579,\t\t0\t\t],\n\t\t[401,\t\t580,\t\t0\t\t],\n\t\t[402,\t\t581,\t\t0\t\t],\n\t\t[409,\t\t582,\t\t0\t\t],\n\t\t[415,\t\t583,\t\t0\t\t],\n\t\t[444,\t\t584,\t\t0\t\t],\n\t\t[452,\t\t585,\t\t0\t\t]\n\t])\n\tppc[\"parameters\"] = {\n\t\t\"x_trans_sg\": 0.003, \n\t\t\"x_trans_fm\": 0.001, \n\t\t\"x_trans_fl\": 0.001, \n\t\t\"d_l\": 1e-3, \n\t\t\"d_l_perturb\": 1e-5, \n\t\t\"w_1_ij\": 1, \n\t\t\"w_2_ij\": 1, \n\t\t\"w_3_ij\": 1, \n\t\t\"w_4_ij\": 1, \n\t\t\"b_r\": 238, \n\t\t\"b_c\": 248 }\n\treturn ppc", "from numpy import array\ndef scigrid_2011_01_08_02():\n\tppc = {\"version\": '2'}\n\tppc[\"baseMVA\"] = 100.0\n\tppc[\"bus\"] = 
array([\n\t\t[586,\t\t3,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[589,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[590,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[593,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[595,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[598,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[599,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[601,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[602,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[603,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[607,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[608,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[609,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[612,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[614,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[616,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[617,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[618,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[619,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[624,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[629,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[632,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[637,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[638,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[640,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[641,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[642,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[643,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[647,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[652,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[655,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[661,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[663,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[666,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[668,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[670,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[672,\t\t2,\t\
t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[681,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[683,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[687,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[694,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[695,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[696,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[697,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[698,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[702,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[704,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[705,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[707,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[713,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[714,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[716,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[717,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[719,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[724,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[730,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[732,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[735,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[738,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[741,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[742,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[743,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[747,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[748,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[749,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[750,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[753,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[758,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[761,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[762,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[763,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[765,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[767,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,
\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[772,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[774,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[777,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[778,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[781,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[784,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[785,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[788,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[789,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[791,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[792,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[795,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[800,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[801,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[802,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[805,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[806,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[808,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[809,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[811,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[814,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[816,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[817,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[821,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[822,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[826,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[830,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[835,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[836,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[839,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[841,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[844,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[845,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[849,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[850,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[851,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\
t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[853,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[855,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[856,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[857,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[858,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[860,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[865,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[869,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[870,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[872,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[874,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[875,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[882,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[883,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[885,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[886,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[889,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[890,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[893,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[894,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[895,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[896,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[898,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[900,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[902,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[903,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[905,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[906,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[907,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[909,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[913,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[915,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[917,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[918,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[920,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[921,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[922,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[923,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[925,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[931,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[935,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[936,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[937,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[939,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[940,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[944,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[950,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[952,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[958,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[959,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[960,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[963,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[965,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[966,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[967,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[969,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999649,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[971,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[973,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[976,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[978,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[980,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[981,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[982,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[983,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[984,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[985,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[986,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[987,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[988,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[993,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[994,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[995,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[997,\t\t2,\t\t0,\t
0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[999, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1000, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1002, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1003, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1010, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1011, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1012, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1014, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1026, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1027, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1028, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1029, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1031, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1032, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1033, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1035, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1036, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1037, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1039, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1040, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1041, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1042, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1043, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1045, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1046, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1047, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1048, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1050, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1051, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1052, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1054, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1055, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1056, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1058, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1059, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1060, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1062, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1063, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1064, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1066, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1067, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1068, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1069, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1070, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1071, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1072, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1074, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1075, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1076, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1077, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1078, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1079, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1080, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1081, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1082, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1083, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1084, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1085, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1086, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1087, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1088, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1089, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1090, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1091, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1092, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1093, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1094, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1095, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1096, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1097, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1098, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1099, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1100, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1101, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1102, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1103, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1104, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1105, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1106, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1107, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1108, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1109, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1110, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1111, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1112, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1113, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1114, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1115, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1116, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1117, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1118, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1119, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1120, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1121, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1122, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1123, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1124, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1125, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1126, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1127, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1128, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1129, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1130, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1131, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1132, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1133, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1134, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1135, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1136, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1137, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1138, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1139, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1140, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1141, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1142, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1143, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1144, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1145, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1146, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1147, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1148, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1149, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1150, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1151, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1152, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1153, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1154, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1155, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1156, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1157, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1158, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1159, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1160, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1161, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1162, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1163, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1164, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1165, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1166, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1167, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1169, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1170, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1171, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1172, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1173, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1174, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1175, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1176, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1177, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1178, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1179, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1180, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1181, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1182, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1183, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1184, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1185, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1186, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1187, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1188, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1189, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1190, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1191, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1192, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1193, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1194, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1195, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1196, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1197, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1198, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1199, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1200, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1201, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1202, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1203, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1205, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1206, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1207, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1208, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1209, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1210, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1211, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1212, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1213, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1214, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1215, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1216, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1217, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1218, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1219, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1220, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1221, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1222, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1223, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1224, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1225, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1226, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1227, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1228, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1229, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1230, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1231, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1232, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1233, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1235, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1236, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1237, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1238, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1239, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1240, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1241, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1242, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1243, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1244, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1245, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1246, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1247, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1248, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1249, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1250, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1251, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1252, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1253, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1254, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1255, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1256, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1257, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1258, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1259, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1260, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1261, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1262, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1263, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1264, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1265, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1266, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1267, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1268, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1269, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1270, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1271, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1272, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1273, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1274, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1275, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1276, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1277, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1278, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1279, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1280, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1281, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1282, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1283, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1284, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1285, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1286, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1287, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1288, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1289, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1290, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1291, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1292, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1293, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1294, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1295, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1296, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1297, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1298, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1299, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1300, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1301, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1302, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1303, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1304, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1305, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1306, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1307, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1308, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1309, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1310, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1311, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1312, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1313, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1314, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1315, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1316, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1317, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1318, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1319, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1320, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1321, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1322, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1323, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1324, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1325, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1326, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1327, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1328, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1329, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1330, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1331, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1332, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1333, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1334, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1335, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1336, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1337, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1338, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1339, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1340, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1341, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1342, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1343, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1344, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1345, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1346, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1347, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1348, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1349, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1350, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1352, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1355, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1356, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1357, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1358, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1359, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1360, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1361, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1362, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1363, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1364, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1365, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1366, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1367, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1368, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1369, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1370, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1371, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1372, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1373, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1374, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1375, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1376, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1377, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1378, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1379, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1380, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1381, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1382, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1383, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1384, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1385, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1386, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1387, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1388, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1389, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1390, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1391, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1392, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1393, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1394, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1395, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1396, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1397, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1398, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1399, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1400, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1401, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1402, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1403, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1404, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1405, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1406, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1407, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1408, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1409, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1410, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1411, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1412, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1413, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1414, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1415, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1416, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1417, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1418, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1419, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1420, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1421, 2, 0, 0, 0, 0, 0, 0.999649, 0, 220.0, 0, 1.1, 0.9],
		[1422, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1423, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1424, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[1425, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1426, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1427, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1428, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1429, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1430, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1431, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1432, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1433, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1434, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1435, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1436, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1437, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1438, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1439, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1440, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1441, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1442, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1443, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1444, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1445, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1446, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1447, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1448, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1449, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1450, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1451, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1452, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1453, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1454, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1455, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1456, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1457, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1458, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1459, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1460, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1461, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1462, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1463, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1464, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1465, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1466, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1467, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1468, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1469, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1470, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1471, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1472, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1473, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1474, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1475, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1476, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1477, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1479, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1480, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1481, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1482, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1483, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1484, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1485, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1486, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1487, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1488, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1489, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1490, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1491, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1492, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1493, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1494, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1495, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1496, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1497, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1498, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1499, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1500, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1501, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1502, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1503, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1504, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1505, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1506, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1507, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1508, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1510, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1511, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1512, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1513, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1514, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1516, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1517, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1518, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1519, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[1, 1, 227.674498, 45.5349, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[2, 1, 0, 0, 0, 0, 0, 1.000012, 0, 380.0, 0, 1.1, 0.9],
		[3, 1, 39.905216, 7.981043, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[4, 1, 65.625451, 13.12509, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[5, 1, 0, 0, 0, 0, 0, 0.999455, 0, 380.0, 0, 1.1, 0.9],
		[6, 1, 192.703526, 38.540705, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[7, 1, 145.22607, 29.045214, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[8, 1, 121.5148, 24.30296, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[9, 1, 82.17856, 16.435712, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[10, 1, 0, 0, 0, 0, 0, 0.999253, 0, 380.0, 0, 1.1, 0.9],
		[11, 1, 72.002427, 14.400485, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[12, 1, 0, 0, 0, 0, 0, 1.000559, 0, 380.0, 0, 1.1, 0.9],
		[13, 1, 0, 0, 0, 0, 0, 0.999952, 0, 380.0, 0, 1.1, 0.9],
		[14, 1, 172.203392, 34.440678, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[15, 1, 0, 0, 0, 0, 0, 1.000367, 0, 380.0, 0, 1.1, 0.9],
		[16, 1, 293.686602, 58.73732, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[17, 1, 69.17091, 13.834182, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[18, 1, 0, 0, 0, 0, 0, 1.002402, 0, 380.0, 0, 1.1, 0.9],
		[19, 1, 170.895243, 34.179049, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[20, 1, 0, 0, 0, 0, 0, 0.997353, 0, 380.0, 0, 1.1, 0.9],
		[21, 1, 734.875759, 146.975152, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[22, 1, 0, 0, 0, 0, 0, 0.999861, 0, 380.0, 0, 1.1, 0.9],
		[23, 1, 96.220153, 19.244031, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[24, 1, 0, 0, 0, 0, 0, 0.999988, 0, 380.0, 0, 1.1, 0.9],
		[25, 1, 46.02277, 9.204554, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[26, 1, 0, 0, 0, 0, 0, 1.000526, 0, 380.0, 0, 1.1, 0.9],
		[27, 1, 56.494224, 11.298845, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[28, 1, 166.923508, 33.384702, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[29, 1, 61.31448, 12.262896, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[30, 1, 0, 0, 0, 0, 0, 0.999287, 0, 380.0, 0, 1.1, 0.9],
		[31, 1, 120.665313, 24.133063, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[32, 1, 0, 0, 0, 0, 0, 0.999286, 0, 380.0, 0, 1.1, 0.9],
		[33, 1, 151.291627, 30.258325, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[34, 1, 30.015549, 6.00311, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[35, 1, 1.987188, 0.397438, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[36, 1, 6.579294, 1.315859, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[37, 1, 0, 0, 0, 0, 0, 1.003418, 0, 380.0, 0, 1.1, 0.9],
		[38, 1, 158.509874, 31.701975, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[39, 1, 51.903818, 10.380764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[40, 1, 54.21516, 10.843032, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[41, 1, 58.26901, 11.653802, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[42, 1, 0, 0, 0, 0, 0, 1.001245, 0, 380.0, 0, 1.1, 0.9],
		[43, 1, 89.358153, 17.871631, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[44, 1, 114.320508, 22.864102, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[45, 1, 60.683882, 12.136776, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[46, 1, 0, 0, 0, 0, 0, 1.000198, 0, 380.0, 0, 1.1, 0.9],
		[47, 1, 263.85839, 52.771678, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[48, 1, 181.367506, 36.273501, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[49, 1, 45.876829, 9.175366, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[50, 1, 66.802855, 13.360571, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[51, 1, 86.572139, 17.314428, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[52, 1, 0, 0, 0, 0, 0, 1.000194, 0, 380.0, 0, 1.1, 0.9],
		[53, 1, 131.359356, 26.271871, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[54, 1, 66.738201, 13.34764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[55, 1, 65.450672, 13.090134, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[56, 1, 0, 0, 0, 0, 0, 0.999776, 0, 380.0, 0, 1.1, 0.9],
		[57, 1, 78.127657, 15.625531, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[58, 1, 178.963281, 35.792656, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[59, 1, 51.113007, 10.222601, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[60, 1, 26.948195, 5.389639, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[61, 1, 0, 0, 0, 0, 0, 1.000864, 0, 380.0, 0, 1.1, 0.9],
		[62, 1, 205.447094, 41.089419, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[63, 1, 121.273661, 24.254732, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[64, 1, 1286.959306, 257.391861, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[65, 1, 4.28817, 0.857634, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[66, 1, 136.058744, 27.211749, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[67, 1, 291.868925, 58.373785, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[68, 1, 0, 0, 0, 0, 0, 0.999046, 0, 380.0, 0, 1.1, 0.9],
		[69, 1, 0, 0, 0, 0, 0, 1.000268, 0, 380.0, 0, 1.1, 0.9],
		[70, 1, 552.149436, 110.429887, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[71, 1, 128.312417, 25.662483, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[72, 1, 210.158131, 42.031626, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[73, 1, 67.279537, 13.455907, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[74, 1, 0, 0, 0, 0, 0, 1.001262, 0, 380.0, 0, 1.1, 0.9],
		[75, 1, 83.853983, 16.770797, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[76, 1, 80.937491, 16.187498, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[77, 1, 78.393492, 15.678698, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[78, 1, 0, 0, 0, 0, 0, 0.998244, 0, 380.0, 0, 1.1, 0.9],
		[79, 1, 80.947322, 16.189464, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[80, 1, 85.978546, 17.195709, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[81, 1, 97.058069, 19.411614, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[82, 1, 3.23015, 0.64603, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[83, 1, 216.120823, 43.224165, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[84, 1, 21.275762, 4.255152, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[85, 1, 73.78021, 14.756042, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[86, 1, 0, 0, 0, 0, 0, 0.999977, 0, 380.0, 0, 1.1, 0.9],
		[87, 1, 0, 0, 0, 0, 0, 0.999822, 0, 380.0, 0, 1.1, 0.9],
		[88, 1, 59.550408, 11.910082, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[89, 1, 73.881396, 14.776279, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[90, 1, 85.329751, 17.06595, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[91, 1, 29.639307, 5.927861, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[92, 1, 32.346882, 6.469376, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[93, 1, 31.725811, 6.345162, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[94, 1, 0, 0, 0, 0, 0, 1.001073, 0, 380.0, 0, 1.1, 0.9],
		[95, 1, 0, 0, 0, 0, 0, 1.000574, 0, 380.0, 0, 1.1, 0.9],
		[96, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9],
		[97, 1, 4.461998, 0.8924, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[98, 1, 82.038201, 16.40764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[99, 1, 0, 0, 0, 0, 0, 1.000583, 0, 380.0, 0, 1.1, 0.9],
		[100, 1, 0, 0, 0, 0, 0, 1.001633, 0, 380.0, 0, 1.1, 0.9],
		[101, 1, 58.091413, 11.618283, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[102, 1, 112.438637, 22.487727, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[103, 1, 131.462524, 26.292505, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[104, 1, 0, 0, 0, 0, 0, 0.999975, 0, 380.0, 0, 1.1, 0.9],
		[105, 1, 0, 0, 0, 0, 0, 1.000117, 0, 380.0, 0, 1.1, 0.9],
		[106, 1, 0, 0, 0, 0, 0, 0.999954, 0, 380.0, 0, 1.1, 0.9],
		[107, 1, 0, 0, 0, 0, 0, 0.999995, 0, 380.0, 0, 1.1, 0.9],
		[108, 1, 92.730783, 18.546157, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[109, 1, 37.545112, 7.509022, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[110, 1, 48.73506, 9.747012, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[111, 1, 85.884344, 17.176869, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[112, 1, 43.468304, 8.693661, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[113, 1, 68.521794, 13.704359, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[114, 1, 100.915847, 20.183169, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[115, 1, 65.054514, 13.010903, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[116, 1, 108.859782, 21.771956, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[117, 1, 0, 0, 0, 0, 0, 1.000505, 0, 380.0, 0, 1.1, 0.9],
		[118, 1, 168.553796, 33.710759, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[119, 1, 32.672647, 6.534529, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[120, 1, 0, 0, 0, 0, 0, 1.001305, 0, 380.0, 0, 1.1, 0.9],
		[121, 1, 44.36947, 8.873894, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[122, 1, 38.84502, 7.769004, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[123, 1, 0, 0, 0, 0, 0, 1.000236, 0, 380.0, 0, 1.1, 0.9],
		[124, 1, 0, 0, 0, 0, 0, 1.000004, 0, 380.0, 0, 1.1, 0.9],
		[125, 1, 0, 0, 0, 0, 0, 0.99991, 0, 380.0, 0, 1.1, 0.9],
		[126, 1, 203.665405, 40.733081, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[127, 1, 157.454784, 31.490957, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[128, 1, 0, 0, 0, 0, 0, 1.001307, 0, 380.0, 0, 1.1, 0.9],
		[129, 1, 0, 0, 0, 0, 0, 1.000001, 0, 380.0, 0, 1.1, 0.9],
		[130, 1, 217.101505, 43.420301, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[131, 1, 47.935824, 9.587165, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[132, 1, 124.817639, 24.963528, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[133, 1, 41.809019, 8.361804, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],
		[134, 1, 41.637812, 8.327562, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],
		[135,
\t\t1,\t\t41.693017,\t\t8.338603,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[136,\t\t1,\t\t40.389256,\t\t8.077851,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[137,\t\t1,\t\t32.307687,\t\t6.461537,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[138,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999757,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[139,\t\t1,\t\t63.287484,\t\t12.657497,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[140,\t\t1,\t\t43.766005,\t\t8.753201,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[141,\t\t1,\t\t51.854991,\t\t10.370998,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[142,\t\t1,\t\t57.059001,\t\t11.4118,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[143,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999989,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[144,\t\t1,\t\t51.974851,\t\t10.39497,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[145,\t\t1,\t\t151.196216,\t\t30.239243,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[146,\t\t1,\t\t194.920365,\t\t38.984073,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[147,\t\t1,\t\t119.474706,\t\t23.894941,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[148,\t\t1,\t\t168.600743,\t\t33.720149,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[149,\t\t1,\t\t108.695679,\t\t21.739136,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[150,\t\t1,\t\t141.913495,\t\t28.382699,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[151,\t\t1,\t\t33.441698,\t\t6.68834,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[152,\t\t1,\t\t69.421498,\t\t13.8843,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[153,\t\t1,\t\t123.859243,\t\t24.771849,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[154,\t\t1,\t\t127.228021,\t\t25.445604,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[155,\t\t1,\t\t132.519228,\t\t26.503846,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[156,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999993,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[157,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001254,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[158,\t\t1,\t\t34.914403,\t\t6.982881,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[159,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001284,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[160,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000004,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[161,\t\t1,\t\t108.389229,\t\t21.677846,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[162,\t\t1,\t\t162.009775,\t\t32.401955,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[163,\t\t1,\t\t32.400425,\t\t6.480085,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[164,\t\t1,\t\t32.530726,\t\t6.506145,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[165,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000028,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[166,\t\t1,\t\t38.033682,\t\t7.606736,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t]
,\n\t\t[167,\t\t1,\t\t53.503817,\t\t10.700763,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[168,\t\t1,\t\t36.515672,\t\t7.303134,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[169,\t\t1,\t\t125.003675,\t\t25.000735,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[170,\t\t1,\t\t93.929721,\t\t18.785944,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[171,\t\t1,\t\t80.168981,\t\t16.033796,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[172,\t\t1,\t\t39.344752,\t\t7.86895,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[173,\t\t1,\t\t37.585883,\t\t7.517177,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[174,\t\t1,\t\t56.402943,\t\t11.280589,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[175,\t\t1,\t\t37.561249,\t\t7.51225,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[176,\t\t1,\t\t130.887008,\t\t26.177402,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[177,\t\t1,\t\t21.343034,\t\t4.268607,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[178,\t\t1,\t\t113.037942,\t\t22.607588,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[179,\t\t1,\t\t41.65058,\t\t8.330116,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[180,\t\t1,\t\t36.611926,\t\t7.322385,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[181,\t\t1,\t\t27.633627,\t\t5.526725,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[182,\t\t1,\t\t1.251816,\t\t0.250363,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[183,\t\t1,\t\t374.707969,\t\t74.941594,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[184,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000067,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[185,\t\t1,\t\t80.129132,\t\t16.025826,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[186,\t\t1,\t\t43.149121,\t\t8.629824,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[187,\t\t1,\t\t25.237842,\t\t5.047568,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[188,\t\t1,\t\t37.561249,\t\t7.51225,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[189,\t\t1,\t\t137.826242,\t\t27.565248,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[190,\t\t1,\t\t182.300992,\t\t36.460198,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[191,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[192,\t\t1,\t\t43.903601,\t\t8.78072,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[193,\t\t1,\t\t37.50066,\t\t7.500132,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[194,\t\t1,\t\t25.887306,\t\t5.177461,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[195,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[196,\t\t1,\t\t36.318382,\t\t7.263676,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[197,\t\t1,\t\t57.541655,\t\t11.508331,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[198,\t\t1,\t\t34.05007,\t\t6.810014,\t\t0,\t\t0,\t\t0,\t\t1.0,\
t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[199,\t\t1,\t\t43.838332,\t\t8.767666,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[200,\t\t1,\t\t37.562121,\t\t7.512424,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[201,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.997717,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[202,\t\t1,\t\t38.490512,\t\t7.698102,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[203,\t\t1,\t\t5.07147,\t\t1.014294,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[204,\t\t1,\t\t148.64377,\t\t29.728754,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[205,\t\t1,\t\t74.328576,\t\t14.865715,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[206,\t\t1,\t\t35.672522,\t\t7.134504,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[207,\t\t1,\t\t106.074717,\t\t21.214943,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[208,\t\t1,\t\t31.234821,\t\t6.246964,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[209,\t\t1,\t\t43.405486,\t\t8.681097,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[210,\t\t1,\t\t49.86478,\t\t9.972956,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[211,\t\t1,\t\t175.236014,\t\t35.047203,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[212,\t\t1,\t\t43.920435,\t\t8.784087,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[213,\t\t1,\t\t205.889182,\t\t41.177836,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[214,\t\t1,\t\t138.537321,\t\t27.707464,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[215,\t\t1,\t\t292.94408,\t\t58.588816,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[216,\t\t1,\t\t98.776857,\t\t19.755371,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[217,\t\t1,\t\t31.651613,\t\t6.330323,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[218,\t\t1,\t\t96.427741,\t\t19.285548,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[219,\t\t1,\t\t154.971132,\t\t30.994226,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[220,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999933,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[221,\t\t1,\t\t88.403764,\t\t17.680753,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[222,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[223,\t\t1,\t\t87.613602,\t\t17.52272,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[224,\t\t1,\t\t101.88255,\t\t20.37651,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[225,\t\t1,\t\t182.935963,\t\t36.587193,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[226,\t\t1,\t\t63.905185,\t\t12.781037,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[227,\t\t1,\t\t79.6129,\t\t15.92258,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[228,\t\t1,\t\t78.058016,\t\t15.611603,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[229,\t\t1,\t\t172.729077,\t\t34.545815,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[230,\t\t1,\t\t41.
430297,\t\t8.286059,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[231,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000749,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[232,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[233,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999749,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[234,\t\t1,\t\t147.579325,\t\t29.515865,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[235,\t\t1,\t\t47.990829,\t\t9.598166,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[236,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[237,\t\t1,\t\t0.397179,\t\t0.079436,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[238,\t\t1,\t\t54.302497,\t\t10.860499,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[239,\t\t1,\t\t75.025708,\t\t15.005142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[240,\t\t1,\t\t473.247779,\t\t94.649556,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[241,\t\t1,\t\t350.186917,\t\t70.037383,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[242,\t\t1,\t\t127.509393,\t\t25.501879,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[243,\t\t1,\t\t102.874654,\t\t20.574931,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[244,\t\t1,\t\t122.567508,\t\t24.513502,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[245,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001414,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[246,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999934,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[247,\t\t1,\t\t24.322829,\t\t4.864566,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[248,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[249,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[250,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[251,\t\t1,\t\t60.363745,\t\t12.072749,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[252,\t\t1,\t\t154.805393,\t\t30.961079,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[253,\t\t1,\t\t67.965475,\t\t13.593095,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[254,\t\t1,\t\t21.700248,\t\t4.34005,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[255,\t\t1,\t\t106.720012,\t\t21.344002,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[256,\t\t1,\t\t122.389284,\t\t24.477857,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[257,\t\t1,\t\t59.067776,\t\t11.813555,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[258,\t\t1,\t\t192.494748,\t\t38.49895,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[259,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999541,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[260,\t\t1,\t\t119.801169,\t\t23.960234,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[261,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002014,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[262,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t
1.000043,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[263,\t\t1,\t\t171.854621,\t\t34.370924,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[264,\t\t1,\t\t222.475076,\t\t44.495015,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[265,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000011,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[266,\t\t1,\t\t107.218167,\t\t21.443633,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[267,\t\t1,\t\t135.607718,\t\t27.121544,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[268,\t\t1,\t\t47.15655,\t\t9.43131,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[269,\t\t1,\t\t37.868478,\t\t7.573696,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[270,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000003,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[271,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[272,\t\t1,\t\t0.772656,\t\t0.154531,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[273,\t\t1,\t\t105.66113,\t\t21.132226,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[274,\t\t1,\t\t205.391317,\t\t41.078263,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[275,\t\t1,\t\t38.450376,\t\t7.690075,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[276,\t\t1,\t\t149.88934,\t\t29.977868,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[277,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999512,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[278,\t\t1,\t\t117.013134,\t\t23.402627,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[279,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999472,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[280,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999631,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[281,\t\t1,\t\t154.560336,\t\t30.912067,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[282,\t\t1,\t\t218.572252,\t\t43.71445,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[283,\t\t1,\t\t87.613249,\t\t17.52265,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[284,\t\t1,\t\t132.913357,\t\t26.582671,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[285,\t\t1,\t\t59.274694,\t\t11.854939,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[286,\t\t1,\t\t124.230185,\t\t24.846037,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[287,\t\t1,\t\t76.354601,\t\t15.27092,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[288,\t\t1,\t\t49.110748,\t\t9.82215,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[289,\t\t1,\t\t77.236962,\t\t15.447392,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[290,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.004374,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[291,\t\t1,\t\t50.828733,\t\t10.165747,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[292,\t\t1,\t\t100.206517,\t\t20.041303,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[293,\t\t1,\t\t88.315793,\t\t17.663159,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[294,\t\t1,\t\t23.534824,\t\t4.706965,\t\t
0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[295,\t\t1,\t\t49.24305,\t\t9.84861,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[296,\t\t1,\t\t139.801134,\t\t27.960227,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[297,\t\t1,\t\t146.932561,\t\t29.386512,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[298,\t\t1,\t\t77.583313,\t\t15.516663,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[299,\t\t1,\t\t75.138922,\t\t15.027784,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[300,\t\t1,\t\t204.698771,\t\t40.939754,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[301,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999818,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[302,\t\t1,\t\t172.433674,\t\t34.486735,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[303,\t\t1,\t\t88.566935,\t\t17.713387,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[304,\t\t1,\t\t76.05249,\t\t15.210498,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[305,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999875,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[306,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001618,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[307,\t\t1,\t\t90.20532,\t\t18.041064,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[308,\t\t1,\t\t111.211141,\t\t22.242228,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[309,\t\t1,\t\t181.957067,\t\t36.391413,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[310,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00022,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[311,\t\t1,\t\t154.555966,\t\t30.911193,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[312,\t\t1,\t\t69.508118,\t\t13.901624,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[313,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000231,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[314,\t\t1,\t\t215.291906,\t\t43.058381,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[315,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001482,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[316,\t\t1,\t\t84.354168,\t\t16.870834,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[317,\t\t1,\t\t113.579797,\t\t22.715959,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[318,\t\t1,\t\t186.653536,\t\t37.330707,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[319,\t\t1,\t\t6.686676,\t\t1.337335,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[320,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[321,\t\t1,\t\t158.175895,\t\t31.635179,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[322,\t\t1,\t\t20.13681,\t\t4.027362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[323,\t\t1,\t\t2.095064,\t\t0.419013,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[324,\t\t1,\t\t370.356564,\t\t74.071313,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[325,\t\t1,\t\t120.645247,\t\t24.129049,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[326,\t\t1,\t\t9.781542,\t
\t1.956308,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[327,\t\t1,\t\t84.176849,\t\t16.83537,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[328,\t\t1,\t\t143.450288,\t\t28.690058,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[329,\t\t1,\t\t215.762021,\t\t43.152404,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[330,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001722,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[331,\t\t1,\t\t17.130771,\t\t3.426154,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[332,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.997834,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[333,\t\t1,\t\t179.997544,\t\t35.999509,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[334,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999854,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[335,\t\t1,\t\t183.701074,\t\t36.740215,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[336,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998486,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[337,\t\t1,\t\t73.070901,\t\t14.61418,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[338,\t\t1,\t\t198.324808,\t\t39.664962,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[339,\t\t1,\t\t122.661151,\t\t24.53223,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[340,\t\t1,\t\t103.707524,\t\t20.741505,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[341,\t\t1,\t\t93.753673,\t\t18.750735,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[342,\t\t1,\t\t162.631774,\t\t32.526355,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[343,\t\t1,\t\t89.222162,\t\t17.844432,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[344,\t\t1,\t\t223.701331,\t\t44.740266,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[345,\t\t1,\t\t244.608597,\t\t48.921719,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[346,\t\t1,\t\t242.833976,\t\t48.566795,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[347,\t\t1,\t\t84.923256,\t\t16.984651,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[348,\t\t1,\t\t221.994985,\t\t44.398997,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[349,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000773,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[350,\t\t1,\t\t116.461809,\t\t23.292362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[351,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000495,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[352,\t\t1,\t\t770.894988,\t\t154.178998,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[353,\t\t1,\t\t2.317568,\t\t0.463514,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[354,\t\t1,\t\t15.745356,\t\t3.149071,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[355,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[356,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[357,\t\t1,\t\t0.039469,\t\t0.007894,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[358,\t\t1,\t\t0,\t
\t0,\t\t0,\t\t0,\t\t0,\t\t1.001194,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[359,\t\t1,\t\t2.304434,\t\t0.460887,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[360,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000765,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[361,\t\t1,\t\t58.979909,\t\t11.795982,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[362,\t\t1,\t\t168.123266,\t\t33.624653,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[363,\t\t1,\t\t247.531933,\t\t49.506387,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[364,\t\t1,\t\t58.401752,\t\t11.68035,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[365,\t\t1,\t\t52.418673,\t\t10.483735,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[366,\t\t1,\t\t103.893643,\t\t20.778729,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[367,\t\t1,\t\t50.217872,\t\t10.043574,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[368,\t\t1,\t\t24.728106,\t\t4.945621,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[369,\t\t1,\t\t20.319914,\t\t4.063983,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[370,\t\t1,\t\t59.822407,\t\t11.964481,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[371,\t\t1,\t\t301.000014,\t\t60.200003,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[372,\t\t1,\t\t174.554233,\t\t34.910847,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[373,\t\t1,\t\t117.789322,\t\t23.557864,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[374,\t\t1,\t\t60.400371,\t\t12.080074,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[375,\t\t1,\t\t198.134187,\t\t39.626837,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[376,\t\t1,\t\t217.315886,\t\t43.463177,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[377,\t\t1,\t\t155.507891,\t\t31.101578,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[378,\t\t1,\t\t155.208571,\t\t31.041714,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[379,\t\t1,\t\t53.493746,\t\t10.698749,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[380,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001541,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[381,\t\t1,\t\t178.88635,\t\t35.77727,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[382,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000669,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[383,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99966,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[384,\t\t1,\t\t63.124549,\t\t12.62491,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[385,\t\t1,\t\t79.67557,\t\t15.935114,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[386,\t\t1,\t\t64.016932,\t\t12.803386,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[387,\t\t1,\t\t130.373096,\t\t26.074619,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[388,\t\t1,\t\t700.10162,\t\t140.020324,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[389,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999944,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\
t\t],\n\t\t[390,\t\t1,\t\t57.805753,\t\t11.561151,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[391,\t\t1,\t\t65.845683,\t\t13.169137,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[392,\t\t1,\t\t126.357203,\t\t25.271441,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[393,\t\t1,\t\t157.796506,\t\t31.559301,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[394,\t\t1,\t\t56.754867,\t\t11.350973,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[395,\t\t1,\t\t78.658738,\t\t15.731748,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[396,\t\t1,\t\t55.713179,\t\t11.142636,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[397,\t\t1,\t\t446.75833,\t\t89.351666,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[398,\t\t1,\t\t193.500683,\t\t38.700137,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[399,\t\t1,\t\t82.445384,\t\t16.489077,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[400,\t\t1,\t\t43.925519,\t\t8.785104,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[401,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000672,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[402,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000464,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[403,\t\t1,\t\t21.810042,\t\t4.362008,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[404,\t\t1,\t\t76.838127,\t\t15.367625,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[405,\t\t1,\t\t579.283512,\t\t115.856702,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[406,\t\t1,\t\t43.890743,\t\t8.778149,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[407,\t\t1,\t\t86.882688,\t\t17.376538,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[408,\t\t1,\t\t251.216009,\t\t50.243202,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[409,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[410,\t\t1,\t\t32.524912,\t\t6.504982,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[411,\t\t1,\t\t30.753636,\t\t6.150727,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[412,\t\t1,\t\t2.160106,\t\t0.432021,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[413,\t\t1,\t\t107.836406,\t\t21.567281,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[414,\t\t1,\t\t9.156478,\t\t1.831296,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[415,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000371,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[416,\t\t1,\t\t130.397874,\t\t26.079575,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[417,\t\t1,\t\t5.10222,\t\t1.020444,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[418,\t\t1,\t\t106.327191,\t\t21.265438,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[419,\t\t1,\t\t56.831128,\t\t11.366226,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[420,\t\t1,\t\t57.217397,\t\t11.443479,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[421,\t\t1,\t\t82.420201,\t\t16.48404,\t\t0,\t\t0,\t\t0,\t
\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[422,\t\t1,\t\t60.383802,\t\t12.07676,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[423,\t\t1,\t\t126.819327,\t\t25.363865,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[424,\t\t1,\t\t9.143347,\t\t1.828669,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[425,\t\t1,\t\t75.089947,\t\t15.017989,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[426,\t\t1,\t\t6.221433,\t\t1.244287,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[427,\t\t1,\t\t52.285027,\t\t10.457005,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[428,\t\t1,\t\t23.442983,\t\t4.688597,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[429,\t\t1,\t\t264.548503,\t\t52.909701,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[430,\t\t1,\t\t140.915889,\t\t28.183178,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[431,\t\t1,\t\t94.232619,\t\t18.846524,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[432,\t\t1,\t\t110.152151,\t\t22.03043,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[433,\t\t1,\t\t56.306843,\t\t11.261369,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[434,\t\t1,\t\t29.304823,\t\t5.860965,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[435,\t\t1,\t\t117.200845,\t\t23.440169,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[436,\t\t1,\t\t62.571565,\t\t12.514313,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[437,\t\t1,\t\t14.250017,\t\t2.850003,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[438,\t\t1,\t\t38.243144,\t\t7.648629,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[439,\t\t1,\t\t71.203792,\t\t14.240758,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[440,\t\t1,\t\t60.17448,\t\t12.034896,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[441,\t\t1,\t\t46.131801,\t\t9.22636,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[442,\t\t1,\t\t61.047989,\t\t12.209598,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[443,\t\t1,\t\t132.357787,\t\t26.471557,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[444,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[445,\t\t1,\t\t60.141849,\t\t12.02837,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[446,\t\t1,\t\t27.887236,\t\t5.577447,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[447,\t\t1,\t\t53.019084,\t\t10.603817,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[448,\t\t1,\t\t38.963643,\t\t7.792729,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[449,\t\t1,\t\t196.46788,\t\t39.293576,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[450,\t\t1,\t\t120.228968,\t\t24.045794,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[451,\t\t1,\t\t51.374431,\t\t10.274886,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[452,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[453,\t\t1,\t\t34.430836,\t\t6.886167,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[454,\t\t1,\t\t24.021222,\t\t4.804244,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[455,\t\t1,\t\t39.164582,\t\t7.832916,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[456,\t\t1,\t\t39.164582,\t\t7.832916,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[457,\t\t1,\t\t120.107951,\t\t24.02159,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[458,\t\t1,\t\t114.237806,\t\t22.847561,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[459,\t\t1,\t\t139.03166,\t\t27.806332,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[460,\t\t1,\t\t182.716246,\t\t36.543249,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[461,\t\t1,\t\t190.064516,\t\t38.012903,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[462,\t\t1,\t\t58.141721,\t\t11.628344,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[463,\t\t1,\t\t29.792182,\t\t5.958436,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[464,\t\t1,\t\t29.828194,\t\t5.965639,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[465,\t\t1,\t\t48.180685,\t\t9.636137,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[466,\t\t1,\t\t39.116621,\t\t7.823324,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[467,\t\t1,\t\t36.098164,\t\t7.219633,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[468,\t\t1,\t\t59.186721,\t\t11.837344,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[469,\t\t1,\t\t36.676825,\t\t7.335365,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[470,\t\t1,\t\t93.401797,\t\t18.680359,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[471,\t\t1,\t\t91.962492,\t\t18.392498,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[472,\t\t1,\t\t32.165708,\t\t6.433142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[473,\t\t1,\t\t59.063909,\t\t11.812782,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[474,\t\t1,\t\t30.505891,\t\t6.101178,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[475,\t\t1,\t\t29.936908,\t\t5.987382,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[476,\t\t1,\t\t33.833631,\t\t6.766726,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[477,\t\t1,\t\t54.600163,\t\t10.920033,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[478,\t\t1,\t\t68.587757,\t\t13.717551,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[479,\t\t1,\t\t124.296248,\t\t24.85925,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[480,\t\t1,\t\t54.481297,\t\t10.896259,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[481,\t\t1,\t\t47.314081,\t\t9.462816,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[482,\t\t1,\t\t53.723103,\t\t10.744621,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[483,\t\t1,\t\t45.687562,\t\t9.137512,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[484,\t\t1,\t\t35.816826,\t\t7.
163365,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[485,\t\t1,\t\t53.500859,\t\t10.700172,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[486,\t\t1,\t\t492.181766,\t\t98.436353,\t\t0,\t\t0,\t\t0,\t\t0.999649,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[487,\t\t1,\t\t124.716585,\t\t24.943317,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[488,\t\t1,\t\t359.364944,\t\t71.872989,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[489,\t\t1,\t\t94.583831,\t\t18.916766,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[490,\t\t1,\t\t29.430961,\t\t5.886192,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[491,\t\t1,\t\t40.467949,\t\t8.09359,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[492,\t\t1,\t\t63.106141,\t\t12.621228,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[493,\t\t1,\t\t81.336262,\t\t16.267252,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[494,\t\t1,\t\t111.164357,\t\t22.232871,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[495,\t\t1,\t\t87.506217,\t\t17.501243,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[496,\t\t1,\t\t6.198211,\t\t1.239642,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[497,\t\t1,\t\t775.084395,\t\t155.016879,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[498,\t\t1,\t\t36.350759,\t\t7.270152,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[499,\t\t1,\t\t50.739704,\t\t10.147941,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[500,\t\t1,\t\t27.779391,\t\t5.555878,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[501,\t\t1,\t\t46.99794,\t\t9.399588,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[502,\t\t1,\t\t185.491137,\t\t37.098227,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[503,\t\t1,\t\t56.808699,\t\t11.36174,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[504,\t\t1,\t\t37.201005,\t\t7.440201,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[505,\t\t1,\t\t263.85839,\t\t52.771678,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[506,\t\t1,\t\t82.821902,\t\t16.56438,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[507,\t\t1,\t\t78.781156,\t\t15.756231,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[508,\t\t1,\t\t114.530558,\t\t22.906112,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[509,\t\t1,\t\t150.928559,\t\t30.185712,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[510,\t\t1,\t\t95.350587,\t\t19.070117,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[511,\t\t1,\t\t83.174843,\t\t16.634969,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[512,\t\t1,\t\t54.942119,\t\t10.988424,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[513,\t\t1,\t\t30.267245,\t\t6.053449,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[514,\t\t1,\t\t75.332243,\t\t15.066449,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[515,\t\t1,\t\t67.200836,\t\t13.440167,\t\t0,\t\t0,\
t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[516,\t\t1,\t\t75.181923,\t\t15.036385,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[517,\t\t1,\t\t35.314749,\t\t7.06295,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[518,\t\t1,\t\t198.894901,\t\t39.77898,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[519,\t\t1,\t\t19.5749,\t\t3.91498,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[520,\t\t1,\t\t79.031447,\t\t15.806289,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[521,\t\t1,\t\t71.392235,\t\t14.278447,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[522,\t\t1,\t\t61.126609,\t\t12.225322,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[523,\t\t1,\t\t32.903759,\t\t6.580752,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[524,\t\t1,\t\t95.502871,\t\t19.100574,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[525,\t\t1,\t\t113.776267,\t\t22.755253,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[526,\t\t1,\t\t34.494824,\t\t6.898965,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[527,\t\t1,\t\t37.872893,\t\t7.574579,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[528,\t\t1,\t\t82.66113,\t\t16.532226,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[529,\t\t1,\t\t105.959329,\t\t21.191866,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[530,\t\t1,\t\t44.901236,\t\t8.980247,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[531,\t\t1,\t\t45.652693,\t\t9.130539,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[532,\t\t1,\t\t43.818627,\t\t8.763725,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[533,\t\t1,\t\t39.266778,\t\t7.853356,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[534,\t\t1,\t\t108.319748,\t\t21.66395,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[535,\t\t1,\t\t135.609372,\t\t27.121874,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[536,\t\t1,\t\t106.88941,\t\t21.377882,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[537,\t\t1,\t\t35.557702,\t\t7.11154,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[538,\t\t1,\t\t26.580512,\t\t5.316102,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[539,\t\t1,\t\t28.203557,\t\t5.640711,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[540,\t\t1,\t\t25.396065,\t\t5.079213,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[541,\t\t1,\t\t65.600226,\t\t13.120045,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[542,\t\t1,\t\t90.114434,\t\t18.022887,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[543,\t\t1,\t\t49.220061,\t\t9.844012,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[544,\t\t1,\t\t91.673054,\t\t18.334611,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[545,\t\t1,\t\t197.38712,\t\t39.477424,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[546,\t\t1,\t\t98.933406,\t\t19.786681,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\
t1.1,\t\t0.9\t\t],\n\t\t[547,\t\t1,\t\t127.877927,\t\t25.575585,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[548,\t\t1,\t\t41.394614,\t\t8.278923,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[549,\t\t1,\t\t35.395934,\t\t7.079187,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[550,\t\t1,\t\t29.207665,\t\t5.841533,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[551,\t\t1,\t\t28.155485,\t\t5.631097,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[552,\t\t1,\t\t139.816966,\t\t27.963393,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[553,\t\t1,\t\t0.967317,\t\t0.193463,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[554,\t\t1,\t\t141.649183,\t\t28.329837,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[555,\t\t1,\t\t53.969907,\t\t10.793981,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[556,\t\t1,\t\t83.493242,\t\t16.698648,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[557,\t\t1,\t\t177.393102,\t\t35.47862,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[558,\t\t1,\t\t104.601385,\t\t20.920277,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[559,\t\t1,\t\t55.981654,\t\t11.196331,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[560,\t\t1,\t\t87.456587,\t\t17.491317,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[561,\t\t1,\t\t47.958639,\t\t9.591728,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[562,\t\t1,\t\t131.01941,\t\t26.203882,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[563,\t\t1,\t\t92.117323,\t\t18.423465,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[564,\t\t1,\t\t181.88591,\t\t36.377182,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[565,\t\t1,\t\t137.241932,\t\t27.448386,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[566,\t\t1,\t\t0.22044,\t\t0.044088,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[567,\t\t1,\t\t223.092915,\t\t44.618583,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[568,\t\t1,\t\t206.306969,\t\t41.261394,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[569,\t\t1,\t\t145.159033,\t\t29.031807,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[570,\t\t1,\t\t226.619389,\t\t45.323878,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[571,\t\t1,\t\t166.854439,\t\t33.370888,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[572,\t\t1,\t\t294.303372,\t\t58.860674,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[573,\t\t1,\t\t85.667853,\t\t17.133571,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[574,\t\t1,\t\t163.229975,\t\t32.645995,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[575,\t\t1,\t\t3.067384,\t\t0.613477,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[576,\t\t1,\t\t198.486554,\t\t39.697311,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[577,\t\t1,\t\t218.810734,\t\t43.762147,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\
n\t\t[578,\t\t1,\t\t208.913162,\t\t41.782632,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[579,\t\t1,\t\t76.217223,\t\t15.243445,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[580,\t\t1,\t\t15.867292,\t\t3.173458,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[581,\t\t1,\t\t0.091175,\t\t0.018235,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[582,\t\t1,\t\t57.407943,\t\t11.481589,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[583,\t\t1,\t\t65.8448,\t\t13.16896,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[584,\t\t1,\t\t37.778593,\t\t7.555719,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[585,\t\t1,\t\t65.588286,\t\t13.117657,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t]\n\t])\n\tppc[\"gen\"] = array([\n\t\t[586,\t\t0.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t272.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[589,\t\t63.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[590,\t\t38.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[593,\t\t11.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[595,\t\t1597.40916,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4730.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[598,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[599,\t\t9.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[601,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[602,\t\t24.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[603,\t\t1550.920527,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3455.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[607,\t\t1800.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1800.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[608,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[609,\t\t36.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[612,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[614,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[616,\t\t29.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[617,\t\t137.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[618,\t\t33.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[619,\t\t118.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t118.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[624,\t\t27.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[629,\t\t75.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[632,\t\t45.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[637,\t\t53.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[638,\t\t128.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t128.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[640,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[641,\t\t12.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[642,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[643,\t\t857.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t857.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[647,\t\t14.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[652,\t\t46.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t46.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[655,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[661,\t\t32.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[663,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[666,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[668,\t\t361.796579,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t766.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[670,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[672,\t\t33.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[681,\t\t40.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[683,\t\t27.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[687,\t\t1329.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1329.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[694,\t\t16.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[695,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[696,\t\t721.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t721.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[697,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[698,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[702,\t\t73.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[704,\t\t508.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t508.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[705,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[707,\t\t34.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[713,\t\t13.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[714,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[716,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[717,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[719,\t\t1191.312791,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1958.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[724,\t\t12.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[730,\t\t633.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[732,\t\t14.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[735,\t\t84.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[738,\t\t138.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[741,\t\t214.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[742,\t\t9.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[743,\t\t581.35488,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[747,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[748,\t\t110.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t110.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[749,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[750,\t\t90.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[753,\t\t311.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t311.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[758,\t\t18.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[761,\t\t12.510758,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[762,\t\t1105.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1105.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[763,\t\t20.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[765,\t\t59.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[767,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[772,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[774,\t\t33.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[777,\t\t79.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[778,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[781,\t\t943.856473,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1310.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[784,\t\t777.117676,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1275.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0],\n\t\t[785,\t\t3.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[788,\t\t875.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[789,\t\t77.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[791,\t\t10.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[792,\t\t62.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t62.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[795,\t\t13.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[800,\t\t36.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[801,\t\t50.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[802,\t\t500.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t500.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[805,\t\t695.105244,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[806,\t\t35.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[808,\t\t217.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[809,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[811,\t\t25.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[814,\t\t89.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[816,\t\t80.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t80.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[817,\t\t54.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[821,\t\t82.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[822,\t\t134.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[826,\t\t58.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[830,\t\t89.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[835,\t\t63.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[836,\t\t25.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[839,\t\t73.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[841,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[844,\t\t40.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[845,\t\t318.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t318.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[849,\t\t779.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t779.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[850,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[851,\t\t79.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[853,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[855,\t\t688.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t688.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[856,\t\t36.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[857,\t\t1402.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1402.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[858,\t\t56.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[860,\t\t25.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[865,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[869,\t\t1360.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1360.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[870,\t\t58.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[872,\t\t22.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[874,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[875,\t\t24.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[882,\t\t17.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[883,\t\t18.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[885,\t\t30.421453,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t490.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[886,\t\t2572.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2572.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[889,\t\t9.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[890,\t\t48.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[893,\t\t60.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[894,\t\t158.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t158.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[895,\t\t19.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[896,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[898,\t\t84.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[900,\t\t112.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[902,\t\t19.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[903,\t\t20.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[905,\t\t137.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[906,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[907,\t\t67.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[909,\t\t36.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[913,\t\t74.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t74.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[915,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[917,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[918,\t\t38.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[920,\t\t12.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[921,\t\t124.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t124.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[922,\t\t164.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[923,\t\t146.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t146.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[925,\t\t26.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[931,\t\t217.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[935,\t\t23.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[936,\t\t104.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t104.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[937,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[939,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[940,\t\t29.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[944,\t\t25.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[950,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[952,\t\t31.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t31.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[958,\t\t66.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[959,\t\t45.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[960,\t\t26.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[963,\t\t692.05483,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[965,\t\t352.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t352.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[966,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[967,\t\t37.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[969,\t\t56.9,\t\t0,\t\t9999,\t\t-9999,\t\t0.999649,\t\t100,\t\t1,\t\t56.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[971,\t\t20.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[973,\t\t1220.325389,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1347.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[976,\t\t26.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[978,\t\t4.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[980,\t\t157.144133,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t350.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[981,\t\t119.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[982,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[983,\t\t44.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t44.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[984,\t\t465.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t465.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[985,\t\t22.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[986,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[987,\t\t164.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[988,\t\t5.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[993,\t\t392.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[994,\t\t33.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[995,\t\t4.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[997,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[999,\t\t15.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1000,\t\t49.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1002,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1003,\t\t900.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t900.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1007,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1008,\t\t49.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1010,\t\t750.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1011,\t\t18.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1012,\t\t2462.9027,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2835.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1014,\t\t385.076083,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1026,\t\t655.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t655.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1027,\t\t6.040329,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1028,\t\t400.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t400.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0],\n\t\t[1029,\t\t60.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1030,\t\t533.235009,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1018.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1031,\t\t1447.199962,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1447.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1032,\t\t29.844186,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.510391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1033,\t\t8.985972,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.164506,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1034,\t\t19.7547,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.262779,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1035,\t\t10.164552,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.886469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1036,\t\t14.378555,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.223077,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1037,\t\t14.908769,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t94.684044,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1038,\t\t14.468579,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.798525,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1039,\t\t4.952647,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.724114,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1040,\t\t0.000162,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.064179,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1041,\t\t27.002116,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t204.187624,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1042,\t\t1.876171,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.70053,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1043,\t\t0.256706,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.035538,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1044,\t\t6.760194,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.163532,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1045,\t\t13.177215,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.836204,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1046,\t\t37.140984,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t106.787063,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1047,\t\t2.842177,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.029581,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1048,\t\t19.296148,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.656883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1049,\t\t54.587867,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t293.755375,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1050,\t\t7.647906,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.781606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1051,\t\t46.91242,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t304.42978,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1052,\t\t9.466615,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.66869,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1053,\t\t7.825963,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.368087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1054,\t\t151.648181,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t273.855776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1055,\t\t0.123934,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.856069,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1056,\t\t38.680309,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t603.943953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1057,\t\t29.767545,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t426.979979,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1058,\t\t68.333477,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1055.735174,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1059,\t\t22.900926,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.871332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1060,\t\t0.376777,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.351632,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1061,\t\t10.540044,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t161.862597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1062,\t\t0.116182,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.878561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1063,\t\t0.316374,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.670916,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1064,\t\t21.950141,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t209.786524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1065,\t\t45.872428,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.421643,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1066,\t\t23.855959,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.399019,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1067,\t\t15.509375,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.653526,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1068,\t\t2.548672,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.009022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1069,\t\t1.438102,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.190759,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1070,\t\t0.432601,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.788599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1071,\t\t3.009735,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.328696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1072,\t\t37.609442,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.606433,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1073,\t\t32.83069,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.81765,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1074,\t\t46.814391,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.592986,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1075,\t\t12.049145,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.783448,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1076,\t\t0.052942,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.29551,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1077,\t\t3.292977,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.120041,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1078,\t\t2.211612,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.413246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1079,\t\t22.560499,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t72.327992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1080,\t\t26.586979,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.149983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1081,\t\t65.295331,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t405.642115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1082,\t\t81.939018,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t510.054159,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0],\n\t\t[1083,\t\t68.84883,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.681488,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1084,\t\t59.664493,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t602.719371,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1085,\t\t22.851223,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t113.714399,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1086,\t\t33.471797,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t225.59917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1087,\t\t30.008292,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t116.66597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1088,\t\t11.933065,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.782492,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1089,\t\t41.766225,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t384.449592,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1090,\t\t43.445109,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.140897,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1091,\t\t9.233422,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.7939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1092,\t\t8.756292,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.002032,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1093,\t\t38.669088,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.605298,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1094,\t\t1.26057,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.759038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1095,\t\t0.067239,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.204951,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1096,\t\t20.948697,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.50612,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1097,\t\t1.213837,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.601122,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1098,\t\t25.024093,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.025499,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1099,\t\t129.962001,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t290.937198,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1100,\t\t0.000242,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1101,\t\t5.85763,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.930665,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1102,\t\t30.950589,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t350.979988,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1103,\t\t28.244451,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t245.381701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1104,\t\t0.062515,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.206918,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1105,\t\t0.751491,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.178593,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1106,\t\t0.770411,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.289793,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1107,\t\t15.268611,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.221615,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1108,\t\t49.858727,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t320.422751,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1109,\t\t0.214822,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.77821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1110,\t\t0.523611,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.654557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1111,\t\t13.461971,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.637993,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1112,\t\t23.190742,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t69.53429,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1113,\t\t1.174294,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.536361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1114,\t\t5.303138,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.446889,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1115,\t\t19.30443,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.575278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1116,\t\t11.327057,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.601142,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1117,\t\t33.662415,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.792541,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1118,\t\t2.443543,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.725012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1119,\t\t14.553801,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.254023,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1120,\t\t0.78508,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.416001,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1121,\t\t0.175997,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.540589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1122,\t\t0.488671,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.462883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1123,\t\t0.373835,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.464336,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1124,\t\t0.428198,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.288283,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1125,\t\t6.938827,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.818899,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1126,\t\t7.7927,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.154893,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1127,\t\t34.593243,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.296621,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1128,\t\t0.94399,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.06139,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1129,\t\t1.422045,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.738747,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1130,\t\t0.373314,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.025754,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1131,\t\t0.874138,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.897078,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1132,\t\t0.135526,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.359497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1133,\t\t0.234276,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.719597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1134,\t\t0.165533,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.508453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1135,\t\t2.223437,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.117819,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1136,\t\t0.126193,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.4027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1137,\t\t0.774927,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.669012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1138,\t\t0.357482,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.254278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1139,\t\t6.604429,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.822769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1140,\t\t12.206213,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.389457,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1141,\t\t45.398908,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.46456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1142,\t\t0.370493,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1143,\t\t6.789998,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.239356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1144,\t\t20.4498,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.527382,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1145,\t\t85.534771,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t175.889627,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1146,\t\t0.280415,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.861317,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1147,\t\t16.37186,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.703707,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1148,\t\t4.563939,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.645529,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1149,\t\t2.308567,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.556784,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1150,\t\t0.990631,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.62256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1151,\t\t4.555326,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.036113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1152,\t\t0.038176,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.116518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1153,\t\t0.017679,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.068788,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1154,\t\t0.041281,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.160625,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1155,\t\t0.210285,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.609451,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1156,\t\t5.347054,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.022334,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1157,\t\t1.519684,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.354147,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1158,\t\t0.348151,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.04304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1159,\t\t4.513539,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.498087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1160,\t\t20.09839,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.377761,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1161,\t\t2.879323,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.263391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1162,\t\t15.680934,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t502.409178,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1163,\t\t8.453922,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t330.03194,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1164,\t\t20.453127,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t285.625412,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1165,\t\t2.281195,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.188579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1166,\t\t18.002192,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.277163,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1167,\t\t1.694939,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.05378,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1168,\t\t0.44436,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.345774,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1169,\t\t0.921454,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.721845,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1170,\t\t0.087758,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.26599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1171,\t\t2.96652,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.029885,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1172,\t\t0.505643,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.584043,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1173,\t\t19.666411,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t254.253327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1174,\t\t0.42222,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.260082,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1175,\t\t0.27874,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.855454,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1176,\t\t0.079765,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.23222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1177,\t\t8.927407,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.87401,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1178,\t\t0.815572,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.167999,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1179,\t\t0.3534,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.306293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1180,\t\t0.228208,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.688545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1181,\t\t29.147096,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.739557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1182,\t\t33.853886,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.319579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1183,\t\t8.871985,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.222575,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1184,\t\t1.170012,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.219005,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1185,\t\t3.780758,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.343971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1186,\t\t9.665312,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.916368,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1187,\t\t2.589197,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.814574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1188,\t\t50.425204,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t179.712741,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1189,\t\t3.477711,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.261805,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1190,\t\t133.675903,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t220.533673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1191,\t\t40.638417,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.079413,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1192,\t\t6.115959,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.454569,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1193,\t\t0.806855,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.399953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1194,\t\t2.84698,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.986036,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1195,\t\t0.074644,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.202359,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1196,\t\t16.319617,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.697956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1197,\t\t6.128931,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.592266,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1198,\t\t11.166784,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.819157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1199,\t\t98.598691,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.421956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1200,\t\t31.655425,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.012408,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1201,\t\t13.438423,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.166667,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1202,\t\t16.096576,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.89238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1203,\t\t78.23571,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t182.623256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1204,\t\t7.967165,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.541821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1205,\t\t0.072355,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.548843,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1206,\t\t1.290931,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1207,\t\t1.202053,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.575453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1208,\t\t0.780694,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.242031,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1209,\t\t0.010429,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.268261,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1210,\t\t1.078692,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.02599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1211,\t\t6.761624,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.005229,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1212,\t\t37.155589,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.171888,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1213,\t\t30.409402,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.342704,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1214,\t\t0.999115,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.505907,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1215,\t\t0.327465,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.252965,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1216,\t\t8.430158,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.754469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1217,\t\t5.077463,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.871617,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1218,\t\t0.178472,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.980482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1219,\t\t3.291596,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.33953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1220,\t\t6.015934,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.597849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1221,\t\t120.543673,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t593.230436,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1222,\t\t109.959356,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t211.057769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1223,\t\t2.299012,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806101,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1224,\t\t33.351368,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.523778,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1225,\t\t1.90485,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.931481,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1226,\t\t0.308202,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.982858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1227,\t\t13.524572,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.482807,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1228,\t\t0.423975,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.021367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1229,\t\t16.723179,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t51.244222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1230,\t\t0.109766,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.681276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1231,\t\t3.487866,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.55478,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1232,\t\t8.125541,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.075088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1233,\t\t391.02234,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t575.36828,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1235,\t\t2.94467,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.03734,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1236,\t\t26.035307,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.225035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1237,\t\t5.675926,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.605409,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1238,\t\t32.451734,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.691049,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1239,\t\t1.038648,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.267706,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1240,\t\t30.583565,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.51051,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1241,\t\t86.99848,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t385.361595,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1242,\t\t3.416173,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.074038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1243,\t\t14.918069,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.079842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1244,\t\t129.061747,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t323.472536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1245,\t\t0.79248,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.080896,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1246,\t\t22.519773,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.127825,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1247,\t\t10.659624,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.833396,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1248,\t\t23.760882,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.958275,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1249,\t\t21.835866,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.135177,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1250,\t\t10.115905,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.830519,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1251,\t\t5.819648,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.404345,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1252,\t\t3.51164,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.887727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1253,\t\t11.49864,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.502694,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1254,\t\t21.309506,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.278695,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1255,\t\t0.843197,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.818419,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1256,\t\t3.40585,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.091842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1257,\t\t18.853352,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.95288,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1258,\t\t76.353255,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t235.487329,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1259,\t\t23.160638,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.288719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1260,\t\t2.019037,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.168717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1261,\t\t22.533206,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.699555,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1262,\t\t0.343562,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.524108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1263,\t\t0.274945,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.352421,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1264,\t\t72.442823,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.035361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1265,\t\t5.587517,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.654727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1266,\t\t105.047659,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.710849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1267,\t\t10.674638,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.469006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1268,\t\t0.217077,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.4295,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1269,\t\t0.210864,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.105829,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1270,\t\t5.176882,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.950511,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1271,\t\t9.400314,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.371792,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1272,\t\t0.210493,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.23166,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1273,\t\t0.635286,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.169201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1274,\t\t16.185422,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.095629,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1275,\t\t32.461993,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.0753,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1276,\t\t9.127963,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.655641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1277,\t\t14.052028,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.611252,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1278,\t\t39.94534,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t170.437781,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1279,\t\t3.4e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004344,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1280,\t\t0.003701,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.626494,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1281,\t\t0.000281,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.51246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1282,\t\t0.26134,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.363037,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1283,\t\t603.177299,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1297.764428,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1284,\t\t3.2512,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.426322,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1285,\t\t0.00074,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.937048,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1286,\t\t11.213673,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.872201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1287,\t\t20.351163,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t93.199628,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1288,\t\t36.574418,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t148.402692,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1289,\t\t14.014734,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t184.149235,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1290,\t\t0.681034,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.901974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1291,\t\t27.191207,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.293351,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1292,\t\t9.289509,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.682074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1293,\t\t0.091628,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402107,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1294,\t\t0.214591,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.39743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1295,\t\t0.225275,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.873666,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1296,\t\t1.258175,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.356489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1297,\t\t11.345166,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t177.778742,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1298,\t\t0.009849,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.014603,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1299,\t\t0.005031,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.158207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1300,\t\t12.856278,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.74405,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1301,\t\t32.248002,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.863304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1302,\t\t2.032992,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.877299,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1303,\t\t1.721597,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.335516,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1304,\t\t3.445086,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.594319,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1305,\t\t0.002827,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004567,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1306,\t\t0.497422,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.827014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1307,\t\t0.097085,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.29894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1308,\t\t0.125721,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.278321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1309,\t\t1.706233,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.34909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1310,\t\t0.839491,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.64589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1311,\t\t2.003781,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.854004,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1312,\t\t107.426818,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t262.264924,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1313,\t\t11.764822,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.836748,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1314,\t\t5.274489,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.003987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1315,\t\t4.759194,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.879027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1316,\t\t0.052504,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.757497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1317,\t\t5.874189,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.958574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1318,\t\t1.004484,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.956332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1319,\t\t4.437416,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.708276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1320,\t\t5.915069,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.75859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1321,\t\t0.028866,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.161123,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1322,\t\t0.502605,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.929763,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1323,\t\t39.385197,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t199.111909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1324,\t\t10.272005,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.063258,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1325,\t\t5.743733,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.497559,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1326,\t\t12.317533,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.928865,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1327,\t\t11.714988,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.796895,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1328,\t\t4.173775,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.063343,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1329,\t\t147.79385,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.675424,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1330,\t\t5.447129,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.131028,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1331,\t\t0.10904,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.289238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1332,\t\t5.887405,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.293088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1333,\t\t22.598628,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.650254,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1334,\t\t0.036299,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215341,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1335,\t\t0.501822,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.306939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1336,\t\t5.405693,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.773035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1337,\t\t29.490227,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t121.31241,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1338,\t\t0.307091,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.832524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1339,\t\t2.515521,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.086482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1340,\t\t14.526861,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.098327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1341,\t\t63.339523,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t205.513321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1342,\t\t0.050132,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.734589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1343,\t\t0.013456,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.102108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1344,\t\t0.083225,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.226057,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1345,\t\t0.685421,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.971188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1346,\t\t41.999959,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.719215,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1347,\t\t70.671183,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.115976,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1348,\t\t4.836222,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.707927,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1349,\t\t12.825227,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t42.352342,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1350,\t\t0.025315,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.094971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1352,\t\t0.000311,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.83726,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1355,\t\t0.868152,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.688324,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1356,\t\t13.387001,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.486231,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1357,\t\t10.749804,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.459913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1358,\t\t0.127287,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.247293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1359,\t\t16.180806,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.633589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1360,\t\t5.892976,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.135983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1361,\t\t26.218548,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.207173,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1362,\t\t27.992783,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.107216,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1363,\t\t0.006338,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.036158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1364,\t\t0.008624,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.061068,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1365,\t\t8e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1366,\t\t0.048094,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.229992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1367,\t\t14.811282,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.863891,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1368,\t\t0.207834,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.298243,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1369,\t\t6.272539,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.968859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1370,\t\t0.188201,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.343308,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1371,\t\t26.998869,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t81.767208,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1372,\t\t33.527137,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t192.966588,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1373,\t\t5.568543,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.200257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1374,\t\t58.131655,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t108.220146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1375,\t\t30.515379,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.223816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1376,\t\t43.284862,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t176.213655,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1377,\t\t22.918821,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t234.376272,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1378,\t\t17.37033,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t246.029906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1379,\t\t0.223424,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.805984,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1380,\t\t0.457422,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.213356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1381,\t\t0.279805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.01257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1382,\t\t91.83514,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.839906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1383,\t\t70.616417,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.821439,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1384,\t\t1.556145,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.669135,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1385,\t\t0.032056,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.124455,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1386,\t\t0.22366,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.673858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1387,\t\t1.048378,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.493561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1388,\t\t0.349917,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.928188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1389,\t\t0.080501,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.213536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1390,\t\t1.120176,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.732816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1391,\t\t0.146772,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.521719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1392,\t\t6.771416,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.306386,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1393,\t\t0.335929,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.376509,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1394,\t\t0.27924,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.077886,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1395,\t\t0.019982,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.073776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1396,\t\t0.005576,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026112,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1397,\t\t8.352907,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.084545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1398,\t\t0.907681,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.779641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1399,\t\t4.725968,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.868157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1400,\t\t0.315198,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.297197,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1401,\t\t20.920377,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.339497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1402,\t\t6.667781,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.328902,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1403,\t\t30.382022,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.651672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1404,\t\t38.841518,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.800518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1405,\t\t7.541807,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.550802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1406,\t\t3.138034,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.763987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1407,\t\t0.042579,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.211614,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1408,\t\t8.224277,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.078698,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1409,\t\t1.988762,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.019786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1410,\t\t6.179106,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.466518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1411,\t\t8.465677,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.395367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1412,\t\t0.529221,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.987601,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1413,\t\t0.423867,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.679791,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1414,\t\t1.917854,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.992489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1415,\t\t0.500282,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.454501,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1416,\t\t0.485191,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.958002,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1417,\t\t0.000273,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.001311,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1418,\t\t15.947042,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.264613,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1419,\t\t4.672588,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.260903,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1420,\t\t0.34409,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.399757,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1421,\t\t1.272324,\t\t0,\t\t9999,\t\t-9999,\t\t0.999649,\t\t100,\t\t1,\t\t6.972369,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1422,\t\t0.875077,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.730495,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1423,\t\t0.356488,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.931017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1424,\t\t62.583301,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t219.092115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1425,\t\t5.449472,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.366402,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1426,\t\t14.532746,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.762602,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1427,\t\t21.176674,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t480.698671,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1428,\t\t11.162325,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t334.885743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1429,\t\t0.337671,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.279826,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1430,\t\t0.000238,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.034248,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1431,\t\t29.258384,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t227.662022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1432,\t\t2.360978,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.058931,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1433,\t\t764.000303,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1289.241188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1434,\t\t17.773283,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.440014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1435,\t\t29.43774,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t86.713217,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1436,\t\t22.076884,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.434116,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1437,\t\t32.587238,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.321958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1438,\t\t29.459162,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.815158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1439,\t\t8.088661,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.103164,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1440,\t\t0.033676,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.833609,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1441,\t\t0.092906,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.171578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1442,\t\t0.263566,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.715522,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1443,\t\t33.135857,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t103.005076,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1444,\t\t2.780508,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.981696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1445,\t\t6.281854,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.036799,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1446,\t\t112.956767,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t758.547933,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1447,\t\t19.764952,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.477411,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1448,\t\t1.967845,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.523578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1449,\t\t24.695245,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t95.437673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1450,\t\t19.206402,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.256809,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1451,\t\t21.653014,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.198838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1452,\t\t6.143506,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.068921,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1453,\t\t42.065988,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.93775,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1454,\t\t90.34573,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.126607,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1455,\t\t0.22583,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.654438,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1456,\t\t15.282913,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.054822,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1457,\t\t0.754986,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.002672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1458,\t\t0.092814,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.246199,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1459,\t\t1.289877,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.309059,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1460,\t\t11.726373,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t101.498473,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1461,\t\t5.998525,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.951737,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1462,\t\t0.800775,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402686,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1463,\t\t0.230564,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.711207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1464,\t\t56.813334,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.884211,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1465,\t\t1.774674,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.299939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1466,\t\t1.986566,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.685017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1467,\t\t0.597814,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.096155,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1468,\t\t8.255468,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.789171,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1469,\t\t14.258037,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.007467,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1470,\t\t31.860244,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t78.965265,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1471,\t\t63.238067,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t159.165074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1472,\t\t3.198483,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.980182,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1473,\t\t2.804144,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.362608,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1474,\t\t0.483778,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.398948,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1475,\t\t0.127364,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.39088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1476,\t\t60.580739,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t250.480113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1477,\t\t2.819831,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.122974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1479,\t\t1.208522,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.592606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1480,\t\t3.809346,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.681964,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1481,\t\t0.019551,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.053146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1482,\t\t2.264094,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.51083,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1483,\t\t0.973836,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.599649,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1484,\t\t0.007978,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02991,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1485,\t\t0.150327,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.563547,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1486,\t\t0.773405,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.89934,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1487,\t\t0.415121,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.142917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1488,\t\t0.217656,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.569856,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1489,\t\t0.038722,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.118938,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1490,\t\t248.562083,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t782.463701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1491,\t\t24.382622,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.622838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1492,\t\t54.199425,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t229.927503,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1493,\t\t24.858298,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.557175,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1494,\t\t43.844965,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t404.486733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1495,\t\t6.9702,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.920717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1496,\t\t6.5e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000282,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1497,\t\t50.872807,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.070006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1498,\t\t77.287581,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.800802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1499,\t\t0.150642,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.286676,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1500,\t\t0.052649,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.154817,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1501,\t\t2.712721,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.165333,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1502,\t\t0.000983,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.938928,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1503,\t\t5.713549,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.972187,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1504,\t\t31.014259,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.822836,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1505,\t\t1.170811,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.765913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1506,\t\t3.179194,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.406717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1507,\t\t0.631429,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.438042,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1508,\t\t0.039419,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.065259,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1510,\t\t41.416012,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t107.008141,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1511,\t\t50.866705,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.22192,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1512,\t\t22.97123,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.130052,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1513,\t\t3.795286,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.051786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1514,\t\t0.001464,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.027711,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1516,\t\t0.010238,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02881,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1517,\t\t1.112423,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.286804,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1518,\t\t0.185954,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.670542,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1519,\t\t0.012906,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.04654,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n\t])\n\tppc[\"branch\"] = 
array([\n\t\t[586,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[589,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[590,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[593,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[595,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[598,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[599,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[601,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[602,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[603,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[607,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[608,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[609,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[612,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[614,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[616,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[617,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[618,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[619,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[624,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[629,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[632,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[637,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[638,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[640,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[641,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[642,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[643,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[647,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[652,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[655,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[661,\t\t177,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[663,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,
\t\t360\t\t],\n\t\t[666,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[668,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[670,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[672,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[681,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[683,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[687,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[694,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[695,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[696,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[697,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[698,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[702,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[704,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[705,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[707,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[713,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[714,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[716,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[717,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[719,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[724,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[730,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[732,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[735,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[738,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[741,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[742,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[743,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[747,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[748,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[749,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[750,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\
t\t-360,\t\t360\t\t],\n\t\t[753,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[758,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[761,\t\t288,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[762,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[763,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[765,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[767,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[772,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[774,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[777,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[778,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[781,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[784,\t\t563,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[785,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[788,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[789,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[791,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[792,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[795,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[800,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[801,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[802,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[805,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[806,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[808,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[809,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[811,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[814,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[816,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[817,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[821,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[822,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[826,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\
t\t1,\t\t-360,\t\t360\t\t],\n\t\t[830,\t\t345,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[835,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[836,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[839,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[841,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[844,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[845,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[849,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[850,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[851,\t\t575,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[853,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[855,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[856,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[857,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[858,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[860,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[865,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[869,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[870,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[872,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[874,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[875,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[882,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[883,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[885,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[886,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[889,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[890,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[893,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[894,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[895,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[896,\t\t581,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[898,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0
,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[900,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[902,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[903,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[905,\t\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[906,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[907,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[909,\t\t417,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[913,\t\t422,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[915,\t\t423,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[917,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[918,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[920,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[921,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[922,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[923,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[925,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[931,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[935,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[936,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[937,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[939,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[940,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[944,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[950,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[952,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[958,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[959,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[960,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[963,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[965,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[966,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[967,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[969,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\
t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[971,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[973,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[976,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[978,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[980,\t\t508,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[981,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[982,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[983,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[984,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[985,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[986,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[987,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[988,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[993,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[994,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[995,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[997,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[999,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1000,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1002,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1003,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1007,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1008,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1010,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1011,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1012,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1014,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1026,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1027,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1028,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1029,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1030,\t\t269,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1031,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,
\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1032,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1033,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1034,\t\t4,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1035,\t\t6,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1036,\t\t7,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1037,\t\t8,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1038,\t\t9,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1039,\t\t11,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1040,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1041,\t\t16,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1042,\t\t17,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1043,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1044,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1045,\t\t23,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1046,\t\t25,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1047,\t\t27,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1048,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1049,\t\t29,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1050,\t\t31,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1051,\t\t33,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1052,\t\t34,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1053,\t\t35,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1054,\t\t36,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1055,\t\t38,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1056,\t\t39,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1057,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1058,\t\t41,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1059,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1060,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1061,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1062,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1063,\t\t48,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1064,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t999
9,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1065,\t\t50,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1066,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1067,\t\t53,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1068,\t\t54,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1069,\t\t55,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1070,\t\t57,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1071,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1072,\t\t59,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1073,\t\t60,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1074,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1075,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1076,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1077,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1078,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1079,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1080,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1081,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1082,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1083,\t\t73,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1084,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1085,\t\t76,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1086,\t\t77,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1087,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1088,\t\t80,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1089,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1090,\t\t82,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1091,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1092,\t\t84,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1093,\t\t85,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1094,\t\t88,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1095,\t\t89,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1096,\t\t90,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1097,\t\t91,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t999
9,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1098,\t\t92,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1099,\t\t93,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1100,\t\t97,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1101,\t\t98,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1102,\t\t101,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1103,\t\t102,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1104,\t\t103,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1105,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1106,\t\t109,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1107,\t\t110,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1108,\t\t111,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1109,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1110,\t\t113,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1111,\t\t114,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1112,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1113,\t\t116,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1114,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1115,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1116,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1117,\t\t122,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1118,\t\t126,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1119,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1120,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1121,\t\t131,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1122,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1123,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1124,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1125,\t\t135,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1126,\t\t136,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1127,\t\t137,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1128,\t\t139,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1129,\t\t140,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1130,\t\t141,
\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1131,\t\t142,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1132,\t\t144,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1133,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1134,\t\t146,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1135,\t\t147,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1136,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1137,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1138,\t\t150,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1139,\t\t151,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1140,\t\t152,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1141,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1142,\t\t154,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1143,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1144,\t\t158,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1145,\t\t161,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1146,\t\t162,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1147,\t\t163,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1148,\t\t164,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1149,\t\t166,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1150,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1151,\t\t168,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1152,\t\t169,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1153,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1154,\t\t171,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1155,\t\t172,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1156,\t\t173,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1157,\t\t174,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1158,\t\t175,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1159,\t\t176,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1160,\t\t177,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1161,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1162,\t\t179,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,
\t\t-360,\t\t360\t\t],\n\t\t[1163,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1164,\t\t181,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1165,\t\t182,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1166,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1167,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1168,\t\t186,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1169,\t\t187,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1170,\t\t188,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1171,\t\t189,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1172,\t\t190,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1173,\t\t192,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1174,\t\t193,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1175,\t\t194,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1176,\t\t196,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1177,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1178,\t\t198,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1179,\t\t199,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1180,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1181,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1182,\t\t203,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1183,\t\t204,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1184,\t\t205,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1185,\t\t206,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1186,\t\t207,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1187,\t\t208,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1188,\t\t209,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1189,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1190,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1191,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1192,\t\t213,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1193,\t\t214,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1194,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1195,\t\t216,\t\t0,\t\t1e-05,\t\t0,\t\
t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1196,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1197,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1198,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1199,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1200,\t\t222,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1201,\t\t223,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1202,\t\t224,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1203,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1204,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1205,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1206,\t\t228,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1207,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1208,\t\t230,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1209,\t\t234,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1210,\t\t235,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1211,\t\t237,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1212,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1213,\t\t239,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1214,\t\t240,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1215,\t\t241,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1216,\t\t242,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1217,\t\t243,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1218,\t\t244,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1219,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1220,\t\t251,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1221,\t\t252,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1222,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1223,\t\t254,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1224,\t\t255,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1225,\t\t256,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1226,\t\t257,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1227,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\
t\t[1228,\t\t260,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1229,\t\t263,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1230,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1231,\t\t266,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1232,\t\t267,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1233,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1235,\t\t271,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1236,\t\t272,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1237,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1238,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1239,\t\t275,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1240,\t\t276,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1241,\t\t278,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1242,\t\t281,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1243,\t\t282,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1244,\t\t283,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1245,\t\t284,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1246,\t\t285,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1247,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1248,\t\t287,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1249,\t\t288,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1250,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1251,\t\t291,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1252,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1253,\t\t293,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1254,\t\t294,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1255,\t\t295,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1256,\t\t296,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1257,\t\t297,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1258,\t\t298,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1259,\t\t299,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1260,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1261,\t\t302,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\
t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1262,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1263,\t\t304,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1264,\t\t307,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1265,\t\t308,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1266,\t\t309,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1267,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1268,\t\t312,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1269,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1270,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1271,\t\t317,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1272,\t\t318,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1273,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1274,\t\t321,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1275,\t\t322,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1276,\t\t323,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1277,\t\t324,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1278,\t\t325,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1279,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1280,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1281,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1282,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1283,\t\t331,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1284,\t\t333,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1285,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1286,\t\t337,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1287,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1288,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1289,\t\t340,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1290,\t\t341,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1291,\t\t342,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1292,\t\t343,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1293,\t\t344,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1294,\t\t345,\t\t0,\t
\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1295,\t\t346,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1296,\t\t347,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1297,\t\t348,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1298,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1299,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1300,\t\t353,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1301,\t\t354,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1302,\t\t355,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1303,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1304,\t\t357,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1305,\t\t359,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1306,\t\t361,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1307,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1308,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1309,\t\t364,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1310,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1311,\t\t366,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1312,\t\t367,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1313,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1314,\t\t369,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1315,\t\t370,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1316,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1317,\t\t372,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1318,\t\t373,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1319,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1320,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1321,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1322,\t\t377,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1323,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1324,\t\t379,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1325,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1326,\t\t384,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360
,\t\t360\t\t],\n\t\t[1327,\t\t385,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1328,\t\t386,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1329,\t\t387,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1330,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1331,\t\t390,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1332,\t\t391,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1333,\t\t392,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1334,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1335,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1336,\t\t395,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1337,\t\t396,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1338,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1339,\t\t398,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1340,\t\t399,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1341,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1342,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1343,\t\t404,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1344,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1345,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1346,\t\t407,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1347,\t\t408,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1348,\t\t410,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1349,\t\t411,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1350,\t\t412,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1352,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1355,\t\t418,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1356,\t\t419,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1357,\t\t420,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1358,\t\t421,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1359,\t\t422,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1360,\t\t423,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1361,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1362,\t\t425,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t
\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1363,\t\t426,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1364,\t\t427,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1365,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1366,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1367,\t\t430,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1368,\t\t431,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1369,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1370,\t\t433,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1371,\t\t434,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1372,\t\t435,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1373,\t\t436,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1374,\t\t437,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1375,\t\t438,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1376,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1377,\t\t440,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1378,\t\t441,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1379,\t\t442,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1380,\t\t443,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1381,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1382,\t\t446,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1383,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1384,\t\t448,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1385,\t\t449,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1386,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1387,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1388,\t\t453,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1389,\t\t454,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1390,\t\t455,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1391,\t\t456,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1392,\t\t457,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1393,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1394,\t\t459,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1395
,\t\t460,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1396,\t\t461,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1397,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1398,\t\t463,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1399,\t\t464,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1400,\t\t465,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1401,\t\t466,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1402,\t\t467,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1403,\t\t468,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1404,\t\t469,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1405,\t\t470,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1406,\t\t471,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1407,\t\t472,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1408,\t\t473,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1409,\t\t474,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1410,\t\t475,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1411,\t\t476,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1412,\t\t477,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1413,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1414,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1415,\t\t480,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1416,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1417,\t\t482,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1418,\t\t483,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1419,\t\t484,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1420,\t\t485,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1421,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1422,\t\t487,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1423,\t\t488,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1424,\t\t489,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1425,\t\t490,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1426,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1427,\t\t492,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\
t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1428,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1429,\t\t494,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1430,\t\t495,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1431,\t\t496,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1432,\t\t497,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1433,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1434,\t\t499,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1435,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1436,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1437,\t\t502,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1438,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1439,\t\t504,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1440,\t\t505,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1441,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1442,\t\t507,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1443,\t\t508,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1444,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1445,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1446,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1447,\t\t512,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1448,\t\t513,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1449,\t\t514,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1450,\t\t515,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1451,\t\t516,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1452,\t\t517,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1453,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1454,\t\t519,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1455,\t\t520,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1456,\t\t521,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1457,\t\t522,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1458,\t\t523,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1459,\t\t524,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1460,\t\t525,\t\t0,\t\t1e-05,
\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1461,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1462,\t\t527,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1463,\t\t528,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1464,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1465,\t\t530,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1466,\t\t531,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1467,\t\t532,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1468,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1469,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1470,\t\t535,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1471,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1472,\t\t537,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1473,\t\t538,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1474,\t\t539,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1475,\t\t540,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1476,\t\t541,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1477,\t\t542,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1479,\t\t544,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1480,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1481,\t\t546,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1482,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1483,\t\t548,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1484,\t\t549,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1485,\t\t550,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1486,\t\t551,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1487,\t\t552,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1488,\t\t554,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1489,\t\t555,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1490,\t\t556,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1491,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1492,\t\t558,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1493,\t\t559,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360
\t\t],\n\t\t[1494,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1495,\t\t561,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1496,\t\t562,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1497,\t\t563,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1498,\t\t564,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1499,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1500,\t\t566,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1501,\t\t567,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1502,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1503,\t\t569,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1504,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1505,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1506,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1507,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1508,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1510,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1511,\t\t577,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1512,\t\t578,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1513,\t\t579,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1514,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1516,\t\t582,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1517,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1518,\t\t584,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1519,\t\t585,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1,\t\t490,\t\t0,\t\t0.01433884297520661,\t\t0.151691958358336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.375\t\t],\n\t\t[3,\t\t4,\t\t0,\t\t0.006291637811634348,\t\t0.903417549506624,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t72.681\t\t],\n\t\t[491,\t\t6,\t\t0,\t\t0.011200661157024791,\t\t0.118492839955776,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.882\t\t],\n\t\t[7,\t\t5,\t\t0,\t\t0.005794840720221606,\t\t0.20802058859584005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.471\t\t],\n\t\t[8,\t\t9,\t\t0,\t\t0.0024379328254847646,\t\t0.350063268897336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.163\t\t],\n\t\t[492,\t\t11,\t\t0,\t\t0.018224793388429753,\t\t0.0482004476327704,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.565\t\t],\n\t\t[11,\t\t493,\t\t0,\t\t0.030286942148760328,\t\t0.0801020970657
1599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.809\t\t],\n\t\t[492,\t\t493,\t\t0,\t\t0.04521652892561983,\t\t0.11958747011094399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t68.39\t\t],\n\t\t[494,\t\t14,\t\t0,\t\t0.012990743801652892,\t\t0.137430291356512,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.297\t\t],\n\t\t[13,\t\t15,\t\t0,\t\t0.007681959833795014,\t\t0.27576354266704156,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.371\t\t],\n\t\t[16,\t\t5,\t\t0,\t\t0.006275623268698061,\t\t0.22527950450957998,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.248000000000005\t\t],\n\t\t[17,\t\t18,\t\t0,\t\t0.04623522622347646,\t\t0.9335989000302801,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t200.291\t\t],\n\t\t[17,\t\t12,\t\t0,\t\t0.0056020313942728535,\t\t0.113118303398186,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.268\t\t],\n\t\t[14,\t\t495,\t\t0,\t\t0.0017957024793388433,\t\t0.018996904156819597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.432\t\t],\n\t\t[494,\t\t19,\t\t0,\t\t0.010246611570247935,\t\t0.10839986031771602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.996\t\t],\n\t\t[20,\t\t21,\t\t0,\t\t0.005415685595567867,\t\t0.19440984828307922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t31.281\t\t],\n\t\t[20,\t\t22,\t\t0,\t\t0.0049706544321329645,\t\t0.713737278110032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.42100000000001\t\t],\n\t\t[497,\t\t23,\t\t0,\t\t0.002190413223140496,\t\t0.005793146490362,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.313\t\t],\n\t\t[23,\t\t499,\t\t0,\t\t0.020799669421487598,\t\t0.22004164444829602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.919\t\t],\n\t\t[25,\t\t26,\t\t0,\t\t0.00141845567867036,\t\t0.050919084651523595,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.193\t\t],\n\t\t[25,\t\t22,\t\t0,\t\t0.0035578254847645433,\t\t0.0319293051869808,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.275\t\t],\n\t\t[23,\t\t27,\t\t0,\t\t0.027738181818181818,\t\t0.073361203699828,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.95399999999999\t\t],\n\t\t[28,\t\t23,\t\t0,\t\t0.012841652892561981,\t\t0.0339632611780132,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.423\t\t],\n\t\t[8,\t\t21,\t\t0,\t\t0.004948753462603878,\t\t0.17764812836304802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.584\t\t],\n\t\t[9,\t\t29,\t\t0,\t\t0.002212863573407202,\t\t0.31774552934092004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.563000000000002\t\t],\n\t\t[30,\t\t25,\t\t0,\t\t0.019958795013850415,\t\t0.17911796401827998,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.641000000000005\t\t],\n\t\t[31,\t\t32,\t\t0,\t\t0.0299776084949446,\t\t0.605319030583196,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t129.863\t\t],\n\t\t[32,\t\t33,\t\t0,\t\t0.016762234533725762,\t\t0.33846927983213604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.61399999999999\t\t],\n\t\t[34,\t\t35,\t\t0,\t\t0.001931900826446281,\t\t0.020437759184893597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.843999999999999\t\t],\n\t\t[35,\t\t36,\t\t0,\t\t0.0008730578512396695,\t\t0.0092361605077588,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.641\t\t],\n\t\t[490,\t\t6,\t\t0,\t\t0.049352066115
702475,\t\t0.130525028606764,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.645\t\t],\n\t\t[37,\t\t10,\t\t0,\t\t0.02404639889196676,\t\t0.485553838251812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.169\t\t],\n\t\t[10,\t\t38,\t\t0,\t\t0.006848799630657894,\t\t0.13829351176534158,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.669\t\t],\n\t\t[37,\t\t38,\t\t0,\t\t0.01437834718372576,\t\t1.1613317560186958,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t124.574\t\t],\n\t\t[39,\t\t40,\t\t0,\t\t0.04521629732222991,\t\t0.913024308337812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t195.877\t\t],\n\t\t[39,\t\t41,\t\t0,\t\t0.017466989843005543,\t\t0.35269996139852006,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.667\t\t],\n\t\t[42,\t\t41,\t\t0,\t\t0.031145429362880884,\t\t0.6289001042979919,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t134.922\t\t],\n\t\t[18,\t\t42,\t\t0,\t\t0.03439750692520776,\t\t0.6945672650962679,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t149.01\t\t],\n\t\t[492,\t\t43,\t\t0,\t\t0.01819173553719008,\t\t0.192452068436848,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.03\t\t],\n\t\t[44,\t\t45,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t505,\t\t0,\t\t0.006061487603305785,\t\t0.0160312607980052,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[46,\t\t12,\t\t0,\t\t0.0014741170360110802,\t\t0.2116687641962416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.029\t\t],\n\t\t[47,\t\t48,\t\t0,\t\t0.005344182825484765,\t\t0.01199019212302604,\t\t428.0,\t\t428.0,\t\t428.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.7170000000000005\t\t],\n\t\t[49,\t\t50,\t\t0,\t\t0.0019151662049861494,\t\t0.0171874439892256,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.531000000000001\t\t],\n\t\t[31,\t\t33,\t\t0,\t\t0.013475992613088641,\t\t0.27211225959163604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.378\t\t],\n\t\t[31,\t\t51,\t\t0,\t\t0.003518611495844875,\t\t0.5052381383693519,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.647\t\t],\n\t\t[52,\t\t53,\t\t0,\t\t0.010464421745152355,\t\t1.5025884408875438,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t120.885\t\t],\n\t\t[52,\t\t54,\t\t0,\t\t0.0076126500461911354,\t\t0.1537174637168,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.978\t\t],\n\t\t[506,\t\t55,\t\t0,\t\t0.012634380165289257,\t\t0.133660287181212,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.219\t\t],\n\t\t[506,\t\t507,\t\t0,\t\t0.044157355371900825,\t\t0.11678619613628,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.788\t\t],\n\t\t[57,\t\t506,\t\t0,\t\t0.004687272727272727,\t\t0.049587095736244,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.179\t\t],\n\t\t[57,\t\t58,\t\t0,\t\t0.014436363636363634,\t\t0.0381809096340232,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.835\t\t],\n\t\t[58,\t\t506,\t\t0,\t\t0.019797685950413223,\t\t0.052360391943288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.944000000000003\t\t],\n\t\t[59,\t\t60,\t\t0,\t\t0.019407548476454296,\t\t0.174170863885556,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.049\t\t],\n\t\t[508,\t\t62,\t\t0,\t\t0.051111404958677685,\t\t0.03379452026753001
,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.653\t\t],\n\t\t[30,\t\t61,\t\t0,\t\t0.03143698060941828,\t\t0.28212765137935203,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.79\t\t],\n\t\t[63,\t\t506,\t\t0,\t\t0.027457190082644623,\t\t0.072618044249872,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.528999999999996\t\t],\n\t\t[13,\t\t64,\t\t0,\t\t0.0014816481994459833,\t\t0.2127501654814608,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.116\t\t],\n\t\t[65,\t\t66,\t\t0,\t\t0.03778185595567867,\t\t0.7629053006222161,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t163.671\t\t],\n\t\t[59,\t\t67,\t\t0,\t\t0.0051880193905817175,\t\t0.046559297286324804,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.982999999999999\t\t],\n\t\t[61,\t\t67,\t\t0,\t\t0.012931440443213295,\t\t0.1160517597580644,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.346\t\t],\n\t\t[68,\t\t69,\t\t0,\t\t0.011149584487534626,\t\t0.4002427745096039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.4\t\t],\n\t\t[70,\t\t69,\t\t0,\t\t0.009625346260387812,\t\t0.345526355460808,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.596000000000004\t\t],\n\t\t[71,\t\t72,\t\t0,\t\t0.008878635734072021,\t\t0.318721276477736,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.283\t\t],\n\t\t[73,\t\t74,\t\t0,\t\t0.012529547553116345,\t\t0.253001288604392,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t54.278\t\t],\n\t\t[37,\t\t75,\t\t0,\t\t0.027459141274238225,\t\t0.5544652029066119,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t118.95299999999999\t\t],\n\t\t[72,\t\t75,\t\t0,\t\t0.006688711911357341,\t\t0.240108375006292,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.634\t\t],\n\t\t[37,\t\t72,\t\t0,\t\t0.036222068328739615,\t\t0.7314094881920841,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t156.914\t\t],\n\t\t[76,\t\t77,\t\t0,\t\t0.004683777700831025,\t\t0.6725445900750401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t54.107\t\t],\n\t\t[77,\t\t51,\t\t0,\t\t0.00363183864265928,\t\t0.5214964473447999,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.955\t\t],\n\t\t[73,\t\t72,\t\t0,\t\t0.025475069252077563,\t\t0.514402082018968,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.35799999999999\t\t],\n\t\t[18,\t\t40,\t\t0,\t\t0.01302770083102493,\t\t0.26306018504072,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.43600000000001\t\t],\n\t\t[492,\t\t45,\t\t0,\t\t0.0308703030303719,\t\t0.18370114733484796,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.03699999999999\t\t],\n\t\t[10,\t\t74,\t\t0,\t\t0.030167359187465374,\t\t0.609150547206812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t130.685\t\t],\n\t\t[45,\t\t511,\t\t0,\t\t0.08203371900826446,\t\t0.05424014819960001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.038000000000004\t\t],\n\t\t[78,\t\t32,\t\t0,\t\t0.013458795013850415,\t\t0.48313777647302397,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.738\t\t],\n\t\t[79,\t\t80,\t\t0,\t\t0.0038086911357340715,\t\t0.1367226831743568,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.999000000000002\t\t],\n\t\t[81,\t\t79,\t\t0,\t\t0.010767832409972299,\t\t0.3865388099484561,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t62.195\t\t],\n\t\t[34,\t\t82
,\t\t0,\t\t0.0015497520661157025,\t\t0.00409874294399768,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.344\t\t],\n\t\t[83,\t\t84,\t\t0,\t\t0.00902611570247934,\t\t0.0238720301499152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.652000000000001\t\t],\n\t\t[83,\t\t499,\t\t0,\t\t0.04179570247933885,\t\t0.0276350398834796,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.608\t\t],\n\t\t[85,\t\t86,\t\t0,\t\t0.00802354570637119,\t\t0.28802563884886,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.343999999999994\t\t],\n\t\t[87,\t\t86,\t\t0,\t\t0.01904968836565097,\t\t0.683837154069184,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.031\t\t],\n\t\t[88,\t\t89,\t\t0,\t\t0.00380297520661157,\t\t0.010058007429140002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.752000000000001\t\t],\n\t\t[90,\t\t86,\t\t0,\t\t0.012097818559556786,\t\t0.434282055192244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t69.877\t\t],\n\t\t[91,\t\t86,\t\t0,\t\t9.26246537396122e-05,\t\t0.013299992817559201,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t92,\t\t0,\t\t0.0001852493074792244,\t\t0.0066499964087796005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t93,\t\t0,\t\t0.008152181440443215,\t\t0.292643346635492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.086999999999996\t\t],\n\t\t[94,\t\t86,\t\t0,\t\t0.012883829639889197,\t\t0.46249792780547194,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.417\t\t],\n\t\t[86,\t\t95,\t\t0,\t\t0.010421052631578947,\t\t0.37409026526870803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t60.192\t\t],\n\t\t[513,\t\t517,\t\t0,\t\t0.0008733884297520661,\t\t0.0023099144321748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.321\t\t],\n\t\t[97,\t\t66,\t\t0,\t\t0.03812777008310249,\t\t0.34217338998058805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.113\t\t],\n\t\t[42,\t\t98,\t\t0,\t\t0.003091759002770083,\t\t0.44394630230884,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t35.716\t\t],\n\t\t[99,\t\t100,\t\t0,\t\t0.016371537396121884,\t\t0.587698093837988,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t94.56200000000001\t\t],\n\t\t[42,\t\t101,\t\t0,\t\t0.008165339335180054,\t\t0.29311568282888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.163000000000004\t\t],\n\t\t[102,\t\t42,\t\t0,\t\t0.012403047091412742,\t\t0.44523901189173193,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t71.64\t\t],\n\t\t[103,\t\t87,\t\t0,\t\t0.007073060941828254,\t\t0.25390556381756,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.854\t\t],\n\t\t[104,\t\t103,\t\t0,\t\t0.0028852146814404432,\t\t0.1035721403291428,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.665\t\t],\n\t\t[105,\t\t87,\t\t0,\t\t0.006406682825484765,\t\t0.22998422159488002,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.005\t\t],\n\t\t[106,\t\t107,\t\t0,\t\t0.005714219759923823,\t\t0.11538365264216799,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.754\t\t],\n\t\t[108,\t\t107,\t\t0,\t\t0.0025427631578947367,\t\t0.09127896939786201,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.687000000000001\t\t],\n\t\t[109,\t\t106,\t\t0,\t\t0.003030470914127424,\t\t0.10878648330773438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t
\t1,\t\t1,\t\t-360,\t\t17.504\t\t],\n\t\t[110,\t\t111,\t\t0,\t\t0.019821849030470913,\t\t0.7115558306889919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.491\t\t],\n\t\t[87,\t\t112,\t\t0,\t\t0.006135907202216068,\t\t0.220264039928212,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.441\t\t],\n\t\t[113,\t\t87,\t\t0,\t\t0.003981648199445983,\t\t0.14293141813921081,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.998\t\t],\n\t\t[87,\t\t85,\t\t0,\t\t0.011046225761772853,\t\t0.3965324494097,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.803000000000004\t\t],\n\t\t[110,\t\t114,\t\t0,\t\t0.011665339335180056,\t\t0.418757110306188,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.37899999999999\t\t],\n\t\t[115,\t\t116,\t\t0,\t\t0.007048925619834712,\t\t0.07457124214588401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.323\t\t],\n\t\t[117,\t\t118,\t\t0,\t\t0.005987534626038782,\t\t0.21493782785077598,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.584\t\t],\n\t\t[117,\t\t119,\t\t0,\t\t0.0038738746537396117,\t\t0.5562504472696961,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.751000000000005\t\t],\n\t\t[117,\t\t120,\t\t0,\t\t0.005886686288088643,\t\t0.8452704781039522,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t68.003\t\t],\n\t\t[121,\t\t122,\t\t0,\t\t0.0021170360110803325,\t\t0.0759964075574972,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.228\t\t],\n\t\t[123,\t\t124,\t\t0,\t\t0.0018386426592797783,\t\t0.0660027680945204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.62\t\t],\n\t\t[125,\t\t126,\t\t0,\t\t0.004941135734072022,\t\t0.17737467056702802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.54\t\t],\n\t\t[127,\t\t119,\t\t0,\t\t0.0029027008310249305,\t\t0.1041998502705648,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.766\t\t],\n\t\t[118,\t\t128,\t\t0,\t\t0.007397160664819945,\t\t0.265539950057812,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.726000000000006\t\t],\n\t\t[121,\t\t119,\t\t0,\t\t0.002552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[530,\t\t527,\t\t0,\t\t0.022726611570247933,\t\t0.060106736329903994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.374\t\t],\n\t\t[125,\t\t130,\t\t0,\t\t0.002931440443213297,\t\t0.105231531956442,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.932000000000002\t\t],\n\t\t[125,\t\t123,\t\t0,\t\t0.0019078081717451524,\t\t0.2739425623421336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.039\t\t],\n\t\t[131,\t\t132,\t\t0,\t\t0.0035744459833795014,\t\t0.12831385593973843,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.646\t\t],\n\t\t[133,\t\t123,\t\t0,\t\t0.003864439058171745,\t\t0.13872389704704202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.320999999999998\t\t],\n\t\t[524,\t\t134,\t\t0,\t\t0.008092231404958678,\t\t0.08560847143881999,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.479\t\t],\n\t\t[135,\t\t136,\t\t0,\t\t0.005242901662049862,\t\t0.1882073282678,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.283\t\t],\n\t\t[123,\t\t131,\t\t0,\t\t0.003138331024930748,\t\t0.1126583971045252,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.127\t\t],\n\t\t[117,\t\t128,\t\t0,\t\t0.
010800034626038782, 0.38769479063117196, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 62.381],
[137, 521, 0, 0.013832396694214875, 0.14633421587532003, 991.0, 991.0, 991.0, 0, 2, 1, -360, 41.843],
[531, 514, 0, 0.0059504132231404955, 0.035409362037522, 743.0, 743.0, 743.0, 0, 1, 1, -360, 13.5],
[139, 521, 0, 0.021257520661157023, 0.05622132386323199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.152],
[140, 514, 0, 0.018527603305785127, 0.04900131122836401, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.023000000000003],
[522, 141, 0, 0.012168595041322314, 0.032183175718526795, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.405],
[142, 523, 0, 0.007060165289256198, 0.0746901476577608, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.357],
[530, 526, 0, 0.020281652892561983, 0.053640374808152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.676],
[140, 532, 0, 0.004669090909090909, 0.0123486871461184, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.062],
[142, 144, 0, 0.006678126721756199, 0.0397397958689204, 743.0, 743.0, 743.0, 0, 1, 1, -360, 15.151],
[140, 522, 0, 0.020450247933884298, 0.05408627047793199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.930999999999997],
[145, 146, 0, 0.028527603305785125, 0.07544904460236, 495.0, 495.0, 495.0, 0, 1, 1, -360, 43.148],
[147, 523, 0, 0.02461289256198347, 0.0650955220034416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 37.227],
[144, 523, 0, 0.008479338842975206, 0.0224259292904064, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.825],
[139, 523, 0, 0.029245619834710742, 0.0193370088934308, 248.0, 248.0, 248.0, 0, 1, 1, -360, 22.116999999999997],
[140, 141, 0, 0.008362975206611572, 0.022118173847506, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.649000000000001],
[528, 526, 0, 0.015389090909090908, 0.0407006573227188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.276],
[528, 148, 0, 0.014306115702479338, 0.0378364333712244, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.638],
[149, 150, 0, 0.013604628099173552, 0.035981157661543604, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.576999999999998],
[145, 528, 0, 0.00320595041322314, 0.0084790121737992, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.849],
[530, 151, 0, 0.013144462809917355, 0.0347641247737036, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.881],
[524, 152, 0, 0.014598347107438016, 0.03860931919944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.08],
[149, 525, 0, 0.016897190082644627, 0.17875695122823998, 991.0, 991.0, 991.0, 0, 2, 1, -360, 51.114],
[139, 514, 0, 0.007824132231404959, 0.020693056313687997, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.834000000000001],
[126, 120, 0, 0.012780297783933518, 0.458781387757004, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.819],
[530, 153, 0, 0.02254545454545455, 0.059627617060924, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.1],
[528, 147, 0, 0.15786710743801652, 0.104380679149868, 248.0, 248.0, 248.0, 0, 1, 1, -360, 119.387],
[528, 154, 0, 0.006528264462809917, 0.017265779790547203, 495.0, 495.0, 495.0, 0, 2, 1, -360, 9.874],
[130, 120, 0, 0.01450502077562327, 0.5206947188067639, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 83.781],
[528, 155, 0, 0.16064132231404957, 0.1062149715341, 248.0, 248.0, 248.0, 0, 1, 1, -360, 121.485],
[524, 533, 0, 0.004432727272727273, 0.0468942356109744, 991.0, 991.0, 991.0, 0, 1, 1, -360, 13.409],
[524, 149, 0, 0.0056413223140495865, 0.05968007537478799, 991.0, 991.0, 991.0, 0, 2, 1, -360, 17.065],
[154, 150, 0, 0.007539173553719007, 0.0199394052006688, 495.0, 495.0, 495.0, 0, 2, 1, -360, 11.402999999999999],
[157, 110, 0, 0.009962084487534625, 0.357614433044424, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 57.541000000000004],
[119, 158, 0, 0.0002490189289012004, 0.08045252664623159, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 4.315],
[159, 60, 0, 0.010967451523545706, 0.0984261617997728, 856.0, 856.0, 856.0, 0, 1, 1, -360, 31.674],
[536, 161, 0, 0.021314380165289255, 0.056371704363524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.238],
[115, 151, 0, 0.00379404958677686, 0.0401376047510724, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.477],
[162, 134, 0, 0.0015910743801652895, 0.016832124393744, 991.0, 991.0, 991.0, 0, 2, 1, -360, 4.813],
[115, 526, 0, 0.0037884297520661154, 0.010019537998747198, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.73],
[138, 87, 0, 0.0011838642659279777, 0.16999131006813442, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 13.675999999999998],
[123, 163, 0, 0.0022778739612188364, 0.08177009602828919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.157],
[112, 164, 0, 0.0008672957063711912, 0.12453516639176802, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 10.019],
[112, 165, 0, 0.005989439058171744, 0.21500619230086396, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.595],
[166, 165, 0, 0.002632790858725762, 0.09451074335350361, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.207],
[167, 537, 0, 0.00832595041322314, 0.08808100664460242, 991.0, 991.0, 991.0, 0, 2, 1, -360, 25.186],
[168, 104, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.743],
[531, 520, 0, 0.016156694214876033, 0.042730794079516396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 24.436999999999998],
[139, 520, 0, 0.010682314049586776, 0.0282522993797748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.157],
[520, 169, 0, 0.0011328925619834712, 0.0119849761681232, 991.0, 991.0, 991.0, 0, 2, 1, -360, 3.427],
[168, 105, 0, 0.007340893351800554, 0.26352009133553606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.401],
[520, 170, 0, 0.005842644628099174, 0.015452470732151198, 495.0, 495.0, 495.0, 0, 2, 1, -360, 8.837],
[171, 89, 0, 0.005505454545454546, 0.058242717567848004, 991.0, 991.0, 991.0, 0, 1, 1, -360, 16.654],
[521, 172, 0, 0.006304793388429752, 0.06669899780522001, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.072],
[123, 173, 0, 0.005247403047091413, 0.18836891696656402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.309],
[521, 174, 0, 0.013300495867768597, 0.035176796844864404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.117],
[37, 39, 0, 0.004338873499549862, 0.35044859579205606, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 37.592],
[530, 175, 0, 0.013128595041322313, 0.0347221581224188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.857],
[530, 176, 0, 0.005685289256198347, 0.01503630144005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.599],
[88, 530, 0, 0.006015867768595041, 0.0159106066755372, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.099],
[177, 496, 0, 0.018632066115702478, 0.19711036673178398, 991.0, 991.0, 991.0, 0, 2, 1, -360, 56.361999999999995],
[178, 525, 0, 0.03106842975206612, 0.08216895464241199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.99100000000001],
[179, 493, 0, 0.057079669421487594, 0.15096278779194802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.333],
[180, 181, 0, 0.041027438016528923, 0.10850827416682, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.053999999999995],
[182, 180, 0, 0.00866314049586777, 0.09164817200545601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 26.206],
[179, 181, 0, 0.01957223140495868, 0.051764115772731996, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.603],
[180, 493, 0, 0.06676561983471074, 0.17657993119175203, 495.0, 495.0, 495.0, 0, 1, 1, -360, 100.98299999999999],
[183, 30, 0, 0.0024804362880886427, 0.356166349712776, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 28.654],
[183, 21, 0, 0.0025647506925207757, 0.36827307214930394, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 29.628],
[538, 185, 0, 0.018631404958677687, 0.0123189607681008, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.09],
[538, 89, 0, 0.014509752066115702, 0.038375005396288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.945999999999998],
[184, 186, 0, 0.0016554709141274237, 0.059427351084826, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.562000000000001],
[184, 187, 0, 0.002698753462603878, 0.09687863927102919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.588],
[520, 172, 0, 0.0034188429752066113, 0.0361682589818792, 991.0, 991.0, 991.0, 0, 2, 1, -360, 10.342],
[89, 175, 0, 0.0037309090909090903, 0.0098674088877672, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.643],
[185, 89, 0, 0.005812892561983471, 0.0153737832609196, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.792],
[89, 188, 0, 0.003108760330578513, 0.008221966434607202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.702],
[189, 190, 0, 0.008599492151454294, 0.17364414688031998, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.253],
[539, 172, 0, 0.0021570247933884296, 0.022819366646419197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 6.525],
[504, 192, 0, 0.0003084297520661157, 0.00326290713886456, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.9329999999999999],
[105, 186, 0, 0.003273372576177285, 0.1175060580379876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.907],
[105, 187, 0, 0.0021712257617728533, 0.0779416868808324, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.540999999999999],
[539, 193, 0, 0.005608595041322314, 0.01483346262541, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.482999999999999],
[187, 194, 0, 4.8649584487534626e-05, 0.0069856037041576, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.562],
[539, 540, 0, 0.004394710743801653, 0.0116230138006708, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.647],
[539, 196, 0, 0.00332297520661157, 0.008788516227194, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.026],
[197, 540, 0, 0.004737190082644629, 0.012528794024621601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.165],
[110, 198, 0, 0.00018724030470914128, 0.02688587333118328, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 2.1630000000000003],
[197, 539, 0, 0.009172231404958677, 0.024258473063998802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.873],
[199, 537, 0, 0.03612826446280991, 0.0238877676441712, 248.0, 248.0, 248.0, 0, 1, 1, -360, 27.322],
[134, 526, 0, 0.007771239669421488, 0.020553167475975197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.754000000000001],
[200, 193, 0, 0.0009322314049586776, 0.009862163056380801, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.82],
[4, 201, 0, 0.013726108033240996, 0.49273365914097605, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 79.282],
[202, 86, 0, 0.00013365650969529087, 0.00479794133417816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.772],
[85, 203, 0, 0.0019011426592797783, 0.2729854600553416, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 21.962],
[147, 204, 0, 0.0073874380165289254, 0.0781523963903056, 991.0, 991.0, 991.0, 0, 2, 1, -360, 22.346999999999998],
[147, 205, 0, 0.005959669421487603, 0.00394049369636956, 248.0, 248.0, 248.0, 0, 1, 1, -360, 4.507],
[123, 206, 0, 0.0005753116343490305, 0.0826091142668064, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 6.646],
[537, 207, 0, 0.018456198347107437, 0.048812461297776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.915],
[165, 208, 0, 0.00414612188365651, 0.14883562055771601, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.948],
[4, 94, 0, 0.013687673130193905, 0.49135394025941603, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 79.06],
[4, 2, 0, 5.2054478301015697e-05, 0.016817654469309, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 0.902],
[209, 4, 0, 0.0022369286703601107, 0.32120104149338397, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 25.840999999999998],
[119, 163, 0, 0.003535145429362881, 0.12690306230914922, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.419],
[210, 3, 0, 0.0003150969529085873, 0.011311208844832242, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.82],
[99, 211, 0, 0.0035045013850415513, 0.1258030161741948, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.242],
[99, 69, 0, 0.021717970914127423, 0.7796219621557, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 125.443],
[212, 99, 0, 0.008453774238227147, 0.30346978938770003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 48.82899999999999],
[213, 214, 0, 0.01490115702479339, 0.15764073118032798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 45.076],
[510, 215, 0, 0.002174710743801653, 0.09202587186721281, 1981.0, 1981.0, 1981.0, 0, 4, 1, -360, 13.157],
[128, 69, 0, 0.010711651662049862, 1.538088234801848, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 123.741],
[216, 69, 0, 0.009628462603878117, 1.3825528982351443, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 111.228],
[217, 98, 0, 0.0012787396121883656, 0.045903620070299994, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 7.386],
[504, 218, 0, 0.027480991735537193, 0.072680994226412, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.565],
[177, 504, 0, 0.07054809917355372, 0.18658373169634002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 106.704],
[219, 209, 0, 0.003938798476454294, 0.5655728721401839, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 45.501000000000005],
[219, 220, 0, 0.0013026315789473684, 0.1870451326342096, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 15.048],
[94, 95, 0, 0.01070740997229917, 0.38436979242743197, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 61.846000000000004],
[159, 221, 0, 0.009937153739612188, 0.356719480257712, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 57.397],
[34, 161, 0, 0.010965289256198347, 0.116002818645824, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.17],
[222, 221, 0, 0.0046457756232686975, 0.16677196601221997, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 26.834],
[211, 52, 0, 0.05267313019390582, 0.472709090515552, 856.0, 856.0, 856.0, 0, 1, 1, -360, 152.12],
[215, 223, 0, 0.04873190082644628, 0.128884831985184, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.707],
[224, 215, 0, 0.019086280991735535, 0.050478887076288004, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.868000000000002],
[225, 224, 0, 0.04200925619834711, 0.11110496071615601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 63.538999999999994],
[224, 223, 0, 0.031061818181818183, 0.082151468537468, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.981],
[226, 6, 0, 0.06420099173553719, 0.0424492677936932, 248.0, 248.0, 248.0, 0, 1, 1, -360, 48.552],
[7, 3, 0, 0.009332929362880887, 0.335029305054692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 53.907],
[216, 227, 0, 0.01989941135734072, 0.7143401282507, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.939],
[228, 229, 0, 0.010545454545454545, 0.027890337012274, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.95],
[227, 230, 0, 0.003993074792243767, 0.573366419334696, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 46.128],
[231, 53, 0, 0.007193213296398893, 1.0328749562310842, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 83.096],
[544, 545, 0, 0.013061818181818181, 0.034545548464856, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.756],
[234, 235, 0, 0.04608859504132231, 0.121893887321888, 495.0, 495.0, 495.0, 0, 1, 1, -360, 69.709],
[546, 214, 0, 0.057025454545454546, 0.15081940173295602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.251],
[233, 227, 0, 0.0029001038781163438, 0.1041066260218888, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.750999999999998],
[237, 238, 0, 0.026324628099173554, 0.06962267451304, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.816],
[212, 100, 0, 0.007955505540166205, 0.285583163531816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 45.951],
[519, 239, 0, 0.01740429752066116, 0.046030422038308406, 495.0, 495.0, 495.0, 0, 1, 1, -360, 26.324],
[238, 519, 0, 0.015166280991735538, 0.040111375593995205, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.939],
[213, 240, 0, 0.01665388429752066, 0.04404574915373599, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 25.189],
[241, 242, 0, 0.009862015235457064, 0.3540221919932281, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 56.963],
[70, 241, 0, 0.003819858033240997, 0.5484941897752321, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 44.126999999999995],
[509, 213, 0, 0.011363636363636364, 0.120216969880216, 991.0, 991.0, 991.0, 0, 2, 1, -360, 34.375],
[68, 243, 0, 0.003611668975069252, 0.1296500701715312, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.861],
[243, 244, 0, 0.0007699099722991691, 0.027637882270859202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.447],
[68, 244, 0, 0.004104051246537396, 0.147325387728876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.705],
[544, 547, 0, 0.02418776859504132, 0.255884661882476, 991.0, 991.0, 991.0, 0, 1, 1, -360, 73.168],
[245, 227, 0, 0.012676419667590028, 0.45505241780707606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.219],
[246, 208, 0, 0.0010155817174515235, 0.0364568961999408, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.8660000000000005],
[112, 208, 0, 0.0017927631578947367, 0.0643558063672372, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.355],
[165, 247, 0, 0.0002113919667590028, 0.0075884538459086, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.2209999999999999],
[537, 549, 0, 0.00032066115702479337, 0.00084807607842936, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.485],
[537, 550, 0, 0.00032198347107438016, 0.0008515732993697601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.48700000000000004],
[537, 551, 0, 0.0002651239669421488, 0.0007011927988648, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.401],
[110, 251, 0, 0.00023857340720221602, 0.008564200982522441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.3780000000000001],
[510, 252, 0, 0.08467702479338843, 0.055987884365424005, 248.0, 248.0, 248.0, 0, 1, 1, -360, 64.03699999999999],
[529, 253, 0, 0.04859504132231405, 0.12852286961777998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.5],
[237, 239, 0, 0.03309421487603306, 0.08752669712542799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 50.055],
[254, 238, 0, 0.07815008264462811, 0.05167231372274401, 248.0, 248.0, 248.0, 0, 1, 1, -360, 59.101000000000006],
[69, 255, 0, 0.0009369806094182826, 0.134541235754472, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 10.824000000000002],
[510, 225, 0, 0.021953719008264466, 0.232250442756508, 991.0, 991.0, 991.0, 0, 1, 1, -360, 66.41],
[256, 257, 0, 0.010125619834710746, 0.0267799693631888, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.315],
[258, 190, 0, 0.011717451523545707, 0.10515695255750121, 856.0, 856.0, 856.0, 0, 1, 1, -360, 33.84],
[258, 259, 0, 0.015782548476454293, 0.1416387085570408, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.58],
[260, 261, 0, 0.006791031855955679, 0.9751256416231477, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 78.45],
[554, 553, 0, 0.17583338842975205, 0.11625986438453201, 248.0, 248.0, 248.0, 0, 1, 1, -360, 132.974],
[515, 263, 0, 0.006987107438016529, 0.0739172618295936, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.136],
[14, 264, 0, 0.01700694214876033, 0.17991802858084, 991.0, 991.0, 991.0, 0, 1, 1, -360, 51.446000000000005],
[116, 555, 0, 0.0009768595041322315, 0.0103342878835768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.955],
[151, 116, 0, 0.007244958677685951, 0.0191612735410668, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.958],
[111, 114, 0, 0.008806613573407202, 0.3161358573133961, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.867],
[77, 111, 0, 0.00288452216066482, 0.41418912211817605, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 33.321999999999996],
[266, 525, 0, 0.01042909090909091, 0.027582581569373602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.774000000000001],
[267, 120, 0, 0.013136945983379503, 0.471584184581432, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 75.87899999999999],
[268, 269, 0, 0.0010327272727272726, 0.0027313295556817604, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.5619999999999998],
[556, 271, 0, 0.052289586776859506, 0.0345735262323792, 248.0, 248.0, 248.0, 0, 1, 1, -360, 39.544000000000004],
[556, 272, 0, 0.04685355371900827, 0.030979257409249603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.433],
[529, 273, 0, 0.0034604958677685953, 0.009152227205140799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.234],
[128, 274, 0, 0.0029350761772853184, 0.1053620459045884, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.953],
[34, 275, 0, 0.0008290909090909092, 0.00054818938265696, 248.0, 248.0, 248.0, 0, 1, 1, -360, 0.627],
[503, 276, 0, 0.006707438016528925, 0.07095861291266, 991.0, 991.0, 991.0, 0, 2, 1, -360, 20.29],
[503, 504, 0, 0.06432727272727272, 0.680524223098808, 991.0, 991.0, 991.0, 0, 2, 1, -360, 194.59],
[177, 218, 0, 0.04330380165289256, 0.114528740018308, 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497],
[277, 278, 0, 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 83.072],
[557, 558, 0, 0.04341289256198347, 0.258338836678648, 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493],
[557, 559, 0, 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 51.665],
[559, 558, 0, 0.04474314049586777, 0.11833546501370001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999],
[277, 78, 0, 0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1, -360, 103.557],
[277, 279, 0, 0.021390927977839334, 0.191970480441328, 856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777],
[78, 279, 0, 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.665],
[281, 282, 0, 0.0023178670360110803, 0.08320574945862161, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388],
[283, 161, 0, 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1, -360, 55.571000000000005],
[268, 161, 0, 0.018883636363636366, 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, 57.123000000000005],
[256, 284, 0, 0.010755371900826446, 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535],
[515, 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.576],
[263, 516, 0, 0.0030355371900826445, 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365],
[516, 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.449000000000002],
[63, 286, 0, 0.019088925619834708, 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872],
[287, 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.105],
[8, 102, 0, 0.015100069252077563, 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 87.21799999999999],
[8, 101, 0, 0.019246883656509697, 0.69091598202144, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17],
[80, 288, 0, 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 46.11600000000001],
[80, 289, 0, 0.0003782317636201524, 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, 6.553999999999999],
[276, 560, 0, 0.01778314049586777, 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897],
[37, 290, 0, 0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 48.773999999999994],
[290, 74, 0, 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 179.483],
[512, 291, 0, 0.0053299173553719, 0.056385693247479204, 991.0, 991.0, 991.0, 0, 2, 1, -360, 16.123],
[78, 292, 0, 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 50.381],
[199, 548, 0, 0.0015530578512396695, 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349],
[491, 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0, 0, 1, 1, -360, 10.720999999999998],
[4, 294, 0, 9.669321329639889e-05, 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117],
[490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, 495.0, 0, 1, 1, -360, 76.503],
[491, 295, 0, 0.010613553719008264, 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053],
[491, 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.656000000000001],
[295, 297, 0, 0.020297520661157024, 0.053682341459340005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.7],
[508, 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.15],
[117, 123, 0, 0.005876211911357341, 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941],
[133, 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 12.907],
[71, 74, 0, 0.03904524469065097, 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144],
[74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 89.09200000000001],
[298, 515, 0, 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.823],
[5, 299, 0, 0.0016232686980609415, 0.058271370400665996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376],
[32, 292, 0, 0.009679362880886427, 0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.908],
[5, 29, 0, 0.00743395083102493, 1.0674425076571843, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001],
[503, 560, 0, 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, -360, 45.8],
[300, 301, 0, 0.004892053324099723, 0.7024509290644521, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005],
[51, 300, 0, 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 29.729],
[244, 302, 0, 0.007714508310249307, 1.107727813004004, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118],
[31, 302, 0, 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 50.472],
[51, 282, 0, 0.006288434903047093, 0.9029576432132521, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999],
[303, 304, 0, 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.254],
[305, 304, 0, 0.003881117266849031, 0.0783689646873844, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813],
[305, 259, 0, 0.0025625, 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 29.601999999999997],
[306, 307, 0, 0.03223268698060942, 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088],
[305, 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.01],
[305, 309, 0, 0.011014773776523545, 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716],
[310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.253],
[306, 309, 0, 0.035333795013850415, 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044],
[311, 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 19.833],
[280, 278, 0, 0.009749769159764544, 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 84.47200000000001],
[311, 32, 0, 0.01205909510619806, 0.9740069506375919, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 104.48],
[13, 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 50.049],
[313, 314, 0, 0.006092624653739613, 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191],
[312, 313, 0, 0.00893957756232687, 0.32090893884734, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.635],
[547, 566, 0, 0.027035702479338848, 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783],
[245, 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 81.803],
[312, 316, 0, 8.803670360110802e-05, 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.0170000000000001],
[312, 314, 0, 0.005339854570637119, 0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.843000000000004],
[554, 546, 0, 0.08174743801652892, 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 123.64299999999999],
[262, 216, 0, 0.042641966759002774, 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15],
[317, 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.464000000000002],
[318, 317, 0, 0.008311634349030471, 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006],
[231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 203.683],
[319, 567, 0, 0.006089586776859504, 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421],
[557, 321, 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2, 1, -360, 30.264],
[277, 65, 0, 0.009430170821779778, 0.7616700793261759, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703],
[322, 288, 0, 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.706],
[322, 323, 0, 0.0018503000923372577, 0.14944779312484, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031],
[277, 324, 0, 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 85.425],
[324, 325, 0, 0.01103508771932133, 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.803999999999995],
[277, 325, 0, 0.008665743305609418, 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54],
[326, 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.577],
[328, 326, 0, 0.10300958677685952, 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 77.90100000000001],
[328, 327, 0, 0.09827173553719008, 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318],
[326, 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.443999999999996],
[568, 329, 0, 0.05699900826446282, 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211],
[568, 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.681999999999995],
[332, 78, 0, 0.006471029547541551, 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065],
[333, 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 49.559],
[332, 333, 0, 0.007504674515235457, 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347],
[332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 49.456],
[66, 334, 0, 0.030625, 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445],
[330, 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 63.598],
[336, 66, 0, 0.015054362880886425, 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477],
[330, 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0, 1, 1, -360, 112.73700000000001],
[68, 70, 0, 0.016314058171745152, 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115],
[509, 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 52.848],
[324, 288, 0, 0.012627423822714683, 0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468],
[338, 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0, 2, 1, -360, 27.915],
[339, 559, 0, 0.03560595041322315, 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927],
[339, 340, 0, 0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 131.762],
[559, 340, 0, 0.20983272727272728, 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686],
[341, 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 8.083],
[557, 342, 0, 0.006019834710743802, 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21],
[558, 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0, 0, 1, 1, -360, 32.217],
[502, 340, 0, 0.021737520661157025, 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756],
[72, 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 78.03399999999999],
[344, 345, 0, 0.0005762927054480609, 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993],
[346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 6.55],
[46, 47, 0, 0.0008975069252077563, 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184],
[346, 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.169],
[347, 328, 0, 0.029905454545454544, 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232],
[347, 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.862],
[571, 348, 0, 0.041548429752066116, 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842],
[347, 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 24.279],
[571, 570, 0, 0.17379041322314048, 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429],
[14, 350, 0, 0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.772],
[350, 573, 0, 0.026277685950413226, 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745],
[15, 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0, 1, 1, -360, 76.222],
[352, 15, 0, 0.0015260560941828254, 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629],
[15, 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, 5134.0, 0, 1, 1, -360, 61.235],
[232, 227, 0, 5.5747922437673134e-05, 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161],
[565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 59.714],
[235, 567, 0, 0.02391404958677686, 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34],
[567, 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, 1981.0, 0, 1, 1, -360, 48.816],
[353, 519, 0, 0.007621818181818182, 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, 23.055999999999997],
[354, 353, 0, 0.0008436363636363636, 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552],
[355, 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.360999999999999],
[354, 356, 0, 0.01855404958677686, 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.063000000000002],
[357, 358, 0, 0.0034823407202216067, 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228],
[574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.195],
[235, 575, 0, 0.007459504132231404, 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565],
[167, 361, 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.864],
[528, 362, 0, 0.0011960330578512398, 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.6180000000000003],
[363, 344, 0, 0.0002662742382271468, 0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538],
[259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.618],
[54, 56, 0, 0.007723337950138504, 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305],
[365, 364, 0, 0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 21.649],
[231, 366, 0, 0.0013273891966759002, 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 7.667000000000001],
[30, 367, 0, 0.01126108033240997, 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522],
[61, 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0, 0, 1, 1, -360, 58.735],
[254, 368, 0, 0.0004297520661157025, 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3],
[254, 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.484],
[254, 370, 0, 0.0003669421487603306, 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11],
[99, 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 23.316999999999997],
[354, 519, 0, 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1, -360, 20.457],
[571, 371, 0, 0.023726942148760328, 0.06275238397221199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887],
[207, 372, 0, 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.523],
[57, 373, 0, 0.0017725619834710745, 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681],
[209, 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.847],
[375, 376, 0, 0.0045364727608518006, 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652],
[376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 13.38],
[16, 49, 0, 0.002266101108033241, 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 26.178],
[318, 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 20.599],
[378, 297, 0, 0.01753917355371901, 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 26.528000000000002],
[562, 379, 0, 0.01802314049586777, 0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26],
[576, 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.735],
[576, 381, 0, 0.0034320661157024794, 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191],
[577, 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 90.818],
[244, 383, 0, 0.006845567867036011, 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655],
[244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 116.059],
[383, 306, 0, 0.0300685595567867, 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838],
[380, 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 2.958],
[252, 225, 0, 0.062094545454545444, 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, 46.958999999999996],
[220, 76, 0, 0.002772074099722992, 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023],
[542, 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.009],
[385, 384, 0, 0.053734876033057856, 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637],
[542, 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0, 2, 1, -360, 34.201],
[386, 385, 0, 0.003668760330578512, 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.097999999999999],
[387, 578, 0, 0.015444628099173553, 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72],
[332, 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 81.07300000000001],
[382, 332, 0, 0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 102.60700000000001],
[382, 388, 0, 0.00476159972299169, 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503],
[579, 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.905],
[577, 387, 0, 0.07597818181818182, 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917],
[144, 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.647],
[37, 49, 0, 0.008441481994459835, 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758],
[391, 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 41.042],
[392, 310, 0, 0.007035318559556785, 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, 20.317999999999998],
[260, 393, 0, 0.006341412742382271, 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314],
[394, 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0, 0, 1, 1, -360, 2.1919999999999997],
[395, 282, 0, 0.008762984764542936, 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615],
[395, 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 19.665],
[25, 396, 0, 0.008809037396121884, 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881],
[81, 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 43.44],
[278, 80, 0, 0.016286011080332407, 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068],
[81, 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 121.60799999999999],
[569, 570, 0, 0.03253950413223141, 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216],
[397, 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 9.513],
[542, 398, 0, 0.0005580165289256199, 0.0059033089500572, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.6880000000000002],
[398, 385, 0, 0.021893553719008262, 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, 33.114000000000004],
[399, 499, 0, 0.03266380165289256, 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, 24.701999999999998],
[83, 399, 0, 0.025700495867768593, 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436],
[498, 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.352999999999998],
[518, 239, 0, 0.04685289256198347, 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865],
[575, 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, 991.0, 0, 1, 1, -360, 9.168],
[401, 360, 0, 0.007957063711911357, 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98],
[580, 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.790999999999999],
[401, 402, 0, 0.0033434903047091418, 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656],
[403, 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.701999999999998],
[189, 360, 0, 0.028456024930747923, 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181],
[234, 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.24],
[235, 404, 0, 0.05107504132231405, 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251],
[235, 580, 0, 0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.878],
[216, 259, 0, 0.0022115650969529088, 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 12.774000000000001],
[405, 259, 0, 0.0052832409972299165, 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516],
[405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 38.323],
[406, 230, 0, 8.098164819944598e-05, 0.046512685161986804, 6845.0, 6845.0, 6845.0, 0, 1, 1, -360, 1.871],
[542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.674],
[23, 408, 0, 0.03224528925619835, 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771],
[577, 348, 0, 0.012999008264462809, 0.13751772188026398, 991.0, 991.0, 991.0, 0, 2, 1, -360, 39.321999999999996],
[562, 564, 0, 0.06921520661157024, 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, 104.68799999999999],
[582, 507, 0, 0.006357685950413223, 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616],
[27, 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.544],
[501, 27, 0, 0.003811570247933884, 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53],
[27, 411, 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.031000000000001],
[411, 410, 0, 0.002054214876033058, 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.1069999999999998],
[403, 360, 0, 0.008191481994459833, 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, 23.656999999999996],
[412, 360, 0, 0.016761772853185596, 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408],
[326, 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 36.533],
[414, 413, 0, 0.008093223140495867, 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482],
[6, 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.725999999999999],
[554, 580, 0, 0.07435371900826447, 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46],
[262, 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, 0, 1, 1, -360, 113.53399999999999],
[499, 556, 0, 0.04185586776859504, 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, 63.306999999999995],
[224, 229, 0, 0.004135206611570248, 0.0437467367631624, 991.0, 991.0, 991.0, 0, 1, 1, -360, 12.509],
[583, 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.257],
[415, 307, 0, 0.015675554016620498, 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271],
[416, 507, 0, 0.0010555371900826446, 0.011166626467730801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.193],
[284, 561, 0, 0.015221487603305786, 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045],
[543, 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 1981.0, 1981.0, 0, 4, 1, -360, 4.002],
[418, 506, 0, 0.0009395041322314049, 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842],
[220, 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 26.566999999999997],
[295, 419, 0, 0.0012023140495867769, 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637],
[295, 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.421],
[541, 62, 0, 0.05133355371900827, 0.0339414035471236, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821],
[52, 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.802],
[60, 160, 0, 6.128808864265928e-05, 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177],
[535, 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.113],
[267, 282, 0, 0.0065652700831024926, 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921],
[52, 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 33.164],
[28, 27, 0, 0.015726942148760328, 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787],
[30, 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 52.725],
[422, 81, 0, 0.0004226685133887349, 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324],
[119, 425, 0, 0.003579120498614958, 0.1284816595874996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.673000000000002],
[423, 425, 0, 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 3.765],
[424, 425, 0, 0.005922957063711911, 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360,
-360,\t\t34.211\t\t],\n\t\t[426,\t\t428,\t\t0,\t\t0.013948429752066116,\t\t0.14756174042535197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t42.193999999999996\t\t],\n\t\t[427,\t\t428,\t\t0,\t\t0.0002664462809917355,\t\t0.0028187600792304794,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.8059999999999999\t\t],\n\t\t[19,\t\t428,\t\t0,\t\t0.023607603305785128,\t\t0.24974703912892798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t71.413\t\t],\n\t\t[45,\t\t429,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t429,\t\t0,\t\t5.289256198347107e-05,\t\t0.00013988883767892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08\t\t],\n\t\t[505,\t\t429,\t\t0,\t\t0.006012561983471073,\t\t0.015901863623161996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.094\t\t],\n\t\t[231,\t\t431,\t\t0,\t\t0.011677285318559558,\t\t0.4191859418495199,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.44800000000001\t\t],\n\t\t[190,\t\t431,\t\t0,\t\t0.009600761772853185,\t\t0.34464383257266795,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.45399999999999\t\t],\n\t\t[430,\t\t431,\t\t0,\t\t0.0028100761772853187,\t\t0.1008748520662472,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.230999999999998\t\t],\n\t\t[286,\t\t433,\t\t0,\t\t0.01568694214876033,\t\t0.16595362535967603,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.453\t\t],\n\t\t[432,\t\t433,\t\t0,\t\t0.00010049586776859504,\t\t0.00106315516636076,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.304\t\t],\n\t\t[506,\t\t433,\t\t0,\t\t0.0065904132231404955,\t\t0.06972059669946801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.936\t\t],\n\t\t[23,\t\t434,\t\t0,\t\t0.02613685950413223,\t\t0.069126069139116,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.532\t\t],\n\t\t[400,\t\t434,\t\t0,\t\t0.008155371900826446,\t\t0.021569110159669603,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.335\t\t],\n\t\t[500,\t\t434,\t\t0,\t\t0.006338512396694216,\t\t0.0167639285853336,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.587\t\t],\n\t\t[32,\t\t436,\t\t0,\t\t0.0044813019390581715,\t\t0.16086776359270402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.884\t\t],\n\t\t[435,\t\t436,\t\t0,\t\t0.0006634349030470914,\t\t0.023815688073266,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.832\t\t],\n\t\t[78,\t\t436,\t\t0,\t\t0.00897680055401662,\t\t0.32224515307884394,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.85\t\t],\n\t\t[86,\t\t438,\t\t0,\t\t0.014693213296398892,\t\t0.52745036936438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t84.868\t\t],\n\t\t[437,\t\t438,\t\t0,\t\t1.0387811634349031e-05,\t\t0.0003728969948845,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.06\t\t],\n\t\t[221,\t\t438,\t\t0,\t\t0.002280124653739612,\t\t0.081850890377238,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.17\t\t],\n\t\t[207,\t\t439,\t\t0,\t\t0.055703801652892564,\t\t0.0368309823503996,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.126000000000005\t\t],\n\t\t[516,\t\t439,\t\t0,\t\t0.05448462809917355,\t\t0.03602487292327441,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.20399999999999\t\t],\n\t\t[513,\t\t439,\t\t0,\t\t0.046726611570247926,\t\t0.0308953241066
316,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.336999999999996\t\t],\n\t\t[181,\t\t441,\t\t0,\t\t0.040805289256198356,\t\t0.10792074104825197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.718\t\t],\n\t\t[440,\t\t441,\t\t0,\t\t0.0001322314049586777,\t\t0.000349722094197784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.2\t\t],\n\t\t[504,\t\t441,\t\t0,\t\t0.05916099173553719,\t\t0.156467413554364,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t89.48100000000001\t\t],\n\t\t[135,\t\t442,\t\t0,\t\t0.004956890581717451,\t\t0.177940231009092,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.631\t\t],\n\t\t[109,\t\t442,\t\t0,\t\t0.0015380886426592797,\t\t0.055213615042649204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.884\t\t],\n\t\t[112,\t\t442,\t\t0,\t\t0.0027304362880886425,\t\t0.09801597510545401,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.770999999999999\t\t],\n\t\t[113,\t\t443,\t\t0,\t\t0.0019885734072022164,\t\t0.07138491472072879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.485999999999999\t\t],\n\t\t[132,\t\t443,\t\t0,\t\t0.006788434903047091,\t\t0.24368818615747198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.21\t\t],\n\t\t[107,\t\t443,\t\t0,\t\t2.2333795013850418e-05,\t\t0.000801728539002036,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.129\t\t],\n\t\t[444,\t\t445,\t\t0,\t\t7.877423822714682e-05,\t\t0.00282780221121528,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.455\t\t],\n\t\t[112,\t\t445,\t\t0,\t\t0.002816135734072022,\t\t0.101092375313206,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.266\t\t],\n\t\t[109,\t\t445,\t\t0,\t\t0.0014354224376731304,\t\t0.0515281497432104,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.291\t\t],\n\t\t[119,\t\t447,\t\t0,\t\t0.005212690443213296,\t\t0.74849127803204,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t60.217\t\t],\n\t\t[100,\t\t447,\t\t0,\t\t0.0050695117728531865,\t\t0.7279322237145921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t58.563\t\t],\n\t\t[446,\t\t447,\t\t0,\t\t2.9518698060941832e-05,\t\t0.00423859584186224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.341\t\t],\n\t\t[124,\t\t448,\t\t0,\t\t6.509695290858726e-05,\t\t0.00233682116794768,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.376\t\t],\n\t\t[125,\t\t448,\t\t0,\t\t0.00615148891966759,\t\t0.22082338542026803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.531\t\t],\n\t\t[131,\t\t448,\t\t0,\t\t3.912742382271468e-05,\t\t0.0014045786807313759,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.226\t\t],\n\t\t[449,\t\t450,\t\t0,\t\t0.0023614958448753462,\t\t0.08477191683710039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.64\t\t],\n\t\t[173,\t\t450,\t\t0,\t\t0.002862361495844876,\t\t0.10275176694050518,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.533\t\t],\n\t\t[184,\t\t450,\t\t0,\t\t0.004022853185595568,\t\t0.14441057621844403,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.236\t\t],\n\t\t[144,\t\t451,\t\t0,\t\t0.007672727272727273,\t\t0.020292624515794402,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.605\t\t],\n\t\t[140,\t\t451,\t\t0,\t\t0.006991074380165291,\t\t0.018489807120219602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.574000000
000002\t\t],\n\t\t[514,\t\t451,\t\t0,\t\t0.01149289256198347,\t\t0.030396095817207994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.383\t\t],\n\t\t[537,\t\t585,\t\t0,\t\t0.05072595041322314,\t\t0.134158641165824,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.723\t\t],\n\t\t[141,\t\t585,\t\t0,\t\t0.007994710743801653,\t\t0.0211441978151932,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.092\t\t],\n\t\t[584,\t\t585,\t\t0,\t\t9.256198347107438e-05,\t\t0.000244805465938352,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.14\t\t],\n\t\t[522,\t\t454,\t\t0,\t\t0.0035008264462809916,\t\t0.0092588924438956,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.295\t\t],\n\t\t[144,\t\t454,\t\t0,\t\t0.00452892561983471,\t\t0.011977981726290799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.85\t\t],\n\t\t[453,\t\t454,\t\t0,\t\t0.001114710743801653,\t\t0.0029481572540882,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.686\t\t],\n\t\t[199,\t\t456,\t\t0,\t\t0.013063140495867768,\t\t0.0086372614214612,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.879\t\t],\n\t\t[140,\t\t456,\t\t0,\t\t0.005061818181818182,\t\t0.013387361765852802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.656000000000001\t\t],\n\t\t[455,\t\t456,\t\t0,\t\t0.0011365289256198346,\t\t0.00300586139962416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.719\t\t],\n\t\t[537,\t\t456,\t\t0,\t\t0.039058512396694216,\t\t0.025825228046024003,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.538\t\t],\n\t\t[538,\t\t457,\t\t0,\t\t0.027927272727272728,\t\t0.0184653265736368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.12\t\t],\n\t\t[153,\t\t457,\t\t0,\t\t0.030093223140495867,\t\t0.019897438549384,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.758000000000003\t\t],\n\t\t[176,\t\t457,\t\t0,\t\t0.004579173553719009,\t\t0.0030277190305137603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.463\t\t],\n\t\t[524,\t\t459,\t\t0,\t\t0.004318677685950414,\t\t0.011421923596476799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.532\t\t],\n\t\t[458,\t\t459,\t\t0,\t\t0.001993388429752066,\t\t0.0052720605700488,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.015\t\t],\n\t\t[134,\t\t459,\t\t0,\t\t0.011813553719008265,\t\t0.031244171895617998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.868\t\t],\n\t\t[460,\t\t461,\t\t0,\t\t6.611570247933885e-05,\t\t0.000174861047098892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.1\t\t],\n\t\t[150,\t\t461,\t\t0,\t\t0.008018512396694214,\t\t0.021207147792120403,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.128\t\t],\n\t\t[149,\t\t461,\t\t0,\t\t0.005586115702479339,\t\t0.0147740098693748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.449\t\t],\n\t\t[521,\t\t463,\t\t0,\t\t0.014348429752066114,\t\t0.009487086110365599,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.850999999999999\t\t],\n\t\t[462,\t\t463,\t\t0,\t\t0.007197355371900825,\t\t0.0047588433967958406,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.443\t\t],\n\t\t[538,\t\t463,\t\t0,\t\t0.012211570247933883,\t\t0.0080742088497664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.235\t\t],\n\t\t[110,\t\t464,\t\t0,\t\t0.0025753116343490306,\t\t0.0924473799817492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.8
75\t\t],\n\t\t[90,\t\t464,\t\t0,\t\t0.007328947368421053,\t\t0.26309125979076,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.332\t\t],\n\t\t[165,\t\t464,\t\t0,\t\t0.002152527700831025,\t\t0.0772704722900764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.433\t\t],\n\t\t[458,\t\t465,\t\t0,\t\t0.002003305785123967,\t\t0.0052982897270776,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.03\t\t],\n\t\t[134,\t\t465,\t\t0,\t\t0.011838677685950413,\t\t0.031310619093534,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.906\t\t],\n\t\t[524,\t\t465,\t\t0,\t\t0.004293553719008264,\t\t0.0113554763986092,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.494\t\t],\n\t\t[466,\t\t467,\t\t0,\t\t0.0023509349030470914,\t\t0.084392804892244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.579\t\t],\n\t\t[110,\t\t467,\t\t0,\t\t0.0025337603878116343,\t\t0.09095579200221118,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.635\t\t],\n\t\t[165,\t\t467,\t\t0,\t\t0.0022891274238227145,\t\t0.08217406777274441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.222000000000001\t\t],\n\t\t[468,\t\t469,\t\t0,\t\t0.0005269421487603305,\t\t0.0013936425453786,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.797\t\t],\n\t\t[541,\t\t469,\t\t0,\t\t0.022390743801652895,\t\t0.05921844221026801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.866\t\t],\n\t\t[490,\t\t469,\t\t0,\t\t0.028243305785123966,\t\t0.07469714209944801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.718\t\t],\n\t\t[263,\t\t471,\t\t0,\t\t0.0371900826446281,\t\t0.0245898347482832,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.125\t\t],\n\t\t[470,\t\t471,\t\t0,\t\t0.001570909090909091,\t\t0.0010386746197682802,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.188\t\t],\n\t\t[534,\t\t471,\t\t0,\t\t0.024497190082644622,\t\t0.0161973787927468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.526\t\t],\n\t\t[136,\t\t472,\t\t0,\t\t0.0007079293628808865,\t\t0.025412930201351602,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.0889999999999995\t\t],\n\t\t[110,\t\t472,\t\t0,\t\t0.00019511772853185596,\t\t0.0070042485539216805,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.127\t\t],\n\t\t[251,\t\t472,\t\t0,\t\t4.207063711911357e-05,\t\t0.00151023282928764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.243\t\t],\n\t\t[226,\t\t474,\t\t0,\t\t0.017639669421487602,\t\t0.011663231841509601,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.34\t\t],\n\t\t[473,\t\t474,\t\t0,\t\t0.003467107438016529,\t\t0.00916971330986216,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.244\t\t],\n\t\t[257,\t\t474,\t\t0,\t\t0.020264462809917356,\t\t0.053594910935781594,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.65\t\t],\n\t\t[6,\t\t474,\t\t0,\t\t0.08066247933884299,\t\t0.05333349367016,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.001000000000005\t\t],\n\t\t[299,\t\t475,\t\t0,\t\t0.013238227146814403,\t\t0.47521993028123993,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.464\t\t],\n\t\t[3,\t\t475,\t\t0,\t\t0.0002794321329639889,\t\t0.010030929162389441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.614\t\t],\n\t\t[210,\t\t475,\t\t0,\t\t0.0001481994459833795,\t\t0.00531999712702368,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t
1,\t\t1,\t\t-360,\t\t0.856\t\t],\n\t\t[297,\t\t476,\t\t0,\t\t0.0193500826446281,\t\t0.05117658265464801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.267\t\t],\n\t\t[296,\t\t476,\t\t0,\t\t0.005596694214876033,\t\t0.014801987636898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.465\t\t],\n\t\t[295,\t\t476,\t\t0,\t\t0.0009474380165289256,\t\t0.00250575880492432,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.433\t\t],\n\t\t[313,\t\t478,\t\t0,\t\t0.008696849030470914,\t\t0.31219557906752804,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.233000000000004\t\t],\n\t\t[477,\t\t478,\t\t0,\t\t1.5235457063711912e-05,\t\t0.0005469155924977479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08800000000000001\t\t],\n\t\t[245,\t\t478,\t\t0,\t\t0.005264542936288089,\t\t0.188984197007248,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.408\t\t],\n\t\t[479,\t\t481,\t\t0,\t\t0.028420495867768597,\t\t0.07516576970575199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.986000000000004\t\t],\n\t\t[565,\t\t481,\t\t0,\t\t0.024842314049586776,\t\t0.065702289836964,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.574\t\t],\n\t\t[480,\t\t481,\t\t0,\t\t7.735537190082645e-05,\t\t0.000204587425105844,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.11699999999999999\t\t],\n\t\t[415,\t\t482,\t\t0,\t\t0.011021814404432133,\t\t0.0989140353680364,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.831\t\t],\n\t\t[56,\t\t482,\t\t0,\t\t0.002630886426592798,\t\t0.0236105947261788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.598\t\t],\n\t\t[409,\t\t482,\t\t0,\t\t0.0007635041551246537,\t\t0.0068519822810072005,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.205\t\t],\n\t\t[483,\t\t484,\t\t0,\t\t9.037396121883656e-05,\t\t0.000811050963873968,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.261\t\t],\n\t\t[3,\t\t484,\t\t0,\t\t0.010022160664819944,\t\t0.08994275516621358,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.944000000000003\t\t],\n\t\t[301,\t\t484,\t\t0,\t\t0.00966516620498615,\t\t0.08673894848517479,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.913\t\t],\n\t\t[233,\t\t485,\t\t0,\t\t0.01410180055401662,\t\t0.1265550251138996,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.726\t\t],\n\t\t[392,\t\t485,\t\t0,\t\t0.00914819944598338,\t\t0.0820994883738036,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.42\t\t],\n\t\t[391,\t\t485,\t\t0,\t\t8.518005540166207e-05,\t\t0.000764438839512864,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.24600000000000002\t\t],\n\t\t[579,\t\t488,\t\t0,\t\t0.004636473829194215,\t\t0.11036180126571601,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.038\t\t],\n\t\t[486,\t\t488,\t\t0,\t\t0.00016969696969690082,\t\t0.00403929018798184,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.77\t\t],\n\t\t[487,\t\t488,\t\t0,\t\t0.00014567493112954544,\t\t0.00346749456396992,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.6609999999999999\t\t],\n\t\t[270,\t\t489,\t\t0,\t\t0.0001745152354570637,\t\t0.0062646695140596,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.008\t\t],\n\t\t[331,\t\t489,\t\t0,\t\t0.003002943213296399,\t\t0.10779830627119119,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.345\t\t],\n\t\t[396,\t\t489,\t\t0,\t\t0.011247922
43767313,\t\t0.40377286606072005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.968\t\t],\n\t\t[519,\t\t253,\t\t0,\t\t0.013353485337561985,\t\t0.141267767926912,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.394293146100004\t\t],\n\t\t[382,\t\t349,\t\t0,\t\t0.009091647380263157,\t\t1.30547149138788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t105.02671053600001\t\t],\n\t\t[349,\t\t351,\t\t0,\t\t0.0005858117819605263,\t\t0.0841168325920224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.76729770521\t\t],\n\t\t[459,\t\t465,\t\t0,\t\t1.578788789911157e-05,\t\t0.00016702153987596,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.047758360894800005\t\t],\n\t\t[549,\t\t550,\t\t0,\t\t3.680432518409091e-05,\t\t0.000389356391787088,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.111333083682\t\t],\n\t\t[550,\t\t551,\t\t0,\t\t5.755645674710744e-05,\t\t0.0006088951287918401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.17410828165999997\t\t],\n\t\t[194,\t\t195,\t\t0,\t\t1.7560672583171745e-05,\t\t0.00252154053805592,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.202860889681\t\t],\n\t\t[247,\t\t248,\t\t0,\t\t2.1755213937811637e-05,\t\t0.0031238355819477198,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.25131623141\t\t],\n\t\t[2,\t\t294,\t\t0,\t\t2.3531392658518004e-05,\t\t0.003378877444715,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.271834647991\t\t],\n\t\t[549,\t\t551,\t\t0,\t\t9.265809538429751e-05,\t\t0.0009802386406577602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.28029073853799996\t\t],\n\t\t[54,\t\t365,\t\t0,\t\t2.573045189134349e-05,\t\t0.00369464080598484,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.297238180249\t\t],\n\t\t[131,\t\t265,\t\t0,\t\t2.7616389041343487e-05,\t\t0.00396544290388756,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.319024526206\t\t],\n\t\t[91,\t\t92,\t\t0,\t\t2.8945628197853184e-05,\t\t0.0041563086239824396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.33437989694200004\t\t],\n\t\t[247,\t\t249,\t\t0,\t\t3.098840072160664e-05,\t\t0.00444963074500788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.357978005136\t\t],\n\t\t[186,\t\t191,\t\t0,\t\t3.1591661821191135e-05,\t\t0.00453625312865552,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.36494687735799997\t\t],\n\t\t[129,\t\t173,\t\t0,\t\t3.202671277479225e-05,\t\t0.00459872218332188,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.369972585975\t\t],\n\t\t[96,\t\t202,\t\t0,\t\t3.5971247867797784e-05,\t\t0.00516511877739804,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.415539855369\t\t],\n\t\t[53,\t\t320,\t\t0,\t\t3.784209581142659e-05,\t\t0.00543375421308236,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.437151890814\t\t],\n\t\t[24,\t\t396,\t\t0,\t\t4.144748602818559e-05,\t\t0.005951452925597279,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.47880135859800005\t\t],\n\t\t[133,\t\t156,\t\t0,\t\t4.431754564044322e-05,\t\t0.0063635653674415605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.511956287238\t\t],\n\t\t[442,\t\t452,\t\t0,\t\t4.483572190450138e-05,\t\t0.006437970402313801,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.517942259441\t\t],\n\t\t[445,\t\t452,\t\t0,\t\t4.490753296371191e-05,\t\t0.0064482817668697215,\t\t34
23.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.518771820797\t\t],\n\t\t[247,\t\t250,\t\t0,\t\t4.594910768732687e-05,\t\t0.00659784169268824,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.530804092004\t\t],\n\t\t[187,\t\t195,\t\t0,\t\t4.755760376239612e-05,\t\t0.006828805970367921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.549385438663\t\t],\n\t\t[216,\t\t236,\t\t0,\t\t5.03353075283241e-05,\t\t0.00722765701751724,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.581473472567\t\t],\n\t\t[244,\t\t389,\t\t0,\t\t5.1633313019736845e-05,\t\t0.007414037889302401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.596468032004\t\t],\n\t\t[394,\t\t406,\t\t0,\t\t5.6346419007686985e-05,\t\t0.008090793734075721,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.650913832377\t\t],\n\t\t[442,\t\t445,\t\t0,\t\t6.388070648310249e-05,\t\t0.00917264360085512,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.737949921293\t\t],\n\t\t[442,\t\t444,\t\t0,\t\t6.584378362735456e-05,\t\t0.00945452224616264,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.760627388463\t\t],\n\t\t[198,\t\t472,\t\t0,\t\t8.37554210498615e-05,\t\t0.0120264578966664,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.967542623967\t\t],\n\t\t[464,\t\t467,\t\t0,\t\t8.460287496468144e-05,\t\t0.01214814397621276,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.977332411594\t\t],\n\t\t[198,\t\t251,\t\t0,\t\t8.83613182396122e-05,\t\t0.012687819608389479,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0207499483\t\t],\n\t\t[112,\t\t143,\t\t0,\t\t9.049653833033241e-05,\t\t0.012994416294241841,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.04541601079\t\t],\n\t\t[2,\t\t490,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[5,\t\t491,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[10,\t\t492,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[12,\t\t493,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[13,\t\t494,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[15,\t\t495,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[18,\t\t496,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[20,\t\t497,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[22,\t\t498,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[24,\t\t499,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[26,\t\t500,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[30,\t\t501,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[32,\t\t502,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[37,\t\t503,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[42,\t\t504,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\
t1,\t\t-360,\t\t360\t\t],\n\t\t[46,\t\t505,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[52,\t\t506,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[56,\t\t507,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[61,\t\t508,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[68,\t\t509,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[69,\t\t510,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[74,\t\t511,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[78,\t\t512,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[86,\t\t513,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[87,\t\t514,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[94,\t\t515,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[95,\t\t516,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[96,\t\t517,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[99,\t\t518,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[100,\t\t519,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[104,\t\t520,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[105,\t\t521,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[106,\t\t522,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[107,\t\t523,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[117,\t\t524,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[120,\t\t525,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[123,\t\t526,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[124,\t\t527,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[125,\t\t528,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[128,\t\t529,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[129,\t\t530,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[138,\t\t531,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[143,\t\t532,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[156,\t\t533,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[157,\t\t534,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t
\t[159,\t\t535,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[160,\t\t536,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[165,\t\t537,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[184,\t\t538,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[191,\t\t539,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[195,\t\t540,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[201,\t\t541,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[220,\t\t542,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[231,\t\t543,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[232,\t\t544,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[233,\t\t545,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[236,\t\t546,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[245,\t\t547,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[246,\t\t548,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[248,\t\t549,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[249,\t\t550,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[250,\t\t551,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[259,\t\t552,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[261,\t\t553,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[262,\t\t554,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[265,\t\t555,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[270,\t\t556,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[277,\t\t557,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[279,\t\t558,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[280,\t\t559,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[290,\t\t560,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[301,\t\t561,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[305,\t\t562,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[306,\t\t563,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[310,\t\t564,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[313,\t\t565,
\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[315,\t\t566,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[320,\t\t567,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[330,\t\t568,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[332,\t\t569,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[334,\t\t570,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[336,\t\t571,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[349,\t\t572,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[351,\t\t573,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[358,\t\t574,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[360,\t\t575,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[380,\t\t576,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[382,\t\t577,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[383,\t\t578,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[389,\t\t579,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[401,\t\t580,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[402,\t\t581,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[409,\t\t582,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[415,\t\t583,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[444,\t\t584,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[452,\t\t585,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t]\n\t])\n\tppc[\"gen_control\"] = array([\n\t\t[586,\t\t1,\t\t0.08658028904199107,\t\t4.329014452099554,\t\t0, 0, 0],\n\t\t[589,\t\t1,\t\t0.010042676909098597,\t\t0.5021338454549299,\t\t0, 0, 0],\n\t\t[590,\t\t1,\t\t0.012095775674984046,\t\t0.6047887837492023,\t\t0, 0, 0],\n\t\t[593,\t\t1,\t\t0.0017666198683200384,\t\t0.08833099341600192,\t\t0, 0, 0],\n\t\t[595,\t\t1,\t\t1.50560576164933,\t\t75.2802880824665,\t\t0, 0, 0],\n\t\t[598,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[599,\t\t1,\t\t0.0029602819415092537,\t\t0.1480140970754627,\t\t0, 0, 0],\n\t\t[601,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[602,\t\t1,\t\t0.007830423200121252,\t\t0.39152116000606263,\t\t0, 0, 0],\n\t\t[603,\t\t1,\t\t1.0997606567649967,\t\t54.98803283824984,\t\t0, 0, 0],\n\t\t[607,\t\t1,\t\t0.5729577951308232,\t\t28.64788975654116,\t\t0, 0, 0],\n\t\t[608,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[609,\t\t1,\t\t0.0057932399285449895,\t\t0.2896619964272495,\t\t0, 0, 
0],\n\t\t[612,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[614,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[616,\t\t1,\t\t0.0046154933496649645,\t\t0.23077466748324824,\t\t0, 0, 0],\n\t\t[617,\t\t1,\t\t0.04360845440717932,\t\t2.1804227203589663,\t\t0, 0, 0],\n\t\t[618,\t\t1,\t\t0.010631550198538607,\t\t0.5315775099269304,\t\t0, 0, 0],\n\t\t[619,\t\t1,\t\t0.037560566569687294,\t\t1.8780283284843649,\t\t0, 0, 0],\n\t\t[624,\t\t1,\t\t0.004297183463481174,\t\t0.21485917317405873,\t\t0, 0, 0],\n\t\t[629,\t\t1,\t\t0.023968734429639437,\t\t1.198436721481972,\t\t0, 0, 0],\n\t\t[632,\t\t1,\t\t0.01435577586688896,\t\t0.717788793344448,\t\t0, 0, 0],\n\t\t[637,\t\t1,\t\t0.017093240888069558,\t\t0.854662044403478,\t\t0, 0, 0],\n\t\t[638,\t\t1,\t\t0.02048324117592693,\t\t1.0241620587963465,\t\t0, 0, 0],\n\t\t[640,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[641,\t\t1,\t\t0.0040107045659157625,\t\t0.20053522829578813,\t\t0, 0, 0],\n\t\t[642,\t\t1,\t\t0.00919915571071155,\t\t0.4599577855355775,\t\t0, 0, 0],\n\t\t[643,\t\t1,\t\t0.27279157245950864,\t\t13.639578622975431,\t\t0, 0, 0],\n\t\t[647,\t\t1,\t\t0.00445633840657307,\t\t0.2228169203286535,\t\t0, 0, 0],\n\t\t[652,\t\t1,\t\t0.00746436683100989,\t\t0.37321834155049455,\t\t0, 0, 0],\n\t\t[655,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[661,\t\t1,\t\t0.010408733278209955,\t\t0.5204366639104978,\t\t0, 0, 0],\n\t\t[663,\t\t1,\t\t0.00238732414637843,\t\t0.1193662073189215,\t\t0, 0, 0],\n\t\t[666,\t\t1,\t\t0.00919915571071155,\t\t0.4599577855355775,\t\t0, 0, 0],\n\t\t[668,\t\t1,\t\t0.24382537281678363,\t\t12.191268640839182,\t\t0, 0, 0],\n\t\t[670,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[672,\t\t1,\t\t0.010536057232683471,\t\t0.5268028616341736,\t\t0, 0, 0],\n\t\t[681,\t\t1,\t\t0.0063821132179850025,\t\t0.31910566089925013,\t\t0, 0, 0],\n\t\t[683,\t\t1,\t\t0.008753521870054244,\t\t0.4376760935027122,\t\t0, 0, 0],\n\t\t[687,\t\t1,\t\t0.42303383873825773,\t\t21.151691936912886,\t\t0, 0, 0],\n\t\t[694,\t\t1,\t\t0.005220282133414166,\t\t0.2610141066707083,\t\t0, 0, 0],\n\t\t[695,\t\t1,\t\t0.004679155326901723,\t\t0.23395776634508614,\t\t0, 0, 0],\n\t\t[696,\t\t1,\t\t0.22950142793851305,\t\t11.475071396925653,\t\t0, 0, 0],\n\t\t[697,\t\t1,\t\t0.0036923946797319715,\t\t0.1846197339865986,\t\t0, 0, 0],\n\t\t[698,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[702,\t\t1,\t\t0.023363945645890238,\t\t1.168197282294512,\t\t0, 0, 0],\n\t\t[704,\t\t1,\t\t0.16170142218136566,\t\t8.085071109068283,\t\t0, 0, 0],\n\t\t[705,\t\t1,\t\t0.005411268065124442,\t\t0.27056340325622213,\t\t0, 0, 0],\n\t\t[707,\t\t1,\t\t0.010822536130248884,\t\t0.5411268065124443,\t\t0, 0, 0],\n\t\t[713,\t\t1,\t\t0.004265352474862795,\t\t0.21326762374313976,\t\t0, 0, 0],\n\t\t[714,\t\t1,\t\t0.00477464829275686,\t\t0.238732414637843,\t\t0, 0, 0],\n\t\t[716,\t\t1,\t\t1.5915494309189534e-05,\t\t0.0007957747154594768,\t\t0, 0, 0],\n\t\t[717,\t\t1,\t\t0.0017507043740108488,\t\t0.08753521870054244,\t\t0, 0, 0],\n\t\t[719,\t\t1,\t\t0.623250757147862,\t\t31.162537857393104,\t\t0, 0, 0],\n\t\t[724,\t\t1,\t\t0.0019257748114119334,\t\t0.09628874057059668,\t\t0, 0, 0],\n\t\t[730,\t\t1,\t\t0.10077690996578814,\t\t5.038845498289407,\t\t0, 0, 0],\n\t\t[732,\t\t1,\t\t0.004647324338283344,\t\t0.2323662169141672,\t\t0, 0, 0],\n\t\t[735,\t\t1,\t\t0.013496339174192726,\t\t0.6748169587096363,\t\t0, 0, 
0],\n\t\t[738,\t\t1,\t\t0.04408591923645501,\t\t2.2042959618227504,\t\t0, 0, 0],\n\t\t[741,\t\t1,\t\t0.0340591578216656,\t\t1.7029578910832803,\t\t0, 0, 0],\n\t\t[742,\t\t1,\t\t0.0028647889756541157,\t\t0.14323944878270578,\t\t0, 0, 0],\n\t\t[743,\t\t1,\t\t0.44881693951914486,\t\t22.440846975957243,\t\t0, 0, 0],\n\t\t[747,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[748,\t\t1,\t\t0.03501408748021698,\t\t1.7507043740108488,\t\t0, 0, 0],\n\t\t[749,\t\t1,\t\t0.0025464790894703256,\t\t0.12732395447351627,\t\t0, 0, 0],\n\t\t[750,\t\t1,\t\t0.028902537665488188,\t\t1.4451268832744095,\t\t0, 0, 0],\n\t\t[753,\t\t1,\t\t0.049624511256052974,\t\t2.4812255628026487,\t\t0, 0, 0],\n\t\t[758,\t\t1,\t\t0.0058887328944001276,\t\t0.2944366447200064,\t\t0, 0, 0],\n\t\t[761,\t\t1,\t\t0.004997465213085514,\t\t0.2498732606542757,\t\t0, 0, 0],\n\t\t[762,\t\t1,\t\t0.3517324242330887,\t\t17.586621211654435,\t\t0, 0, 0],\n\t\t[763,\t\t1,\t\t0.006461690689530951,\t\t0.32308453447654756,\t\t0, 0, 0],\n\t\t[765,\t\t1,\t\t0.018780283284843647,\t\t0.9390141642421824,\t\t0, 0, 0],\n\t\t[767,\t\t1,\t\t0.0035650707252584553,\t\t0.17825353626292276,\t\t0, 0, 0],\n\t\t[772,\t\t1,\t\t0.002992112930127632,\t\t0.1496056465063816,\t\t0, 0, 0],\n\t\t[774,\t\t1,\t\t0.010663381187156987,\t\t0.5331690593578494,\t\t0, 0, 0],\n\t\t[777,\t\t1,\t\t0.012573240504259732,\t\t0.6286620252129866,\t\t0, 0, 0],\n\t\t[778,\t\t1,\t\t0.004679155326901723,\t\t0.23395776634508614,\t\t0, 0, 0],\n\t\t[781,\t\t1,\t\t0.4169859509007658,\t\t20.84929754503829,\t\t0, 0, 0],\n\t\t[784,\t\t1,\t\t0.4058451048843331,\t\t20.292255244216655,\t\t0, 0, 0],\n\t\t[785,\t\t1,\t\t0.00047746482927568597,\t\t0.0238732414637843,\t\t0, 0, 0],\n\t\t[788,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 0],\n\t\t[789,\t\t1,\t\t0.0123185925953127,\t\t0.615929629765635,\t\t0, 0, 0],\n\t\t[791,\t\t1,\t\t0.0031830988618379067,\t\t0.15915494309189535,\t\t0, 0, 0],\n\t\t[792,\t\t1,\t\t0.009979014931861837,\t\t0.49895074659309185,\t\t0, 0, 0],\n\t\t[795,\t\t1,\t\t0.004329014452099553,\t\t0.2164507226049777,\t\t0, 0, 0],\n\t\t[800,\t\t1,\t\t0.0058091554228541795,\t\t0.290457771142709,\t\t0, 0, 0],\n\t\t[801,\t\t1,\t\t0.007957747154594767,\t\t0.3978873577297384,\t\t0, 0, 0],\n\t\t[802,\t\t1,\t\t0.07957747154594767,\t\t3.9788735772973833,\t\t0, 0, 0],\n\t\t[805,\t\t1,\t\t0.44881693951914486,\t\t22.440846975957243,\t\t0, 0, 0],\n\t\t[806,\t\t1,\t\t0.005697746962689853,\t\t0.2848873481344927,\t\t0, 0, 0],\n\t\t[808,\t\t1,\t\t0.034616200122487235,\t\t1.7308100061243619,\t\t0, 0, 0],\n\t\t[809,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[811,\t\t1,\t\t0.0040107045659157625,\t\t0.20053522829578813,\t\t0, 0, 0],\n\t\t[814,\t\t1,\t\t0.014164789935178685,\t\t0.7082394967589343,\t\t0, 0, 0],\n\t\t[816,\t\t1,\t\t0.012748310941660816,\t\t0.6374155470830408,\t\t0, 0, 0],\n\t\t[817,\t\t1,\t\t0.017188733853924696,\t\t0.8594366926962349,\t\t0, 0, 0],\n\t\t[821,\t\t1,\t\t0.013130282805081364,\t\t0.6565141402540683,\t\t0, 0, 0],\n\t\t[822,\t\t1,\t\t0.04265352474862795,\t\t2.1326762374313977,\t\t0, 0, 0],\n\t\t[826,\t\t1,\t\t0.018461973398659858,\t\t0.9230986699329929,\t\t0, 0, 0],\n\t\t[830,\t\t1,\t\t0.02832957987035737,\t\t1.4164789935178685,\t\t0, 0, 0],\n\t\t[835,\t\t1,\t\t0.010138169874953733,\t\t0.5069084937476867,\t\t0, 0, 0],\n\t\t[836,\t\t1,\t\t0.008116902097686661,\t\t0.4058451048843331,\t\t0, 0, 0],\n\t\t[839,\t\t1,\t\t0.011666057328635928,\t\t0.5833028664317964,\t\t0, 0, 
0],\n\t\t[841,\t\t1,\t\t0.0037083101740411615,\t\t0.18541550870205808,\t\t0, 0, 0],\n\t\t[844,\t\t1,\t\t0.012732395447351627,\t\t0.6366197723675814,\t\t0, 0, 0],\n\t\t[845,\t\t1,\t\t0.10122254380644544,\t\t5.061127190322272,\t\t0, 0, 0],\n\t\t[849,\t\t1,\t\t0.24796340133717296,\t\t12.398170066858649,\t\t0, 0, 0],\n\t\t[850,\t\t1,\t\t0.005092958178940651,\t\t0.25464790894703254,\t\t0, 0, 0],\n\t\t[851,\t\t1,\t\t0.01265281797580568,\t\t0.632640898790284,\t\t0, 0, 0],\n\t\t[853,\t\t1,\t\t0.0036923946797319715,\t\t0.1846197339865986,\t\t0, 0, 0],\n\t\t[855,\t\t1,\t\t0.21899720169444797,\t\t10.949860084722399,\t\t0, 0, 0],\n\t\t[856,\t\t1,\t\t0.011459155902616463,\t\t0.5729577951308231,\t\t0, 0, 0],\n\t\t[857,\t\t1,\t\t0.4462704604296745,\t\t22.313523021483725,\t\t0, 0, 0],\n\t\t[858,\t\t1,\t\t0.01808000153523931,\t\t0.9040000767619655,\t\t0, 0, 0],\n\t\t[860,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[865,\t\t1,\t\t0.0035014087480216977,\t\t0.17507043740108488,\t\t0, 0, 0],\n\t\t[869,\t\t1,\t\t0.4329014452099553,\t\t21.645072260497766,\t\t0, 0, 0],\n\t\t[870,\t\t1,\t\t0.018589297353133374,\t\t0.9294648676566688,\t\t0, 0, 0],\n\t\t[872,\t\t1,\t\t0.00716197243913529,\t\t0.3580986219567645,\t\t0, 0, 0],\n\t\t[874,\t\t1,\t\t0.006589014644004467,\t\t0.3294507322002233,\t\t0, 0, 0],\n\t\t[875,\t\t1,\t\t0.007766761222884492,\t\t0.38833806114422464,\t\t0, 0, 0],\n\t\t[882,\t\t1,\t\t0.005538592019597957,\t\t0.2769296009798979,\t\t0, 0, 0],\n\t\t[883,\t\t1,\t\t0.005729577951308231,\t\t0.28647889756541156,\t\t0, 0, 0],\n\t\t[885,\t\t1,\t\t0.15597184423005742,\t\t7.798592211502871,\t\t0, 0, 0],\n\t\t[886,\t\t1,\t\t0.8186930272647096,\t\t40.93465136323548,\t\t0, 0, 0],\n\t\t[889,\t\t1,\t\t0.0030239439187460114,\t\t0.15119719593730058,\t\t0, 0, 0],\n\t\t[890,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[893,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[894,\t\t1,\t\t0.025146481008519465,\t\t1.2573240504259733,\t\t0, 0, 0],\n\t\t[895,\t\t1,\t\t0.0030239439187460114,\t\t0.15119719593730058,\t\t0, 0, 0],\n\t\t[896,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[898,\t\t1,\t\t0.013464508185574344,\t\t0.6732254092787172,\t\t0, 0, 0],\n\t\t[900,\t\t1,\t\t0.03584169318429482,\t\t1.7920846592147412,\t\t0, 0, 0],\n\t\t[902,\t\t1,\t\t0.006207042780583919,\t\t0.31035213902919595,\t\t0, 0, 0],\n\t\t[903,\t\t1,\t\t0.0031990143561470966,\t\t0.15995071780735484,\t\t0, 0, 0],\n\t\t[905,\t\t1,\t\t0.021851973686517232,\t\t1.0925986843258617,\t\t0, 0, 0],\n\t\t[906,\t\t1,\t\t0.010504226244065093,\t\t0.5252113122032547,\t\t0, 0, 0],\n\t\t[907,\t\t1,\t\t0.02142225534016911,\t\t1.0711127670084555,\t\t0, 0, 0],\n\t\t[909,\t\t1,\t\t0.005856901905781748,\t\t0.2928450952890874,\t\t0, 0, 0],\n\t\t[913,\t\t1,\t\t0.02355493157760051,\t\t1.1777465788800257,\t\t0, 0, 0],\n\t\t[915,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[917,\t\t1,\t\t0.005411268065124442,\t\t0.27056340325622213,\t\t0, 0, 0],\n\t\t[918,\t\t1,\t\t0.012254930618075942,\t\t0.612746530903797,\t\t0, 0, 0],\n\t\t[920,\t\t1,\t\t0.0020371832715762603,\t\t0.10185916357881303,\t\t0, 0, 0],\n\t\t[921,\t\t1,\t\t0.019735212943395024,\t\t0.9867606471697512,\t\t0, 0, 0],\n\t\t[922,\t\t1,\t\t0.05220282133414166,\t\t2.6101410667070835,\t\t0, 0, 0],\n\t\t[923,\t\t1,\t\t0.023236621691416718,\t\t1.161831084570836,\t\t0, 0, 0],\n\t\t[925,\t\t1,\t\t0.008276057040778557,\t\t0.4138028520389279,\t\t0, 0, 
0],\n\t\t[931,\t\t1,\t\t0.03455253814525047,\t\t1.7276269072625237,\t\t0, 0, 0],\n\t\t[935,\t\t1,\t\t0.007352958370845565,\t\t0.36764791854227824,\t\t0, 0, 0],\n\t\t[936,\t\t1,\t\t0.016615776058793875,\t\t0.8307888029396938,\t\t0, 0, 0],\n\t\t[937,\t\t1,\t\t0.00477464829275686,\t\t0.238732414637843,\t\t0, 0, 0],\n\t\t[939,\t\t1,\t\t1.5915494309189534e-05,\t\t0.0007957747154594768,\t\t0, 0, 0],\n\t\t[940,\t\t1,\t\t0.009421972631040205,\t\t0.47109863155201026,\t\t0, 0, 0],\n\t\t[944,\t\t1,\t\t0.004042535554534142,\t\t0.2021267777267071,\t\t0, 0, 0],\n\t\t[950,\t\t1,\t\t0.005092958178940651,\t\t0.25464790894703254,\t\t0, 0, 0],\n\t\t[952,\t\t1,\t\t0.005045211696013082,\t\t0.2522605848006541,\t\t0, 0, 0],\n\t\t[958,\t\t1,\t\t0.010615634704229418,\t\t0.530781735211471,\t\t0, 0, 0],\n\t\t[959,\t\t1,\t\t0.007241549910681238,\t\t0.3620774955340619,\t\t0, 0, 0],\n\t\t[960,\t\t1,\t\t0.004217605991935227,\t\t0.21088029959676136,\t\t0, 0, 0],\n\t\t[963,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 0],\n\t\t[965,\t\t1,\t\t0.11204507993669433,\t\t5.602253996834716,\t\t0, 0, 0],\n\t\t[966,\t\t1,\t\t0.021008452488130186,\t\t1.0504226244065094,\t\t0, 0, 0],\n\t\t[967,\t\t1,\t\t0.01193662073189215,\t\t0.5968310365946076,\t\t0, 0, 0],\n\t\t[969,\t\t1,\t\t0.018111832523857688,\t\t0.9055916261928845,\t\t0, 0, 0],\n\t\t[971,\t\t1,\t\t0.0031830988618379067,\t\t0.15915494309189535,\t\t0, 0, 0],\n\t\t[973,\t\t1,\t\t0.4287634166895661,\t\t21.438170834478306,\t\t0, 0, 0],\n\t\t[976,\t\t1,\t\t0.008562535938343968,\t\t0.4281267969171984,\t\t0, 0, 0],\n\t\t[978,\t\t1,\t\t0.0007321127382227185,\t\t0.03660563691113593,\t\t0, 0, 0],\n\t\t[980,\t\t1,\t\t0.11140846016432673,\t\t5.570423008216337,\t\t0, 0, 0],\n\t\t[981,\t\t1,\t\t0.03787887645587108,\t\t1.8939438227935543,\t\t0, 0, 0],\n\t\t[982,\t\t1,\t\t0.0015756339366097638,\t\t0.07878169683048819,\t\t0, 0, 0],\n\t\t[983,\t\t1,\t\t0.01400563499208679,\t\t0.7002817496043395,\t\t0, 0, 0],\n\t\t[984,\t\t1,\t\t0.14801409707546268,\t\t7.400704853773133,\t\t0, 0, 0],\n\t\t[985,\t\t1,\t\t0.0035014087480216977,\t\t0.17507043740108488,\t\t0, 0, 0],\n\t\t[986,\t\t1,\t\t0.0017825353626292277,\t\t0.08912676813146138,\t\t0, 0, 0],\n\t\t[987,\t\t1,\t\t0.02618098813861678,\t\t1.3090494069308392,\t\t0, 0, 0],\n\t\t[988,\t\t1,\t\t0.0008116902097686662,\t\t0.04058451048843331,\t\t0, 0, 0],\n\t\t[993,\t\t1,\t\t0.06238873769202297,\t\t3.119436884601149,\t\t0, 0, 0],\n\t\t[994,\t\t1,\t\t0.010504226244065093,\t\t0.5252113122032547,\t\t0, 0, 0],\n\t\t[995,\t\t1,\t\t0.0006684507609859605,\t\t0.033422538049298026,\t\t0, 0, 0],\n\t\t[997,\t\t1,\t\t0.005984225860255264,\t\t0.2992112930127632,\t\t0, 0, 0],\n\t\t[999,\t\t1,\t\t0.004965634224467135,\t\t0.24828171122335674,\t\t0, 0, 0],\n\t\t[1000,\t\t1,\t\t0.015597184423005743,\t\t0.7798592211502873,\t\t0, 0, 0],\n\t\t[1002,\t\t1,\t\t0.0031512678732195276,\t\t0.15756339366097638,\t\t0, 0, 0],\n\t\t[1003,\t\t1,\t\t0.2864788975654116,\t\t14.32394487827058,\t\t0, 0, 0],\n\t\t[1007,\t\t1,\t\t0.007416620348082323,\t\t0.37083101740411617,\t\t0, 0, 0],\n\t\t[1008,\t\t1,\t\t0.015597184423005743,\t\t0.7798592211502873,\t\t0, 0, 0],\n\t\t[1010,\t\t1,\t\t0.238732414637843,\t\t11.93662073189215,\t\t0, 0, 0],\n\t\t[1011,\t\t1,\t\t0.005952394871636886,\t\t0.2976197435818443,\t\t0, 0, 0],\n\t\t[1012,\t\t1,\t\t0.9024085273310466,\t\t45.12042636655233,\t\t0, 0, 0],\n\t\t[1014,\t\t1,\t\t0.238732414637843,\t\t11.93662073189215,\t\t0, 0, 0],\n\t\t[1026,\t\t1,\t\t0.20868396138209316,\t\t10.434198069104658,\t\t0, 0, 
0],\n\t\t[1027,\t\t3,\t\t0.0021774309145741774,\t\t0.10887154572870887,\t\t2.22, 61.69, 0.004502],\n\t\t[1028,\t\t2,\t\t0.025464790894703257,\t\t1.273239544735163,\t\t0, 0, 0],\n\t\t[1029,\t\t2,\t\t0.003819718634205488,\t\t0.19098593171027442,\t\t0, 0, 0],\n\t\t[1030,\t\t2,\t\t0.06480789282701978,\t\t3.2403946413509894,\t\t0, 0, 0],\n\t\t[1031,\t\t2,\t\t0.0921316134570364,\t\t4.60658067285182,\t\t0, 0, 0],\n\t\t[1032,\t\t2,\t\t0.00952447200469018,\t\t0.476223600234509,\t\t0, 0, 0],\n\t\t[1033,\t\t2,\t\t0.002900927229801628,\t\t0.1450463614900814,\t\t0, 0, 0],\n\t\t[1034,\t\t2,\t\t0.005364335122251813,\t\t0.26821675611259066,\t\t0, 0, 0],\n\t\t[1035,\t\t3,\t\t0.00317587127473044,\t\t0.158793563736522,\t\t2.22, 61.69, 0.004502],\n\t\t[1036,\t\t2,\t\t0.0042795539826391196,\t\t0.21397769913195597,\t\t0, 0, 0],\n\t\t[1037,\t\t2,\t\t0.004955336101366405,\t\t0.24776680506832027,\t\t0, 0, 0],\n\t\t[1038,\t\t2,\t\t0.004727468685835316,\t\t0.2363734342917658,\t\t0, 0, 0],\n\t\t[1039,\t\t2,\t\t0.003994483783969036,\t\t0.19972418919845183,\t\t0, 0, 0],\n\t\t[1040,\t\t3,\t\t1.6455671484213926e-06,\t\t8.227835742106963e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1041,\t\t2,\t\t0.009514652147542641,\t\t0.47573260737713213,\t\t0, 0, 0],\n\t\t[1042,\t\t2,\t\t0.0015736071241218962,\t\t0.0786803562060948,\t\t0, 0, 0],\n\t\t[1043,\t\t3,\t\t0.0001860633480619268,\t\t0.009303167403096342,\t\t2.22, 61.69, 0.004502],\n\t\t[1044,\t\t3,\t\t0.0021677989511531123,\t\t0.10838994755765562,\t\t2.22, 61.69, 0.004502],\n\t\t[1045,\t\t2,\t\t0.003936615026511589,\t\t0.19683075132557948,\t\t0, 0, 0],\n\t\t[1046,\t\t2,\t\t0.00679827557108513,\t\t0.33991377855425653,\t\t0, 0, 0],\n\t\t[1047,\t\t3,\t\t0.0008294889076348922,\t\t0.04147444538174461,\t\t2.22, 61.69, 0.004502],\n\t\t[1048,\t\t2,\t\t0.004561818873896339,\t\t0.22809094369481697,\t\t0, 0, 0],\n\t\t[1049,\t\t2,\t\t0.017519480206840264,\t\t0.8759740103420132,\t\t0, 0, 0],\n\t\t[1050,\t\t2,\t\t0.0026070963318590656,\t\t0.13035481659295328,\t\t0, 0, 0],\n\t\t[1051,\t\t2,\t\t0.015687938685312278,\t\t0.784396934265614,\t\t0, 0, 0],\n\t\t[1052,\t\t3,\t\t0.001315809692296204,\t\t0.06579048461481019,\t\t2.22, 61.69, 0.004502],\n\t\t[1053,\t\t3,\t\t0.001042024786453249,\t\t0.05210123932266245,\t\t2.22, 61.69, 0.004502],\n\t\t[1054,\t\t2,\t\t0.017434200209443074,\t\t0.8717100104721537,\t\t0, 0, 0],\n\t\t[1055,\t\t3,\t\t8.839648832281736e-05,\t\t0.0044198244161408684,\t\t2.22, 61.69, 0.004502],\n\t\t[1056,\t\t2,\t\t0.02055744509394447,\t\t1.0278722546972237,\t\t0, 0, 0],\n\t\t[1057,\t\t2,\t\t0.01491834386909586,\t\t0.745917193454793,\t\t0, 0, 0],\n\t\t[1058,\t\t2,\t\t0.03604848709529135,\t\t1.8024243547645675,\t\t0, 0, 0],\n\t\t[1059,\t\t2,\t\t0.013557939312312254,\t\t0.6778969656156127,\t\t0, 0, 0],\n\t\t[1060,\t\t3,\t\t0.0003102305990567321,\t\t0.015511529952836606,\t\t2.22, 61.69, 0.004502],\n\t\t[1061,\t\t2,\t\t0.00553682869668096,\t\t0.276841434834048,\t\t0, 0, 0],\n\t\t[1062,\t\t3,\t\t8.785726057007105e-05,\t\t0.004392863028503553,\t\t2.22, 61.69, 0.004502],\n\t\t[1063,\t\t3,\t\t0.0002599673274981191,\t\t0.012998366374905956,\t\t2.22, 61.69, 0.004502],\n\t\t[1064,\t\t2,\t\t0.008608339751224173,\t\t0.4304169875612086,\t\t0, 0, 0],\n\t\t[1065,\t\t2,\t\t0.01602939949909265,\t\t0.8014699749546326,\t\t0, 0, 0],\n\t\t[1066,\t\t2,\t\t0.007713974637252848,\t\t0.3856987318626424,\t\t0, 0, 0],\n\t\t[1067,\t\t3,\t\t0.002078788013715776,\t\t0.1039394006857888,\t\t2.22, 61.69, 0.004502],\n\t\t[1068,\t\t3,\t\t0.0003188842576981683,\t\t0.015944212884908417,\t\t2.22, 61.69, 
0.004502],\n\t\t[1069,\t\t3,\t\t0.00020313001706596343,\t\t0.010156500853298172,\t\t2.22, 61.69, 0.004502],\n\t\t[1070,\t\t3,\t\t5.020379247175116e-05,\t\t0.0025101896235875582,\t\t2.22, 61.69, 0.004502],\n\t\t[1071,\t\t3,\t\t0.0002755733400308117,\t\t0.013778667001540588,\t\t2.22, 61.69, 0.004502],\n\t\t[1072,\t\t2,\t\t0.007168748144119091,\t\t0.3584374072059546,\t\t0, 0, 0],\n\t\t[1073,\t\t2,\t\t0.004954025493475761,\t\t0.24770127467378808,\t\t0, 0, 0],\n\t\t[1074,\t\t2,\t\t0.009778033156939965,\t\t0.48890165784699824,\t\t0, 0, 0],\n\t\t[1075,\t\t3,\t\t0.0010048055180333312,\t\t0.05024027590166657,\t\t2.22, 61.69, 0.004502],\n\t\t[1076,\t\t3,\t\t6.469727966193708e-05,\t\t0.003234863983096854,\t\t2.22, 61.69, 0.004502],\n\t\t[1077,\t\t3,\t\t0.0011830299572765865,\t\t0.05915149786382932,\t\t2.22, 61.69, 0.004502],\n\t\t[1078,\t\t3,\t\t0.001172568898243543,\t\t0.058628444912177155,\t\t2.22, 61.69, 0.004502],\n\t\t[1079,\t\t2,\t\t0.004604543003215469,\t\t0.23022715016077344,\t\t0, 0, 0],\n\t\t[1080,\t\t2,\t\t0.008412929217414397,\t\t0.4206464608707199,\t\t0, 0, 0],\n\t\t[1081,\t\t2,\t\t0.02157539837421533,\t\t1.0787699187107667,\t\t0, 0, 0],\n\t\t[1082,\t\t2,\t\t0.027088925496952045,\t\t1.3544462748476025,\t\t0, 0, 0],\n\t\t[1083,\t\t2,\t\t0.02648799276287556,\t\t1.324399638143778,\t\t0, 0, 0],\n\t\t[1084,\t\t2,\t\t0.024097921922649333,\t\t1.2048960961324668,\t\t0, 0, 0],\n\t\t[1085,\t\t2,\t\t0.007239283505967098,\t\t0.3619641752983549,\t\t0, 0, 0],\n\t\t[1086,\t\t2,\t\t0.0113228173380432,\t\t0.56614086690216,\t\t0, 0, 0],\n\t\t[1087,\t\t2,\t\t0.007427186304799236,\t\t0.3713593152399618,\t\t0, 0, 0],\n\t\t[1088,\t\t3,\t\t0.0023416461987310717,\t\t0.11708230993655358,\t\t2.22, 61.69, 0.004502],\n\t\t[1089,\t\t2,\t\t0.016069322163187832,\t\t0.8034661081593916,\t\t0, 0, 0],\n\t\t[1090,\t\t2,\t\t0.005674885746854652,\t\t0.2837442873427326,\t\t0, 0, 0],\n\t\t[1091,\t\t3,\t\t0.002915330196419503,\t\t0.14576650982097517,\t\t2.22, 61.69, 0.004502],\n\t\t[1092,\t\t2,\t\t0.002887919870246302,\t\t0.1443959935123151,\t\t0, 0, 0],\n\t\t[1093,\t\t2,\t\t0.009906140914748767,\t\t0.49530704573743833,\t\t0, 0, 0],\n\t\t[1094,\t\t3,\t\t0.00023930778294026586,\t\t0.011965389147013294,\t\t2.22, 61.69, 0.004502],\n\t\t[1095,\t\t3,\t\t1.3047613994501091e-05,\t\t0.0006523806997250545,\t\t2.22, 61.69, 0.004502],\n\t\t[1096,\t\t2,\t\t0.005379826679377905,\t\t0.2689913339688953,\t\t0, 0, 0],\n\t\t[1097,\t\t3,\t\t0.0002929164939619051,\t\t0.014645824698095257,\t\t2.22, 61.69, 0.004502],\n\t\t[1098,\t\t2,\t\t0.004521623727146264,\t\t0.22608118635731317,\t\t0, 0, 0],\n\t\t[1099,\t\t2,\t\t0.018521637260932335,\t\t0.9260818630466169,\t\t0, 0, 0],\n\t\t[1100,\t\t3,\t\t7.054344741001778e-07,\t\t3.527172370500889e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1101,\t\t2,\t\t0.002933481656830483,\t\t0.14667408284152417,\t\t0, 0, 0],\n\t\t[1102,\t\t2,\t\t0.013351472434376337,\t\t0.6675736217188168,\t\t0, 0, 0],\n\t\t[1103,\t\t2,\t\t0.01056648600956628,\t\t0.528324300478314,\t\t0, 0, 0],\n\t\t[1104,\t\t3,\t\t1.3172819714966009e-05,\t\t0.0006586409857483004,\t\t2.22, 61.69, 0.004502],\n\t\t[1105,\t\t3,\t\t0.0001386935566767763,\t\t0.006934677833838815,\t\t2.22, 61.69, 0.004502],\n\t\t[1106,\t\t3,\t\t0.00014577275883068604,\t\t0.0072886379415343025,\t\t2.22, 61.69, 0.004502],\n\t\t[1107,\t\t2,\t\t0.004852418696402547,\t\t0.24262093482012728,\t\t0, 0, 0],\n\t\t[1108,\t\t2,\t\t0.01662682691421984,\t\t0.831341345710992,\t\t0, 0, 0],\n\t\t[1109,\t\t3,\t\t4.9542410867097304e-05,\t\t0.002477120543354865,\t\t2.22, 61.69, 
0.004502],\n\t\t[1110,\t\t3,\t\t0.00010533237807450261,\t\t0.00526661890372513,\t\t2.22, 61.69, 0.004502],\n\t\t[1111,\t\t2,\t\t0.0045366504659925064,\t\t0.22683252329962533,\t\t0, 0, 0],\n\t\t[1112,\t\t2,\t\t0.004426690383932842,\t\t0.2213345191966421,\t\t0, 0, 0],\n\t\t[1113,\t\t3,\t\t0.00022513170529279912,\t\t0.011256585264639957,\t\t2.22, 61.69, 0.004502],\n\t\t[1114,\t\t3,\t\t0.0008560555102861403,\t\t0.042802775514307015,\t\t2.22, 61.69, 0.004502],\n\t\t[1115,\t\t2,\t\t0.0032197222090973076,\t\t0.16098611045486538,\t\t0, 0, 0],\n\t\t[1116,\t\t3,\t\t0.002075453185310181,\t\t0.10377265926550905,\t\t2.22, 61.69, 0.004502],\n\t\t[1117,\t\t2,\t\t0.005780032679669937,\t\t0.2890016339834969,\t\t0, 0, 0],\n\t\t[1118,\t\t3,\t\t0.0005554515385863421,\t\t0.027772576929317106,\t\t2.22, 61.69, 0.004502],\n\t\t[1119,\t\t3,\t\t0.0027536366373517632,\t\t0.13768183186758817,\t\t2.22, 61.69, 0.004502],\n\t\t[1120,\t\t3,\t\t0.0001538074296570127,\t\t0.007690371482850636,\t\t2.22, 61.69, 0.004502],\n\t\t[1121,\t\t3,\t\t3.4414977793908876e-05,\t\t0.0017207488896954439,\t\t2.22, 61.69, 0.004502],\n\t\t[1122,\t\t3,\t\t9.313004041299959e-05,\t\t0.00465650202064998,\t\t2.22, 61.69, 0.004502],\n\t\t[1123,\t\t3,\t\t9.32225252447514e-05,\t\t0.00466112626223757,\t\t2.22, 61.69, 0.004502],\n\t\t[1124,\t\t3,\t\t8.201464578534214e-05,\t\t0.004100732289267108,\t\t2.22, 61.69, 0.004502],\n\t\t[1125,\t\t3,\t\t0.0016436821796102436,\t\t0.08218410898051219,\t\t2.22, 61.69, 0.004502],\n\t\t[1126,\t\t3,\t\t0.0018560581327172175,\t\t0.09280290663586088,\t\t2.22, 61.69, 0.004502],\n\t\t[1127,\t\t2,\t\t0.006703391093283916,\t\t0.3351695546641958,\t\t0, 0, 0],\n\t\t[1128,\t\t3,\t\t0.0001948941120002845,\t\t0.009744705600014225,\t\t2.22, 61.69, 0.004502],\n\t\t[1129,\t\t3,\t\t0.0003016780123772693,\t\t0.015083900618863466,\t\t2.22, 61.69, 0.004502],\n\t\t[1130,\t\t3,\t\t6.530151955301432e-05,\t\t0.003265075977650716,\t\t2.22, 61.69, 0.004502],\n\t\t[1131,\t\t3,\t\t0.00018443373362804407,\t\t0.009221686681402204,\t\t2.22, 61.69, 0.004502],\n\t\t[1132,\t\t3,\t\t2.2886271300209156e-05,\t\t0.0011443135650104578,\t\t2.22, 61.69, 0.004502],\n\t\t[1133,\t\t3,\t\t4.5810964480308454e-05,\t\t0.002290548224015423,\t\t2.22, 61.69, 0.004502],\n\t\t[1134,\t\t3,\t\t3.236913111220881e-05,\t\t0.0016184565556104404,\t\t2.22, 61.69, 0.004502],\n\t\t[1135,\t\t3,\t\t0.0005167964323996007,\t\t0.025839821619980042,\t\t2.22, 61.69, 0.004502],\n\t\t[1136,\t\t3,\t\t2.5636662405410735e-05,\t\t0.0012818331202705368,\t\t2.22, 61.69, 0.004502],\n\t\t[1137,\t\t3,\t\t0.00023357652984116472,\t\t0.011678826492058236,\t\t2.22, 61.69, 0.004502],\n\t\t[1138,\t\t3,\t\t7.98498118498449e-05,\t\t0.003992490592492246,\t\t2.22, 61.69, 0.004502],\n\t\t[1139,\t\t3,\t\t0.0012619566606414858,\t\t0.0630978330320743,\t\t2.22, 61.69, 0.004502],\n\t\t[1140,\t\t3,\t\t0.0018073289497007397,\t\t0.09036644748503699,\t\t2.22, 61.69, 0.004502],\n\t\t[1141,\t\t2,\t\t0.0076053500901520025,\t\t0.38026750450760016,\t\t0, 0, 0],\n\t\t[1142,\t\t3,\t\t7.73959943559724e-05,\t\t0.00386979971779862,\t\t2.22, 61.69, 0.004502],\n\t\t[1143,\t\t3,\t\t0.0016067873237582107,\t\t0.08033936618791054,\t\t2.22, 61.69, 0.004502],\n\t\t[1144,\t\t2,\t\t0.00334399697192306,\t\t0.16719984859615303,\t\t0, 0, 0],\n\t\t[1145,\t\t2,\t\t0.011197481443497569,\t\t0.5598740721748785,\t\t0, 0, 0],\n\t\t[1146,\t\t3,\t\t5.4833151376821656e-05,\t\t0.002741657568841083,\t\t2.22, 61.69, 0.004502],\n\t\t[1147,\t\t3,\t\t0.002909588342312674,\t\t0.14547941711563372,\t\t2.22, 61.69, 
0.004502],\n\t\t[1148,\t\t3,\t\t0.0011233492673683868,\t\t0.05616746336841934,\t\t2.22, 61.69, 0.004502],\n\t\t[1149,\t\t3,\t\t0.0005447417794635118,\t\t0.02723708897317559,\t\t2.22, 61.69, 0.004502],\n\t\t[1150,\t\t3,\t\t0.0002306193019977063,\t\t0.011530965099885314,\t\t2.22, 61.69, 0.004502],\n\t\t[1151,\t\t3,\t\t0.0008299047575760064,\t\t0.04149523787880033,\t\t2.22, 61.69, 0.004502],\n\t\t[1152,\t\t3,\t\t7.417749437366368e-06,\t\t0.0003708874718683184,\t\t2.22, 61.69, 0.004502],\n\t\t[1153,\t\t3,\t\t4.37920348658174e-06,\t\t0.000218960174329087,\t\t2.22, 61.69, 0.004502],\n\t\t[1154,\t\t3,\t\t1.0225677287248534e-05,\t\t0.0005112838643624266,\t\t2.22, 61.69, 0.004502],\n\t\t[1155,\t\t3,\t\t3.879887736397654e-05,\t\t0.001939943868198827,\t\t2.22, 61.69, 0.004502],\n\t\t[1156,\t\t3,\t\t0.0010200134924871187,\t\t0.05100067462435595,\t\t2.22, 61.69, 0.004502],\n\t\t[1157,\t\t3,\t\t0.00027719360593007886,\t\t0.013859680296503944,\t\t2.22, 61.69, 0.004502],\n\t\t[1158,\t\t3,\t\t6.640198284893194e-05,\t\t0.003320099142446597,\t\t2.22, 61.69, 0.004502],\n\t\t[1159,\t\t3,\t\t0.0008593149079194712,\t\t0.04296574539597356,\t\t2.22, 61.69, 0.004502],\n\t\t[1160,\t\t2,\t\t0.008907836724524412,\t\t0.44539183622622064,\t\t0, 0, 0],\n\t\t[1161,\t\t3,\t\t0.0010822211821672938,\t\t0.054111059108364695,\t\t2.22, 61.69, 0.004502],\n\t\t[1162,\t\t2,\t\t0.014701466079666985,\t\t0.7350733039833492,\t\t0, 0, 0],\n\t\t[1163,\t\t2,\t\t0.00941167727254808,\t\t0.47058386362740406,\t\t0, 0, 0],\n\t\t[1164,\t\t2,\t\t0.010066845498626674,\t\t0.5033422749313338,\t\t0, 0, 0],\n\t\t[1165,\t\t2,\t\t0.0017416761450193938,\t\t0.0870838072509697,\t\t0, 0, 0],\n\t\t[1166,\t\t2,\t\t0.005301588846150501,\t\t0.26507944230752506,\t\t0, 0, 0],\n\t\t[1167,\t\t3,\t\t0.00032173361521807824,\t\t0.016086680760903912,\t\t2.22, 61.69, 0.004502],\n\t\t[1168,\t\t3,\t\t8.56746647323757e-05,\t\t0.004283733236618785,\t\t2.22, 61.69, 0.004502],\n\t\t[1169,\t\t3,\t\t0.00017327803824915608,\t\t0.008663901912457804,\t\t2.22, 61.69, 0.004502],\n\t\t[1170,\t\t3,\t\t1.6933420442211857e-05,\t\t0.000846671022110593,\t\t2.22, 61.69, 0.004502],\n\t\t[1171,\t\t3,\t\t0.0005748603194505088,\t\t0.02874301597252544,\t\t2.22, 61.69, 0.004502],\n\t\t[1172,\t\t3,\t\t0.0001739469935516696,\t\t0.00869734967758348,\t\t2.22, 61.69, 0.004502],\n\t\t[1173,\t\t2,\t\t0.009201234761268757,\t\t0.4600617380634379,\t\t0, 0, 0],\n\t\t[1174,\t\t3,\t\t8.021928882473966e-05,\t\t0.004010964441236983,\t\t2.22, 61.69, 0.004502],\n\t\t[1175,\t\t3,\t\t5.445989361520192e-05,\t\t0.002722994680760096,\t\t2.22, 61.69, 0.004502],\n\t\t[1176,\t\t3,\t\t1.4783581244732665e-05,\t\t0.0007391790622366333,\t\t2.22, 61.69, 0.004502],\n\t\t[1177,\t\t3,\t\t0.0017745146198091144,\t\t0.08872573099045572,\t\t2.22, 61.69, 0.004502],\n\t\t[1178,\t\t3,\t\t0.00020168108435446162,\t\t0.010084054217723081,\t\t2.22, 61.69, 0.004502],\n\t\t[1179,\t\t3,\t\t8.316119408334767e-05,\t\t0.004158059704167384,\t\t2.22, 61.69, 0.004502],\n\t\t[1180,\t\t3,\t\t4.3834108298364086e-05,\t\t0.002191705414918204,\t\t2.22, 61.69, 0.004502],\n\t\t[1181,\t\t2,\t\t0.00545834972439398,\t\t0.272917486219699,\t\t0, 0, 0],\n\t\t[1182,\t\t2,\t\t0.006322880792722177,\t\t0.3161440396361089,\t\t0, 0, 0],\n\t\t[1183,\t\t3,\t\t0.0024333246840658566,\t\t0.12166623420329284,\t\t2.22, 61.69, 0.004502],\n\t\t[1184,\t\t3,\t\t0.00026859021396164037,\t\t0.013429510698082018,\t\t2.22, 61.69, 0.004502],\n\t\t[1185,\t\t3,\t\t0.0007221796423758263,\t\t0.036108982118791315,\t\t2.22, 61.69, 
0.004502],\n\t\t[1186,\t\t3,\t\t0.0024774929167619207,\t\t0.12387464583809603,\t\t2.22, 61.69, 0.004502],\n\t\t[1187,\t\t3,\t\t0.0006248151564821885,\t\t0.031240757824109424,\t\t2.22, 61.69, 0.004502],\n\t\t[1188,\t\t2,\t\t0.011440868435801076,\t\t0.5720434217900537,\t\t0, 0, 0],\n\t\t[1189,\t\t3,\t\t0.0011319723082993086,\t\t0.05659861541496544,\t\t2.22, 61.69, 0.004502],\n\t\t[1190,\t\t2,\t\t0.01403960969000889,\t\t0.7019804845004446,\t\t0, 0, 0],\n\t\t[1191,\t\t2,\t\t0.004652379906159672,\t\t0.23261899530798363,\t\t0, 0, 0],\n\t\t[1192,\t\t3,\t\t0.0013658402687938922,\t\t0.06829201343969461,\t\t2.22, 61.69, 0.004502],\n\t\t[1193,\t\t3,\t\t0.00015278576957249078,\t\t0.007639288478624539,\t\t2.22, 61.69, 0.004502],\n\t\t[1194,\t\t3,\t\t0.0005720688022791215,\t\t0.028603440113956075,\t\t2.22, 61.69, 0.004502],\n\t\t[1195,\t\t3,\t\t1.2882573563174789e-05,\t\t0.0006441286781587394,\t\t2.22, 61.69, 0.004502],\n\t\t[1196,\t\t2,\t\t0.006501292042891124,\t\t0.32506460214455624,\t\t0, 0, 0],\n\t\t[1197,\t\t2,\t\t0.0031353013883174898,\t\t0.15676506941587448,\t\t0, 0, 0],\n\t\t[1198,\t\t3,\t\t0.002534966273924786,\t\t0.12674831369623932,\t\t2.22, 61.69, 0.004502],\n\t\t[1199,\t\t2,\t\t0.012822920004466005,\t\t0.6411460002233003,\t\t0, 0, 0],\n\t\t[1200,\t\t2,\t\t0.0035658606694853635,\t\t0.1782930334742682,\t\t0, 0, 0],\n\t\t[1201,\t\t3,\t\t0.0016021597716395785,\t\t0.08010798858197893,\t\t2.22, 61.69, 0.004502],\n\t\t[1202,\t\t3,\t\t0.0031762475555186724,\t\t0.15881237777593363,\t\t2.22, 61.69, 0.004502],\n\t\t[1203,\t\t2,\t\t0.011626157559117188,\t\t0.5813078779558594,\t\t0, 0, 0],\n\t\t[1204,\t\t3,\t\t0.002606876272170014,\t\t0.13034381360850072,\t\t2.22, 61.69, 0.004502],\n\t\t[1205,\t\t3,\t\t2.5526554827708783e-05,\t\t0.0012763277413854394,\t\t2.22, 61.69, 0.004502],\n\t\t[1206,\t\t3,\t\t0.00024235441128435216,\t\t0.012117720564217609,\t\t2.22, 61.69, 0.004502],\n\t\t[1207,\t\t3,\t\t0.00022762038155293296,\t\t0.011381019077646649,\t\t2.22, 61.69, 0.004502],\n\t\t[1208,\t\t3,\t\t0.0001427321512302434,\t\t0.007136607561512171,\t\t2.22, 61.69, 0.004502],\n\t\t[1209,\t\t3,\t\t3.338372514128048e-05,\t\t0.0016691862570640242,\t\t2.22, 61.69, 0.004502],\n\t\t[1210,\t\t3,\t\t0.00039663770550901024,\t\t0.019831885275450514,\t\t2.22, 61.69, 0.004502],\n\t\t[1211,\t\t3,\t\t0.0011462484513341364,\t\t0.057312422566706815,\t\t2.22, 61.69, 0.004502],\n\t\t[1212,\t\t2,\t\t0.005804182676892941,\t\t0.290209133844647,\t\t0, 0, 0],\n\t\t[1213,\t\t2,\t\t0.0036505499187602444,\t\t0.18252749593801224,\t\t0, 0, 0],\n\t\t[1214,\t\t3,\t\t0.0002868549194435664,\t\t0.014342745972178321,\t\t2.22, 61.69, 0.004502],\n\t\t[1215,\t\t3,\t\t0.00011151452675817047,\t\t0.005575726337908523,\t\t2.22, 61.69, 0.004502],\n\t\t[1216,\t\t2,\t\t0.003045526656080512,\t\t0.1522763328040256,\t\t0, 0, 0],\n\t\t[1217,\t\t3,\t\t0.0017447046918686658,\t\t0.08723523459343328,\t\t2.22, 61.69, 0.004502],\n\t\t[1218,\t\t3,\t\t5.7460445745909864e-05,\t\t0.0028730222872954936,\t\t2.22, 61.69, 0.004502],\n\t\t[1219,\t\t3,\t\t0.0007855588922898729,\t\t0.03927794461449365,\t\t2.22, 61.69, 0.004502],\n\t\t[1220,\t\t3,\t\t0.0019177720486359734,\t\t0.09588860243179867,\t\t2.22, 61.69, 0.004502],\n\t\t[1221,\t\t2,\t\t0.0377662225422596,\t\t1.88831112711298,\t\t0, 0, 0],\n\t\t[1222,\t\t2,\t\t0.013436354905899806,\t\t0.6718177452949904,\t\t0, 0, 0],\n\t\t[1223,\t\t3,\t\t0.00024230393037435297,\t\t0.01211519651871765,\t\t2.22, 61.69, 0.004502],\n\t\t[1224,\t\t2,\t\t0.010219261097938644,\t\t0.5109630548969322,\t\t0, 0, 
0],\n\t\t[1225,\t\t3,\t\t0.0011380444607765993,\t\t0.05690222303882997,\t\t2.22, 61.69, 0.004502],\n\t\t[1226,\t\t3,\t\t0.00014415814628845262,\t\t0.007207907314422632,\t\t2.22, 61.69, 0.004502],\n\t\t[1227,\t\t3,\t\t0.0011129900410750567,\t\t0.05564950205375283,\t\t2.22, 61.69, 0.004502],\n\t\t[1228,\t\t3,\t\t0.00014612835633873598,\t\t0.007306417816936799,\t\t2.22, 61.69, 0.004502],\n\t\t[1229,\t\t2,\t\t0.00326230849376,\t\t0.16311542468800003,\t\t0, 0, 0],\n\t\t[1230,\t\t3,\t\t5.7556446428516376e-05,\t\t0.0028778223214258186,\t\t2.22, 61.69, 0.004502],\n\t\t[1231,\t\t3,\t\t0.0013725412263470476,\t\t0.06862706131735238,\t\t2.22, 61.69, 0.004502],\n\t\t[1232,\t\t2,\t\t0.003132134151850627,\t\t0.15660670759253137,\t\t0, 0, 0],\n\t\t[1233,\t\t2,\t\t0.03662908231521014,\t\t1.831454115760507,\t\t0, 0, 0],\n\t\t[1235,\t\t3,\t\t0.0005753349157073776,\t\t0.028766745785368877,\t\t2.22, 61.69, 0.004502],\n\t\t[1236,\t\t2,\t\t0.005234608320670995,\t\t0.26173041603354974,\t\t0, 0, 0],\n\t\t[1237,\t\t3,\t\t0.0009298092078685558,\t\t0.04649046039342779,\t\t2.22, 61.69, 0.004502],\n\t\t[1238,\t\t2,\t\t0.010558404215023522,\t\t0.5279202107511761,\t\t0, 0, 0],\n\t\t[1239,\t\t3,\t\t0.0001443666373276477,\t\t0.007218331866382386,\t\t2.22, 61.69, 0.004502],\n\t\t[1240,\t\t2,\t\t0.013028548888252384,\t\t0.6514274444126192,\t\t0, 0, 0],\n\t\t[1241,\t\t2,\t\t0.024532881090784327,\t\t1.2266440545392163,\t\t0, 0, 0],\n\t\t[1242,\t\t3,\t\t0.001226848646080533,\t\t0.061342432304026646,\t\t2.22, 61.69, 0.004502],\n\t\t[1243,\t\t2,\t\t0.004813953307010397,\t\t0.24069766535051987,\t\t0, 0, 0],\n\t\t[1244,\t\t2,\t\t0.020592901244747865,\t\t1.0296450622373932,\t\t0, 0, 0],\n\t\t[1245,\t\t3,\t\t0.00032171860701923997,\t\t0.016085930350962,\t\t2.22, 61.69, 0.004502],\n\t\t[1246,\t\t2,\t\t0.003636870278584459,\t\t0.18184351392922293,\t\t0, 0, 0],\n\t\t[1247,\t\t3,\t\t0.0013899571448864774,\t\t0.06949785724432388,\t\t2.22, 61.69, 0.004502],\n\t\t[1248,\t\t2,\t\t0.005854245631350222,\t\t0.2927122815675111,\t\t0, 0, 0],\n\t\t[1249,\t\t2,\t\t0.004846915908139961,\t\t0.24234579540699805,\t\t0, 0, 0],\n\t\t[1250,\t\t3,\t\t0.0019627317861894665,\t\t0.09813658930947333,\t\t2.22, 61.69, 0.004502],\n\t\t[1251,\t\t3,\t\t0.0014899668826355728,\t\t0.07449834413177864,\t\t2.22, 61.69, 0.004502],\n\t\t[1252,\t\t3,\t\t0.0009477821555247328,\t\t0.047389107776236644,\t\t2.22, 61.69, 0.004502],\n\t\t[1253,\t\t2,\t\t0.0037152671425535423,\t\t0.1857633571276771,\t\t0, 0, 0],\n\t\t[1254,\t\t2,\t\t0.005238024431161238,\t\t0.2619012215580619,\t\t0, 0, 0],\n\t\t[1255,\t\t3,\t\t0.0002430881191708174,\t\t0.01215440595854087,\t\t2.22, 61.69, 0.004502],\n\t\t[1256,\t\t3,\t\t0.0009607764830526361,\t\t0.048038824152631804,\t\t2.22, 61.69, 0.004502],\n\t\t[1257,\t\t2,\t\t0.005662916214121937,\t\t0.28314581070609685,\t\t0, 0, 0],\n\t\t[1258,\t\t2,\t\t0.014991588973313675,\t\t0.7495794486656838,\t\t0, 0, 0],\n\t\t[1259,\t\t2,\t\t0.00695753592752513,\t\t0.34787679637625657,\t\t0, 0, 0],\n\t\t[1260,\t\t3,\t\t0.0008105375709406903,\t\t0.04052687854703452,\t\t2.22, 61.69, 0.004502],\n\t\t[1261,\t\t2,\t\t0.008551030064555017,\t\t0.4275515032277509,\t\t0, 0, 0],\n\t\t[1262,\t\t3,\t\t3.3365758929065435e-05,\t\t0.0016682879464532717,\t\t2.22, 61.69, 0.004502],\n\t\t[1263,\t\t3,\t\t2.243579925674327e-05,\t\t0.0011217899628371635,\t\t2.22, 61.69, 0.004502],\n\t\t[1264,\t\t2,\t\t0.005222533303161435,\t\t0.2611266651580718,\t\t0, 0, 0],\n\t\t[1265,\t\t3,\t\t0.0004236530619172327,\t\t0.021182653095861634,\t\t2.22, 61.69, 
0.004502],\n\t\t[1266,\t\t2,\t\t0.007621029313600565,\t\t0.38105146568002835,\t\t0, 0, 0],\n\t\t[1267,\t\t3,\t\t0.002512674942558201,\t\t0.12563374712791006,\t\t2.22, 61.69, 0.004502],\n\t\t[1268,\t\t3,\t\t0.00011633352421170827,\t\t0.0058166762105854135,\t\t2.22, 61.69, 0.004502],\n\t\t[1269,\t\t3,\t\t0.0001565105454922114,\t\t0.007825527274610571,\t\t2.22, 61.69, 0.004502],\n\t\t[1270,\t\t3,\t\t0.0018205880939742604,\t\t0.09102940469871301,\t\t2.22, 61.69, 0.004502],\n\t\t[1271,\t\t3,\t\t0.0029941519367685425,\t\t0.14970759683842713,\t\t2.22, 61.69, 0.004502],\n\t\t[1272,\t\t3,\t\t6.857643720268356e-05,\t\t0.0034288218601341786,\t\t2.22, 61.69, 0.004502],\n\t\t[1273,\t\t3,\t\t0.00013809561181086458,\t\t0.006904780590543229,\t\t2.22, 61.69, 0.004502],\n\t\t[1274,\t\t2,\t\t0.0033801727100761705,\t\t0.1690086355038085,\t\t0, 0, 0],\n\t\t[1275,\t\t2,\t\t0.006307329492962109,\t\t0.3153664746481055,\t\t0, 0, 0],\n\t\t[1276,\t\t3,\t\t0.001633288835647369,\t\t0.08166444178236844,\t\t2.22, 61.69, 0.004502],\n\t\t[1277,\t\t2,\t\t0.004176942042758357,\t\t0.20884710213791788,\t\t0, 0, 0],\n\t\t[1278,\t\t2,\t\t0.010850406134369231,\t\t0.5425203067184615,\t\t0, 0, 0],\n\t\t[1279,\t\t3,\t\t1.1414295345720978e-07,\t\t5.707147672860489e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1280,\t\t3,\t\t1.6315991794278248e-05,\t\t0.0008157995897139123,\t\t2.22, 61.69, 0.004502],\n\t\t[1281,\t\t3,\t\t6.370925653689495e-05,\t\t0.0031854628268447476,\t\t2.22, 61.69, 0.004502],\n\t\t[1282,\t\t3,\t\t0.0001457022861234255,\t\t0.007285114306171277,\t\t2.22, 61.69, 0.004502],\n\t\t[1283,\t\t2,\t\t0.08261824948992594,\t\t4.130912474496298,\t\t0, 0, 0],\n\t\t[1284,\t\t3,\t\t0.0012199625614761351,\t\t0.06099812807380676,\t\t2.22, 61.69, 0.004502],\n\t\t[1285,\t\t3,\t\t7.45238092387856e-05,\t\t0.0037261904619392797,\t\t2.22, 61.69, 0.004502],\n\t\t[1286,\t\t3,\t\t0.0011377796471657795,\t\t0.05688898235828898,\t\t2.22, 61.69, 0.004502],\n\t\t[1287,\t\t2,\t\t0.005933272587501368,\t\t0.29666362937506835,\t\t0, 0, 0],\n\t\t[1288,\t\t2,\t\t0.00944760882155904,\t\t0.472380441077952,\t\t0, 0, 0],\n\t\t[1289,\t\t2,\t\t0.00662614067902875,\t\t0.3313070339514375,\t\t0, 0, 0],\n\t\t[1290,\t\t3,\t\t0.0002355657613760111,\t\t0.011778288068800557,\t\t2.22, 61.69, 0.004502],\n\t\t[1291,\t\t2,\t\t0.0062575490505418305,\t\t0.31287745252709154,\t\t0, 0, 0],\n\t\t[1292,\t\t3,\t\t0.002653563231501149,\t\t0.13267816157505744,\t\t2.22, 61.69, 0.004502],\n\t\t[1293,\t\t3,\t\t7.257078378895112e-05,\t\t0.003628539189447556,\t\t2.22, 61.69, 0.004502],\n\t\t[1294,\t\t3,\t\t0.00016427947047883902,\t\t0.008213973523941952,\t\t2.22, 61.69, 0.004502],\n\t\t[1295,\t\t3,\t\t0.00017762178660707758,\t\t0.00888108933035388,\t\t2.22, 61.69, 0.004502],\n\t\t[1296,\t\t3,\t\t0.0008568882170174592,\t\t0.04284441085087297,\t\t2.22, 61.69, 0.004502],\n\t\t[1297,\t\t2,\t\t0.00604494531390442,\t\t0.30224726569522103,\t\t0, 0, 0],\n\t\t[1298,\t\t3,\t\t0.0001029033207011301,\t\t0.005145166035056506,\t\t2.22, 61.69, 0.004502],\n\t\t[1299,\t\t3,\t\t5.5288554460052276e-05,\t\t0.0027644277230026142,\t\t2.22, 61.69, 0.004502],\n\t\t[1300,\t\t3,\t\t0.001511593201166196,\t\t0.07557966005830981,\t\t2.22, 61.69, 0.004502],\n\t\t[1301,\t\t2,\t\t0.0038746782543149596,\t\t0.193733912715748,\t\t0, 0, 0],\n\t\t[1302,\t\t3,\t\t0.0003104985267932093,\t\t0.015524926339660468,\t\t2.22, 61.69, 0.004502],\n\t\t[1303,\t\t3,\t\t0.00027600750632746427,\t\t0.013800375316373212,\t\t2.22, 61.69, 0.004502],\n\t\t[1304,\t\t3,\t\t0.000610793340517708,\t\t0.030539667025885397,\t\t2.22, 61.69, 
0.004502],\n\t\t[1305,\t\t3,\t\t2.9075695387122924e-07,\t\t1.4537847693561463e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1306,\t\t3,\t\t0.00011631130798083146,\t\t0.005815565399041573,\t\t2.22, 61.69, 0.004502],\n\t\t[1307,\t\t3,\t\t1.9031130574577255e-05,\t\t0.0009515565287288628,\t\t2.22, 61.69, 0.004502],\n\t\t[1308,\t\t3,\t\t9.9135649111658e-05,\t\t0.0049567824555829,\t\t2.22, 61.69, 0.004502],\n\t\t[1309,\t\t3,\t\t0.0002132096944766602,\t\t0.01066048472383301,\t\t2.22, 61.69, 0.004502],\n\t\t[1310,\t\t3,\t\t0.00010478060392325507,\t\t0.005239030196162754,\t\t2.22, 61.69, 0.004502],\n\t\t[1311,\t\t3,\t\t0.0006543678039112993,\t\t0.03271839019556497,\t\t2.22, 61.69, 0.004502],\n\t\t[1312,\t\t2,\t\t0.016696303623916272,\t\t0.8348151811958137,\t\t0, 0, 0],\n\t\t[1313,\t\t3,\t\t0.0019631283227609974,\t\t0.09815641613804986,\t\t2.22, 61.69, 0.004502],\n\t\t[1314,\t\t3,\t\t0.0007641975650906521,\t\t0.038209878254532606,\t\t2.22, 61.69, 0.004502],\n\t\t[1315,\t\t3,\t\t0.0005015944131679134,\t\t0.02507972065839567,\t\t2.22, 61.69, 0.004502],\n\t\t[1316,\t\t3,\t\t7.629150231212524e-05,\t\t0.0038145751156062617,\t\t2.22, 61.69, 0.004502],\n\t\t[1317,\t\t3,\t\t0.0015252502049763412,\t\t0.07626251024881707,\t\t2.22, 61.69, 0.004502],\n\t\t[1318,\t\t3,\t\t0.00012454395408676328,\t\t0.0062271977043381645,\t\t2.22, 61.69, 0.004502],\n\t\t[1319,\t\t3,\t\t0.001127343871228203,\t\t0.05636719356141015,\t\t2.22, 61.69, 0.004502],\n\t\t[1320,\t\t3,\t\t0.0013215329138219017,\t\t0.06607664569109509,\t\t2.22, 61.69, 0.004502],\n\t\t[1321,\t\t3,\t\t9.318652973073723e-06,\t\t0.00046593264865368617,\t\t2.22, 61.69, 0.004502],\n\t\t[1322,\t\t3,\t\t5.919056262068799e-05,\t\t0.0029595281310344,\t\t2.22, 61.69, 0.004502],\n\t\t[1323,\t\t2,\t\t0.012548332357131292,\t\t0.6274166178565647,\t\t0, 0, 0],\n\t\t[1324,\t\t3,\t\t0.0008316328586631403,\t\t0.04158164293315702,\t\t2.22, 61.69, 0.004502],\n\t\t[1325,\t\t2,\t\t0.0030722301205568813,\t\t0.15361150602784407,\t\t0, 0, 0],\n\t\t[1326,\t\t2,\t\t0.0036242041289439157,\t\t0.1812102064471958,\t\t0, 0, 0],\n\t\t[1327,\t\t2,\t\t0.0032338308031027566,\t\t0.16169154015513784,\t\t0, 0, 0],\n\t\t[1328,\t\t3,\t\t0.0010226241895011407,\t\t0.05113120947505704,\t\t2.22, 61.69, 0.004502],\n\t\t[1329,\t\t2,\t\t0.013921309839652627,\t\t0.6960654919826315,\t\t0, 0, 0],\n\t\t[1330,\t\t3,\t\t0.0017557263243104528,\t\t0.08778631621552264,\t\t2.22, 61.69, 0.004502],\n\t\t[1331,\t\t3,\t\t1.841349064624893e-05,\t\t0.0009206745323124464,\t\t2.22, 61.69, 0.004502],\n\t\t[1332,\t\t3,\t\t0.0016738699394560756,\t\t0.08369349697280379,\t\t2.22, 61.69, 0.004502],\n\t\t[1333,\t\t3,\t\t0.0029061854047842247,\t\t0.14530927023921122,\t\t2.22, 61.69, 0.004502],\n\t\t[1334,\t\t3,\t\t3.534379925800438e-05,\t\t0.0017671899629002194,\t\t2.22, 61.69, 0.004502],\n\t\t[1335,\t\t3,\t\t0.0001685787115929389,\t\t0.008428935579646947,\t\t2.22, 61.69, 0.004502],\n\t\t[1336,\t\t3,\t\t0.0017411252799983399,\t\t0.08705626399991699,\t\t2.22, 61.69, 0.004502],\n\t\t[1337,\t\t2,\t\t0.007722987880773172,\t\t0.3861493940386586,\t\t0, 0, 0],\n\t\t[1338,\t\t3,\t\t5.300015004820578e-05,\t\t0.0026500075024102894,\t\t2.22, 61.69, 0.004502],\n\t\t[1339,\t\t3,\t\t0.0006421253879349708,\t\t0.032106269396748544,\t\t2.22, 61.69, 0.004502],\n\t\t[1340,\t\t2,\t\t0.004462598113304154,\t\t0.22312990566520774,\t\t0, 0, 0],\n\t\t[1341,\t\t2,\t\t0.013083384367936227,\t\t0.6541692183968114,\t\t0, 0, 0],\n\t\t[1342,\t\t3,\t\t2.5492703609696483e-05,\t\t0.0012746351804848242,\t\t2.22, 61.69, 
0.004502],\n\t\t[1343,\t\t3,\t\t2.9547582792187946e-05,\t\t0.0014773791396093974,\t\t2.22, 61.69, 0.004502],\n\t\t[1344,\t\t3,\t\t1.4391232894862565e-05,\t\t0.0007195616447431282,\t\t2.22, 61.69, 0.004502],\n\t\t[1345,\t\t3,\t\t0.00022284221535288394,\t\t0.011142110767644198,\t\t2.22, 61.69, 0.004502],\n\t\t[1346,\t\t2,\t\t0.013395509550816748,\t\t0.6697754775408374,\t\t0, 0, 0],\n\t\t[1347,\t\t2,\t\t0.023031020216814052,\t\t1.151551010840703,\t\t0, 0, 0],\n\t\t[1348,\t\t3,\t\t0.0014456315404578254,\t\t0.07228157702289127,\t\t2.22, 61.69, 0.004502],\n\t\t[1349,\t\t3,\t\t0.0026962338610516797,\t\t0.13481169305258398,\t\t2.22, 61.69, 0.004502],\n\t\t[1350,\t\t3,\t\t6.046050411995944e-06,\t\t0.0003023025205997972,\t\t2.22, 61.69, 0.004502],\n\t\t[1352,\t\t3,\t\t2.125607314436762e-05,\t\t0.001062803657218381,\t\t2.22, 61.69, 0.004502],\n\t\t[1355,\t\t3,\t\t0.0001074820707981226,\t\t0.005374103539906131,\t\t2.22, 61.69, 0.004502],\n\t\t[1356,\t\t2,\t\t0.004309496078670602,\t\t0.21547480393353008,\t\t0, 0, 0],\n\t\t[1357,\t\t2,\t\t0.0034388604660820295,\t\t0.1719430233041015,\t\t0, 0, 0],\n\t\t[1358,\t\t3,\t\t1.57431431082847e-05,\t\t0.0007871571554142351,\t\t2.22, 61.69, 0.004502],\n\t\t[1359,\t\t2,\t\t0.004496673943395517,\t\t0.22483369716977586,\t\t0, 0, 0],\n\t\t[1360,\t\t3,\t\t0.0010909105792324338,\t\t0.054545528961621695,\t\t2.22, 61.69, 0.004502],\n\t\t[1361,\t\t2,\t\t0.0040238936307783425,\t\t0.20119468153891715,\t\t0, 0, 0],\n\t\t[1362,\t\t2,\t\t0.005036121783141224,\t\t0.2518060891570612,\t\t0, 0, 0],\n\t\t[1363,\t\t3,\t\t2.0542105533339307e-06,\t\t0.00010271052766669654,\t\t2.22, 61.69, 0.004502],\n\t\t[1364,\t\t3,\t\t2.9656869999673927e-06,\t\t0.00014828434999836963,\t\t2.22, 61.69, 0.004502],\n\t\t[1365,\t\t3,\t\t2.604667538532545e-08,\t\t1.3023337692662726e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1366,\t\t3,\t\t3.732368726720164e-05,\t\t0.001866184363360082,\t\t2.22, 61.69, 0.004502],\n\t\t[1367,\t\t3,\t\t0.0027924620350495274,\t\t0.13962310175247636,\t\t2.22, 61.69, 0.004502],\n\t\t[1368,\t\t3,\t\t0.00011173510425788287,\t\t0.005586755212894144,\t\t2.22, 61.69, 0.004502],\n\t\t[1369,\t\t3,\t\t0.0005073133310147165,\t\t0.025365666550735824,\t\t2.22, 61.69, 0.004502],\n\t\t[1370,\t\t3,\t\t2.185563890765493e-05,\t\t0.0010927819453827466,\t\t2.22, 61.69, 0.004502],\n\t\t[1371,\t\t2,\t\t0.005205462164069383,\t\t0.26027310820346916,\t\t0, 0, 0],\n\t\t[1372,\t\t2,\t\t0.010885637631143507,\t\t0.5442818815571754,\t\t0, 0, 0],\n\t\t[1373,\t\t3,\t\t0.0018484942696640313,\t\t0.09242471348320157,\t\t2.22, 61.69, 0.004502],\n\t\t[1374,\t\t2,\t\t0.006889508467327262,\t\t0.3444754233663631,\t\t0, 0, 0],\n\t\t[1375,\t\t2,\t\t0.003897629175102736,\t\t0.1948814587551368,\t\t0, 0, 0],\n\t\t[1376,\t\t2,\t\t0.011218109707548912,\t\t0.5609054853774457,\t\t0, 0, 0],\n\t\t[1377,\t\t2,\t\t0.00931894272705397,\t\t0.4659471363526985,\t\t0, 0, 0],\n\t\t[1378,\t\t2,\t\t0.00863123723331097,\t\t0.43156186166554844,\t\t0, 0, 0],\n\t\t[1379,\t\t3,\t\t5.1310566028095876e-05,\t\t0.002565528301404794,\t\t2.22, 61.69, 0.004502],\n\t\t[1380,\t\t3,\t\t7.724465320438908e-05,\t\t0.003862232660219454,\t\t2.22, 61.69, 0.004502],\n\t\t[1381,\t\t3,\t\t6.446222679588771e-05,\t\t0.003223111339794386,\t\t2.22, 61.69, 0.004502],\n\t\t[1382,\t\t2,\t\t0.008838822964419164,\t\t0.4419411482209583,\t\t0, 0, 0],\n\t\t[1383,\t\t2,\t\t0.006991449967869686,\t\t0.34957249839348425,\t\t0, 0, 0],\n\t\t[1384,\t\t3,\t\t0.0002972463393517766,\t\t0.014862316967588829,\t\t2.22, 61.69, 
0.004502],\n\t\t[1385,\t\t3,\t\t7.92302201959824e-06,\t\t0.0003961511009799121,\t\t2.22, 61.69, 0.004502],\n\t\t[1386,\t\t3,\t\t4.2899112828393286e-05,\t\t0.002144955641419664,\t\t2.22, 61.69, 0.004502],\n\t\t[1387,\t\t3,\t\t0.00022240699424911273,\t\t0.011120349712455638,\t\t2.22, 61.69, 0.004502],\n\t\t[1388,\t\t3,\t\t5.909025672850305e-05,\t\t0.0029545128364251525,\t\t2.22, 61.69, 0.004502],\n\t\t[1389,\t\t3,\t\t1.3594135764164036e-05,\t\t0.0006797067882082019,\t\t2.22, 61.69, 0.004502],\n\t\t[1390,\t\t3,\t\t0.00023763846235409512,\t\t0.011881923117704758,\t\t2.22, 61.69, 0.004502],\n\t\t[1391,\t\t3,\t\t3.321367742134543e-05,\t\t0.0016606838710672715,\t\t2.22, 61.69, 0.004502],\n\t\t[1392,\t\t3,\t\t0.0012290826914265437,\t\t0.06145413457132718,\t\t2.22, 61.69, 0.004502],\n\t\t[1393,\t\t3,\t\t8.763130962106806e-05,\t\t0.004381565481053403,\t\t2.22, 61.69, 0.004502],\n\t\t[1394,\t\t3,\t\t6.862035771367977e-05,\t\t0.003431017885683988,\t\t2.22, 61.69, 0.004502],\n\t\t[1395,\t\t3,\t\t4.696755105006889e-06,\t\t0.00023483775525034447,\t\t2.22, 61.69, 0.004502],\n\t\t[1396,\t\t3,\t\t1.6623117797696163e-06,\t\t8.311558898848081e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1397,\t\t3,\t\t0.0015969317375463513,\t\t0.07984658687731756,\t\t2.22, 61.69, 0.004502],\n\t\t[1398,\t\t3,\t\t0.00017695743260373348,\t\t0.008847871630186674,\t\t2.22, 61.69, 0.004502],\n\t\t[1399,\t\t3,\t\t0.0011375222056992432,\t\t0.05687611028496216,\t\t2.22, 61.69, 0.004502],\n\t\t[1400,\t\t3,\t\t8.258214886247176e-05,\t\t0.004129107443123589,\t\t2.22, 61.69, 0.004502],\n\t\t[1401,\t\t2,\t\t0.005687529053514607,\t\t0.28437645267573036,\t\t0, 0, 0],\n\t\t[1402,\t\t3,\t\t0.001676149990745289,\t\t0.08380749953726446,\t\t2.22, 61.69, 0.004502],\n\t\t[1403,\t\t2,\t\t0.007617262031172502,\t\t0.38086310155862513,\t\t0, 0, 0],\n\t\t[1404,\t\t2,\t\t0.008581667499251882,\t\t0.42908337496259413,\t\t0, 0, 0],\n\t\t[1405,\t\t3,\t\t0.0018812625008740895,\t\t0.09406312504370447,\t\t2.22, 61.69, 0.004502],\n\t\t[1406,\t\t3,\t\t0.0006852566793279422,\t\t0.03426283396639711,\t\t2.22, 61.69, 0.004502],\n\t\t[1407,\t\t3,\t\t1.3471796788943673e-05,\t\t0.0006735898394471837,\t\t2.22, 61.69, 0.004502],\n\t\t[1408,\t\t3,\t\t0.002615151153581973,\t\t0.13075755767909866,\t\t2.22, 61.69, 0.004502],\n\t\t[1409,\t\t3,\t\t0.0006526669786320644,\t\t0.032633348931603225,\t\t2.22, 61.69, 0.004502],\n\t\t[1410,\t\t3,\t\t0.002029412159381007,\t\t0.10147060796905036,\t\t2.22, 61.69, 0.004502],\n\t\t[1411,\t\t3,\t\t0.0025079869254713357,\t\t0.1253993462735668,\t\t2.22, 61.69, 0.004502],\n\t\t[1412,\t\t3,\t\t0.00022798442300424305,\t\t0.011399221150212153,\t\t2.22, 61.69, 0.004502],\n\t\t[1413,\t\t3,\t\t0.00020298638958725937,\t\t0.01014931947936297,\t\t2.22, 61.69, 0.004502],\n\t\t[1414,\t\t3,\t\t0.000925332566672142,\t\t0.046266628333607104,\t\t2.22, 61.69, 0.004502],\n\t\t[1415,\t\t3,\t\t0.00025734845281210695,\t\t0.012867422640605348,\t\t2.22, 61.69, 0.004502],\n\t\t[1416,\t\t3,\t\t0.0002670681053003713,\t\t0.013353405265018567,\t\t2.22, 61.69, 0.004502],\n\t\t[1417,\t\t3,\t\t8.343951196997786e-08,\t\t4.171975598498892e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1418,\t\t2,\t\t0.005140590016552821,\t\t0.2570295008276411,\t\t0, 0, 0],\n\t\t[1419,\t\t3,\t\t0.0016098297953353559,\t\t0.08049148976676779,\t\t2.22, 61.69, 0.004502],\n\t\t[1420,\t\t3,\t\t8.91112970779674e-05,\t\t0.00445556485389837,\t\t2.22, 61.69, 0.004502],\n\t\t[1421,\t\t3,\t\t0.00040947003164086717,\t\t0.02047350158204336,\t\t2.22, 61.69, 
0.004502],\n\t\t[1422,\t\t3,\t\t0.0002810343588345646,\t\t0.014051717941728228,\t\t2.22, 61.69, 0.004502],\n\t\t[1423,\t\t3,\t\t0.00011452210640321215,\t\t0.0057261053201606085,\t\t2.22, 61.69, 0.004502],\n\t\t[1424,\t\t2,\t\t0.01394783725195249,\t\t0.6973918625976245,\t\t0, 0, 0],\n\t\t[1425,\t\t3,\t\t0.0013602274146640447,\t\t0.06801137073320224,\t\t2.22, 61.69, 0.004502],\n\t\t[1426,\t\t2,\t\t0.004377563184547638,\t\t0.2188781592273819,\t\t0, 0, 0],\n\t\t[1427,\t\t2,\t\t0.01492316500124391,\t\t0.7461582500621956,\t\t0, 0, 0],\n\t\t[1428,\t\t2,\t\t0.009895557445284836,\t\t0.49477787226424186,\t\t0, 0, 0],\n\t\t[1429,\t\t3,\t\t0.00037837920966970665,\t\t0.01891896048348533,\t\t2.22, 61.69, 0.004502],\n\t\t[1430,\t\t3,\t\t8.962714204016262e-07,\t\t4.481357102008131e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1431,\t\t2,\t\t0.010428076802064071,\t\t0.5214038401032035,\t\t0, 0, 0],\n\t\t[1432,\t\t3,\t\t0.0007529432834088135,\t\t0.03764716417044068,\t\t2.22, 61.69, 0.004502],\n\t\t[1433,\t\t2,\t\t0.08207564315805406,\t\t4.103782157902703,\t\t0, 0, 0],\n\t\t[1434,\t\t2,\t\t0.0057399583024114464,\t\t0.2869979151205723,\t\t0, 0, 0],\n\t\t[1435,\t\t2,\t\t0.005520334862536408,\t\t0.2760167431268204,\t\t0, 0, 0],\n\t\t[1436,\t\t2,\t\t0.006266510483771511,\t\t0.31332552418857557,\t\t0, 0, 0],\n\t\t[1437,\t\t2,\t\t0.011337481029181844,\t\t0.5668740514590922,\t\t0, 0, 0],\n\t\t[1438,\t\t2,\t\t0.014062363076737744,\t\t0.7031181538368872,\t\t0, 0, 0],\n\t\t[1439,\t\t2,\t\t0.0036576726096125903,\t\t0.18288363048062953,\t\t0, 0, 0],\n\t\t[1440,\t\t3,\t\t2.5447076563209588e-05,\t\t0.0012723538281604794,\t\t2.22, 61.69, 0.004502],\n\t\t[1441,\t\t3,\t\t1.0923020560921105e-05,\t\t0.0005461510280460552,\t\t2.22, 61.69, 0.004502],\n\t\t[1442,\t\t3,\t\t4.555157486056611e-05,\t\t0.0022775787430283057,\t\t2.22, 61.69, 0.004502],\n\t\t[1443,\t\t2,\t\t0.006557506818224797,\t\t0.3278753409112398,\t\t0, 0, 0],\n\t\t[1444,\t\t3,\t\t0.0005717925297728792,\t\t0.028589626488643962,\t\t2.22, 61.69, 0.004502],\n\t\t[1445,\t\t3,\t\t0.0015938921576921367,\t\t0.07969460788460683,\t\t2.22, 61.69, 0.004502],\n\t\t[1446,\t\t2,\t\t0.03816689495267523,\t\t1.9083447476337616,\t\t0, 0, 0],\n\t\t[1447,\t\t2,\t\t0.005696308888305882,\t\t0.2848154444152941,\t\t0, 0, 0],\n\t\t[1448,\t\t3,\t\t0.00047896583949883246,\t\t0.023948291974941624,\t\t2.22, 61.69, 0.004502],\n\t\t[1449,\t\t2,\t\t0.006075750962706547,\t\t0.3037875481353274,\t\t0, 0, 0],\n\t\t[1450,\t\t2,\t\t0.0037724056227270084,\t\t0.18862028113635043,\t\t0, 0, 0],\n\t\t[1451,\t\t2,\t\t0.0043416728967246255,\t\t0.21708364483623127,\t\t0, 0, 0],\n\t\t[1452,\t\t3,\t\t0.0015322750739690742,\t\t0.0766137536984537,\t\t2.22, 61.69, 0.004502],\n\t\t[1453,\t\t2,\t\t0.004134065549943135,\t\t0.20670327749715672,\t\t0, 0, 0],\n\t\t[1454,\t\t2,\t\t0.009875666531734596,\t\t0.49378332658672985,\t\t0, 0, 0],\n\t\t[1455,\t\t3,\t\t4.166284213856912e-05,\t\t0.0020831421069284557,\t\t2.22, 61.69, 0.004502],\n\t\t[1456,\t\t2,\t\t0.0031865889687578697,\t\t0.15932944843789354,\t\t0, 0, 0],\n\t\t[1457,\t\t3,\t\t0.00012749408723576006,\t\t0.006374704361788003,\t\t2.22, 61.69, 0.004502],\n\t\t[1458,\t\t3,\t\t1.5673534819523866e-05,\t\t0.0007836767409761935,\t\t2.22, 61.69, 0.004502],\n\t\t[1459,\t\t3,\t\t0.00033798517072819835,\t\t0.01689925853640992,\t\t2.22, 61.69, 0.004502],\n\t\t[1460,\t\t2,\t\t0.004379299441818276,\t\t0.21896497209091384,\t\t0, 0, 0],\n\t\t[1461,\t\t3,\t\t0.001142843079861875,\t\t0.05714215399309376,\t\t2.22, 61.69, 
0.004502],\n\t\t[1462,\t\t3,\t\t0.00015295973435731913,\t\t0.007647986717865956,\t\t2.22, 61.69, 0.004502],\n\t\t[1463,\t\t3,\t\t4.5276834778775515e-05,\t\t0.002263841738938776,\t\t2.22, 61.69, 0.004502],\n\t\t[1464,\t\t2,\t\t0.013934601684842136,\t\t0.6967300842421068,\t\t0, 0, 0],\n\t\t[1465,\t\t3,\t\t0.0003374045759652472,\t\t0.01687022879826236,\t\t2.22, 61.69, 0.004502],\n\t\t[1466,\t\t3,\t\t0.0003619193984034768,\t\t0.01809596992017384,\t\t2.22, 61.69, 0.004502],\n\t\t[1467,\t\t3,\t\t0.00013344536897072216,\t\t0.006672268448536108,\t\t2.22, 61.69, 0.004502],\n\t\t[1468,\t\t3,\t\t0.0015144656821575462,\t\t0.0757232841078773,\t\t2.22, 61.69, 0.004502],\n\t\t[1469,\t\t2,\t\t0.004138503876498319,\t\t0.20692519382491598,\t\t0, 0, 0],\n\t\t[1470,\t\t2,\t\t0.005027084884666319,\t\t0.2513542442333159,\t\t0, 0, 0],\n\t\t[1471,\t\t2,\t\t0.010132763321185349,\t\t0.5066381660592674,\t\t0, 0, 0],\n\t\t[1472,\t\t3,\t\t0.0007626820845032627,\t\t0.03813410422516314,\t\t2.22, 61.69, 0.004502],\n\t\t[1473,\t\t3,\t\t0.0005323801851315335,\t\t0.026619009256576683,\t\t2.22, 61.69, 0.004502],\n\t\t[1474,\t\t3,\t\t8.905977123682595e-05,\t\t0.004452988561841298,\t\t2.22, 61.69, 0.004502],\n\t\t[1475,\t\t3,\t\t2.4884191103347185e-05,\t\t0.0012442095551673594,\t\t2.22, 61.69, 0.004502],\n\t\t[1476,\t\t2,\t\t0.015946059282369706,\t\t0.7973029641184852,\t\t0, 0, 0],\n\t\t[1477,\t\t3,\t\t0.0007717725169969112,\t\t0.03858862584984556,\t\t2.22, 61.69, 0.004502],\n\t\t[1479,\t\t3,\t\t0.00035603636123413484,\t\t0.01780181806170674,\t\t2.22, 61.69, 0.004502],\n\t\t[1480,\t\t3,\t\t0.0011893307912248102,\t\t0.05946653956124052,\t\t2.22, 61.69, 0.004502],\n\t\t[1481,\t\t3,\t\t3.3833873695351113e-06,\t\t0.00016916936847675558,\t\t2.22, 61.69, 0.004502],\n\t\t[1482,\t\t3,\t\t0.0008049713570614613,\t\t0.04024856785307307,\t\t2.22, 61.69, 0.004502],\n\t\t[1483,\t\t3,\t\t0.0002291607516312977,\t\t0.011458037581564884,\t\t2.22, 61.69, 0.004502],\n\t\t[1484,\t\t3,\t\t1.9041073525508303e-06,\t\t9.520536762754152e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1485,\t\t3,\t\t3.5876538426778735e-05,\t\t0.0017938269213389369,\t\t2.22, 61.69, 0.004502],\n\t\t[1486,\t\t3,\t\t0.00018457774197472868,\t\t0.009228887098736434,\t\t2.22, 61.69, 0.004502],\n\t\t[1487,\t\t3,\t\t7.276038526853737e-05,\t\t0.0036380192634268686,\t\t2.22, 61.69, 0.004502],\n\t\t[1488,\t\t3,\t\t0.00016899705763430794,\t\t0.008449852881715398,\t\t2.22, 61.69, 0.004502],\n\t\t[1489,\t\t3,\t\t7.571817467557017e-06,\t\t0.00037859087337785094,\t\t2.22, 61.69, 0.004502],\n\t\t[1490,\t\t2,\t\t0.04981318633597547,\t\t2.4906593167987734,\t\t0, 0, 0],\n\t\t[1491,\t\t2,\t\t0.005387257187745477,\t\t0.26936285938727383,\t\t0, 0, 0],\n\t\t[1492,\t\t2,\t\t0.014637639488319377,\t\t0.7318819744159688,\t\t0, 0, 0],\n\t\t[1493,\t\t2,\t\t0.005319414988695112,\t\t0.26597074943475557,\t\t0, 0, 0],\n\t\t[1494,\t\t2,\t\t0.01688797189278994,\t\t0.8443985946394971,\t\t0, 0, 0],\n\t\t[1495,\t\t2,\t\t0.002740014598409114,\t\t0.13700072992045573,\t\t0, 0, 0],\n\t\t[1496,\t\t3,\t\t1.795813430876661e-08,\t\t8.979067154383307e-07,\t\t2.22, 61.69, 0.004502],\n\t\t[1497,\t\t2,\t\t0.005670372667342641,\t\t0.28351863336713207,\t\t0, 0, 0],\n\t\t[1498,\t\t2,\t\t0.006735488235440387,\t\t0.3367744117720194,\t\t0, 0, 0],\n\t\t[1499,\t\t3,\t\t7.849477098292857e-05,\t\t0.0039247385491464285,\t\t2.22, 61.69, 0.004502],\n\t\t[1500,\t\t3,\t\t9.85597782087346e-06,\t\t0.000492798891043673,\t\t2.22, 61.69, 0.004502],\n\t\t[1501,\t\t3,\t\t0.0005198212383651805,\t\t0.02599106191825903,\t\t2.22, 61.69, 
0.004502],\n\t\t[1502,\t\t3,\t\t2.391151309492037e-05,\t\t0.0011955756547460184,\t\t2.22, 61.69, 0.004502],\n\t\t[1503,\t\t3,\t\t0.002065098419918782,\t\t0.10325492099593911,\t\t2.22, 61.69, 0.004502],\n\t\t[1504,\t\t2,\t\t0.010196139877976951,\t\t0.5098069938988476,\t\t0, 0, 0],\n\t\t[1505,\t\t3,\t\t0.0008297502785158192,\t\t0.04148751392579096,\t\t2.22, 61.69, 0.004502],\n\t\t[1506,\t\t2,\t\t0.0018532538474645695,\t\t0.09266269237322847,\t\t0, 0, 0],\n\t\t[1507,\t\t3,\t\t0.000472361030860968,\t\t0.023618051543048402,\t\t2.22, 61.69, 0.004502],\n\t\t[1508,\t\t3,\t\t4.154538017488063e-06,\t\t0.00020772690087440316,\t\t2.22, 61.69, 0.004502],\n\t\t[1510,\t\t2,\t\t0.00681234986437375,\t\t0.34061749321868756,\t\t0, 0, 0],\n\t\t[1511,\t\t2,\t\t0.00988173435818505,\t\t0.4940867179092525,\t\t0, 0, 0],\n\t\t[1512,\t\t2,\t\t0.004082645917281524,\t\t0.20413229586407625,\t\t0, 0, 0],\n\t\t[1513,\t\t3,\t\t0.0012470048759170105,\t\t0.062350243795850524,\t\t2.22, 61.69, 0.004502],\n\t\t[1514,\t\t3,\t\t8.957659889985326e-07,\t\t4.4788299449926626e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1516,\t\t3,\t\t1.8340973111507537e-06,\t\t9.170486555753769e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1517,\t\t3,\t\t8.192048507877762e-05,\t\t0.0040960242539388805,\t\t2.22, 61.69, 0.004502],\n\t\t[1518,\t\t3,\t\t4.268803271333055e-05,\t\t0.0021344016356665274,\t\t2.22, 61.69, 0.004502],\n\t\t[1519,\t\t3,\t\t2.9627970642356104e-06,\t\t0.00014813985321178054,\t\t2.22, 61.69, 0.004502]\n\t])\n\tppc[\"branch_switch\"] = array([\n\t\t[586,\t\t1,\t\t0\t\t],\n\t\t[589,\t\t108,\t\t0\t\t],\n\t\t[590,\t\t108,\t\t0\t\t],\n\t\t[593,\t\t112,\t\t0\t\t],\n\t\t[595,\t\t115,\t\t0\t\t],\n\t\t[598,\t\t118,\t\t0\t\t],\n\t\t[599,\t\t119,\t\t0\t\t],\n\t\t[601,\t\t119,\t\t0\t\t],\n\t\t[602,\t\t121,\t\t0\t\t],\n\t\t[603,\t\t526,\t\t0\t\t],\n\t\t[607,\t\t127,\t\t0\t\t],\n\t\t[608,\t\t127,\t\t0\t\t],\n\t\t[609,\t\t529,\t\t0\t\t],\n\t\t[612,\t\t493,\t\t0\t\t],\n\t\t[614,\t\t130,\t\t0\t\t],\n\t\t[616,\t\t132,\t\t0\t\t],\n\t\t[617,\t\t133,\t\t0\t\t],\n\t\t[618,\t\t133,\t\t0\t\t],\n\t\t[619,\t\t134,\t\t0\t\t],\n\t\t[624,\t\t14,\t\t0\t\t],\n\t\t[629,\t\t145,\t\t0\t\t],\n\t\t[632,\t\t145,\t\t0\t\t],\n\t\t[637,\t\t148,\t\t0\t\t],\n\t\t[638,\t\t149,\t\t0\t\t],\n\t\t[640,\t\t153,\t\t0\t\t],\n\t\t[641,\t\t155,\t\t0\t\t],\n\t\t[642,\t\t533,\t\t0\t\t],\n\t\t[643,\t\t534,\t\t0\t\t],\n\t\t[647,\t\t536,\t\t0\t\t],\n\t\t[652,\t\t167,\t\t0\t\t],\n\t\t[655,\t\t170,\t\t0\t\t],\n\t\t[661,\t\t177,\t\t0\t\t],\n\t\t[663,\t\t178,\t\t0\t\t],\n\t\t[666,\t\t180,\t\t0\t\t],\n\t\t[668,\t\t183,\t\t0\t\t],\n\t\t[670,\t\t183,\t\t0\t\t],\n\t\t[672,\t\t185,\t\t0\t\t],\n\t\t[681,\t\t197,\t\t0\t\t],\n\t\t[683,\t\t200,\t\t0\t\t],\n\t\t[687,\t\t202,\t\t0\t\t],\n\t\t[694,\t\t21,\t\t0\t\t],\n\t\t[695,\t\t210,\t\t0\t\t],\n\t\t[696,\t\t211,\t\t0\t\t],\n\t\t[697,\t\t211,\t\t0\t\t],\n\t\t[698,\t\t212,\t\t0\t\t],\n\t\t[702,\t\t215,\t\t0\t\t],\n\t\t[704,\t\t217,\t\t0\t\t],\n\t\t[705,\t\t217,\t\t0\t\t],\n\t\t[707,\t\t219,\t\t0\t\t],\n\t\t[713,\t\t225,\t\t0\t\t],\n\t\t[714,\t\t225,\t\t0\t\t],\n\t\t[716,\t\t226,\t\t0\t\t],\n\t\t[717,\t\t227,\t\t0\t\t],\n\t\t[719,\t\t229,\t\t0\t\t],\n\t\t[724,\t\t238,\t\t0\t\t],\n\t\t[730,\t\t547,\t\t0\t\t],\n\t\t[732,\t\t247,\t\t0\t\t],\n\t\t[735,\t\t253,\t\t0\t\t],\n\t\t[738,\t\t258,\t\t0\t\t],\n\t\t[741,\t\t264,\t\t0\t\t],\n\t\t[742,\t\t264,\t\t0\t\t],\n\t\t[743,\t\t500,\t\t0\t\t],\n\t\t[747,\t\t273,\t\t0\t\t],\n\t\t[748,\t\t274,\t\t0\t\t],\n\t\t[749,\t\t274,\t\t0\t\t],\n\t\t[750,\t\t557,\t\t0\t\t],\n\t\t[753,\t\t28,\t\t0\t\t],\n\t\t[758,\t\t286,\t\t0\t\t],\n\t\t[7
61,\t\t288,\t\t0\t\t],\n\t\t[762,\t\t289,\t\t0\t\t],\n\t\t[763,\t\t560,\t\t0\t\t],\n\t\t[765,\t\t560,\t\t0\t\t],\n\t\t[767,\t\t292,\t\t0\t\t],\n\t\t[772,\t\t3,\t\t0\t\t],\n\t\t[774,\t\t300,\t\t0\t\t],\n\t\t[777,\t\t300,\t\t0\t\t],\n\t\t[778,\t\t300,\t\t0\t\t],\n\t\t[781,\t\t303,\t\t0\t\t],\n\t\t[784,\t\t563,\t\t0\t\t],\n\t\t[785,\t\t501,\t\t0\t\t],\n\t\t[788,\t\t311,\t\t0\t\t],\n\t\t[789,\t\t565,\t\t0\t\t],\n\t\t[791,\t\t314,\t\t0\t\t],\n\t\t[792,\t\t316,\t\t0\t\t],\n\t\t[795,\t\t319,\t\t0\t\t],\n\t\t[800,\t\t326,\t\t0\t\t],\n\t\t[801,\t\t327,\t\t0\t\t],\n\t\t[802,\t\t327,\t\t0\t\t],\n\t\t[805,\t\t328,\t\t0\t\t],\n\t\t[806,\t\t328,\t\t0\t\t],\n\t\t[808,\t\t329,\t\t0\t\t],\n\t\t[809,\t\t329,\t\t0\t\t],\n\t\t[811,\t\t568,\t\t0\t\t],\n\t\t[814,\t\t570,\t\t0\t\t],\n\t\t[816,\t\t335,\t\t0\t\t],\n\t\t[817,\t\t571,\t\t0\t\t],\n\t\t[821,\t\t338,\t\t0\t\t],\n\t\t[822,\t\t339,\t\t0\t\t],\n\t\t[826,\t\t339,\t\t0\t\t],\n\t\t[830,\t\t345,\t\t0\t\t],\n\t\t[835,\t\t572,\t\t0\t\t],\n\t\t[836,\t\t572,\t\t0\t\t],\n\t\t[839,\t\t350,\t\t0\t\t],\n\t\t[841,\t\t573,\t\t0\t\t],\n\t\t[844,\t\t352,\t\t0\t\t],\n\t\t[845,\t\t356,\t\t0\t\t],\n\t\t[849,\t\t574,\t\t0\t\t],\n\t\t[850,\t\t574,\t\t0\t\t],\n\t\t[851,\t\t575,\t\t0\t\t],\n\t\t[853,\t\t362,\t\t0\t\t],\n\t\t[855,\t\t363,\t\t0\t\t],\n\t\t[856,\t\t363,\t\t0\t\t],\n\t\t[857,\t\t365,\t\t0\t\t],\n\t\t[858,\t\t368,\t\t0\t\t],\n\t\t[860,\t\t371,\t\t0\t\t],\n\t\t[865,\t\t375,\t\t0\t\t],\n\t\t[869,\t\t503,\t\t0\t\t],\n\t\t[870,\t\t503,\t\t0\t\t],\n\t\t[872,\t\t378,\t\t0\t\t],\n\t\t[874,\t\t576,\t\t0\t\t],\n\t\t[875,\t\t381,\t\t0\t\t],\n\t\t[882,\t\t388,\t\t0\t\t],\n\t\t[883,\t\t388,\t\t0\t\t],\n\t\t[885,\t\t393,\t\t0\t\t],\n\t\t[886,\t\t394,\t\t0\t\t],\n\t\t[889,\t\t397,\t\t0\t\t],\n\t\t[890,\t\t40,\t\t0\t\t],\n\t\t[893,\t\t400,\t\t0\t\t],\n\t\t[894,\t\t400,\t\t0\t\t],\n\t\t[895,\t\t580,\t\t0\t\t],\n\t\t[896,\t\t581,\t\t0\t\t],\n\t\t[898,\t\t403,\t\t0\t\t],\n\t\t[900,\t\t405,\t\t0\t\t],\n\t\t[902,\t\t405,\t\t0\t\t],\n\t\t[903,\t\t406,\t\t0\t\t],\n\t\t[905,\t\t413,\t\t0\t\t],\n\t\t[906,\t\t414,\t\t0\t\t],\n\t\t[907,\t\t583,\t\t0\t\t],\n\t\t[909,\t\t417,\t\t0\t\t],\n\t\t[913,\t\t422,\t\t0\t\t],\n\t\t[915,\t\t423,\t\t0\t\t],\n\t\t[917,\t\t43,\t\t0\t\t],\n\t\t[918,\t\t424,\t\t0\t\t],\n\t\t[920,\t\t428,\t\t0\t\t],\n\t\t[921,\t\t428,\t\t0\t\t],\n\t\t[922,\t\t429,\t\t0\t\t],\n\t\t[923,\t\t432,\t\t0\t\t],\n\t\t[925,\t\t44,\t\t0\t\t],\n\t\t[931,\t\t439,\t\t0\t\t],\n\t\t[935,\t\t45,\t\t0\t\t],\n\t\t[936,\t\t445,\t\t0\t\t],\n\t\t[937,\t\t447,\t\t0\t\t],\n\t\t[939,\t\t450,\t\t0\t\t],\n\t\t[940,\t\t451,\t\t0\t\t],\n\t\t[944,\t\t458,\t\t0\t\t],\n\t\t[950,\t\t462,\t\t0\t\t],\n\t\t[952,\t\t47,\t\t0\t\t],\n\t\t[958,\t\t478,\t\t0\t\t],\n\t\t[959,\t\t478,\t\t0\t\t],\n\t\t[960,\t\t479,\t\t0\t\t],\n\t\t[963,\t\t481,\t\t0\t\t],\n\t\t[965,\t\t49,\t\t0\t\t],\n\t\t[966,\t\t49,\t\t0\t\t],\n\t\t[967,\t\t49,\t\t0\t\t],\n\t\t[969,\t\t486,\t\t0\t\t],\n\t\t[971,\t\t51,\t\t0\t\t],\n\t\t[973,\t\t506,\t\t0\t\t],\n\t\t[976,\t\t58,\t\t0\t\t],\n\t\t[978,\t\t491,\t\t0\t\t],\n\t\t[980,\t\t508,\t\t0\t\t],\n\t\t[981,\t\t62,\t\t0\t\t],\n\t\t[982,\t\t62,\t\t0\t\t],\n\t\t[983,\t\t62,\t\t0\t\t],\n\t\t[984,\t\t63,\t\t0\t\t],\n\t\t[985,\t\t63,\t\t0\t\t],\n\t\t[986,\t\t64,\t\t0\t\t],\n\t\t[987,\t\t65,\t\t0\t\t],\n\t\t[988,\t\t66,\t\t0\t\t],\n\t\t[993,\t\t67,\t\t0\t\t],\n\t\t[994,\t\t67,\t\t0\t\t],\n\t\t[995,\t\t509,\t\t0\t\t],\n\t\t[997,\t\t510,\t\t0\t\t],\n\t\t[999,\t\t70,\t\t0\t\t],\n\t\t[1000,\t\t71,\t\t0\t\t],\n\t\t[1002,\t\t71,\t\t0\t\t],\n\t\t[1003,\t\t72,\t\t0\t\t],\n\t\t[1007,\t\t511,\t\t0\t\t],\n\t\t[1008,\t\
t75,\t\t0\t\t],\n\t\t[1010,\t\t79,\t\t0\t\t],\n\t\t[1011,\t\t79,\t\t0\t\t],\n\t\t[1012,\t\t81,\t\t0\t\t],\n\t\t[1014,\t\t83,\t\t0\t\t],\n\t\t[1026,\t\t518,\t\t0\t\t],\n\t\t[1027,\t\t218,\t\t0\t\t],\n\t\t[1028,\t\t221,\t\t0\t\t],\n\t\t[1029,\t\t268,\t\t0\t\t],\n\t\t[1030,\t\t269,\t\t0\t\t],\n\t\t[1031,\t\t498,\t\t0\t\t],\n\t\t[1032,\t\t1,\t\t0\t\t],\n\t\t[1033,\t\t3,\t\t0\t\t],\n\t\t[1034,\t\t4,\t\t0\t\t],\n\t\t[1035,\t\t6,\t\t0\t\t],\n\t\t[1036,\t\t7,\t\t0\t\t],\n\t\t[1037,\t\t8,\t\t0\t\t],\n\t\t[1038,\t\t9,\t\t0\t\t],\n\t\t[1039,\t\t11,\t\t0\t\t],\n\t\t[1040,\t\t14,\t\t0\t\t],\n\t\t[1041,\t\t16,\t\t0\t\t],\n\t\t[1042,\t\t17,\t\t0\t\t],\n\t\t[1043,\t\t19,\t\t0\t\t],\n\t\t[1044,\t\t21,\t\t0\t\t],\n\t\t[1045,\t\t23,\t\t0\t\t],\n\t\t[1046,\t\t25,\t\t0\t\t],\n\t\t[1047,\t\t27,\t\t0\t\t],\n\t\t[1048,\t\t28,\t\t0\t\t],\n\t\t[1049,\t\t29,\t\t0\t\t],\n\t\t[1050,\t\t31,\t\t0\t\t],\n\t\t[1051,\t\t33,\t\t0\t\t],\n\t\t[1052,\t\t34,\t\t0\t\t],\n\t\t[1053,\t\t35,\t\t0\t\t],\n\t\t[1054,\t\t36,\t\t0\t\t],\n\t\t[1055,\t\t38,\t\t0\t\t],\n\t\t[1056,\t\t39,\t\t0\t\t],\n\t\t[1057,\t\t40,\t\t0\t\t],\n\t\t[1058,\t\t41,\t\t0\t\t],\n\t\t[1059,\t\t43,\t\t0\t\t],\n\t\t[1060,\t\t44,\t\t0\t\t],\n\t\t[1061,\t\t45,\t\t0\t\t],\n\t\t[1062,\t\t47,\t\t0\t\t],\n\t\t[1063,\t\t48,\t\t0\t\t],\n\t\t[1064,\t\t49,\t\t0\t\t],\n\t\t[1065,\t\t50,\t\t0\t\t],\n\t\t[1066,\t\t51,\t\t0\t\t],\n\t\t[1067,\t\t53,\t\t0\t\t],\n\t\t[1068,\t\t54,\t\t0\t\t],\n\t\t[1069,\t\t55,\t\t0\t\t],\n\t\t[1070,\t\t57,\t\t0\t\t],\n\t\t[1071,\t\t58,\t\t0\t\t],\n\t\t[1072,\t\t59,\t\t0\t\t],\n\t\t[1073,\t\t60,\t\t0\t\t],\n\t\t[1074,\t\t62,\t\t0\t\t],\n\t\t[1075,\t\t63,\t\t0\t\t],\n\t\t[1076,\t\t64,\t\t0\t\t],\n\t\t[1077,\t\t65,\t\t0\t\t],\n\t\t[1078,\t\t66,\t\t0\t\t],\n\t\t[1079,\t\t67,\t\t0\t\t],\n\t\t[1080,\t\t70,\t\t0\t\t],\n\t\t[1081,\t\t71,\t\t0\t\t],\n\t\t[1082,\t\t72,\t\t0\t\t],\n\t\t[1083,\t\t73,\t\t0\t\t],\n\t\t[1084,\t\t75,\t\t0\t\t],\n\t\t[1085,\t\t76,\t\t0\t\t],\n\t\t[1086,\t\t77,\t\t0\t\t],\n\t\t[1087,\t\t79,\t\t0\t\t],\n\t\t[1088,\t\t80,\t\t0\t\t],\n\t\t[1089,\t\t81,\t\t0\t\t],\n\t\t[1090,\t\t82,\t\t0\t\t],\n\t\t[1091,\t\t83,\t\t0\t\t],\n\t\t[1092,\t\t84,\t\t0\t\t],\n\t\t[1093,\t\t85,\t\t0\t\t],\n\t\t[1094,\t\t88,\t\t0\t\t],\n\t\t[1095,\t\t89,\t\t0\t\t],\n\t\t[1096,\t\t90,\t\t0\t\t],\n\t\t[1097,\t\t91,\t\t0\t\t],\n\t\t[1098,\t\t92,\t\t0\t\t],\n\t\t[1099,\t\t93,\t\t0\t\t],\n\t\t[1100,\t\t97,\t\t0\t\t],\n\t\t[1101,\t\t98,\t\t0\t\t],\n\t\t[1102,\t\t101,\t\t0\t\t],\n\t\t[1103,\t\t102,\t\t0\t\t],\n\t\t[1104,\t\t103,\t\t0\t\t],\n\t\t[1105,\t\t108,\t\t0\t\t],\n\t\t[1106,\t\t109,\t\t0\t\t],\n\t\t[1107,\t\t110,\t\t0\t\t],\n\t\t[1108,\t\t111,\t\t0\t\t],\n\t\t[1109,\t\t112,\t\t0\t\t],\n\t\t[1110,\t\t113,\t\t0\t\t],\n\t\t[1111,\t\t114,\t\t0\t\t],\n\t\t[1112,\t\t115,\t\t0\t\t],\n\t\t[1113,\t\t116,\t\t0\t\t],\n\t\t[1114,\t\t118,\t\t0\t\t],\n\t\t[1115,\t\t119,\t\t0\t\t],\n\t\t[1116,\t\t121,\t\t0\t\t],\n\t\t[1117,\t\t122,\t\t0\t\t],\n\t\t[1118,\t\t126,\t\t0\t\t],\n\t\t[1119,\t\t127,\t\t0\t\t],\n\t\t[1120,\t\t130,\t\t0\t\t],\n\t\t[1121,\t\t131,\t\t0\t\t],\n\t\t[1122,\t\t132,\t\t0\t\t],\n\t\t[1123,\t\t133,\t\t0\t\t],\n\t\t[1124,\t\t134,\t\t0\t\t],\n\t\t[1125,\t\t135,\t\t0\t\t],\n\t\t[1126,\t\t136,\t\t0\t\t],\n\t\t[1127,\t\t137,\t\t0\t\t],\n\t\t[1128,\t\t139,\t\t0\t\t],\n\t\t[1129,\t\t140,\t\t0\t\t],\n\t\t[1130,\t\t141,\t\t0\t\t],\n\t\t[1131,\t\t142,\t\t0\t\t],\n\t\t[1132,\t\t144,\t\t0\t\t],\n\t\t[1133,\t\t145,\t\t0\t\t],\n\t\t[1134,\t\t146,\t\t0\t\t],\n\t\t[1135,\t\t147,\t\t0\t\t],\n\t\t[1136,\t\t148,\t\t0\t\t],\n\t\t[1137,\t\t149,\t\t0\t\t],\n\t\t[1138,\t\t150,\t\t0
\t\t],\n\t\t[1139,\t\t151,\t\t0\t\t],\n\t\t[1140,\t\t152,\t\t0\t\t],\n\t\t[1141,\t\t153,\t\t0\t\t],\n\t\t[1142,\t\t154,\t\t0\t\t],\n\t\t[1143,\t\t155,\t\t0\t\t],\n\t\t[1144,\t\t158,\t\t0\t\t],\n\t\t[1145,\t\t161,\t\t0\t\t],\n\t\t[1146,\t\t162,\t\t0\t\t],\n\t\t[1147,\t\t163,\t\t0\t\t],\n\t\t[1148,\t\t164,\t\t0\t\t],\n\t\t[1149,\t\t166,\t\t0\t\t],\n\t\t[1150,\t\t167,\t\t0\t\t],\n\t\t[1151,\t\t168,\t\t0\t\t],\n\t\t[1152,\t\t169,\t\t0\t\t],\n\t\t[1153,\t\t170,\t\t0\t\t],\n\t\t[1154,\t\t171,\t\t0\t\t],\n\t\t[1155,\t\t172,\t\t0\t\t],\n\t\t[1156,\t\t173,\t\t0\t\t],\n\t\t[1157,\t\t174,\t\t0\t\t],\n\t\t[1158,\t\t175,\t\t0\t\t],\n\t\t[1159,\t\t176,\t\t0\t\t],\n\t\t[1160,\t\t177,\t\t0\t\t],\n\t\t[1161,\t\t178,\t\t0\t\t],\n\t\t[1162,\t\t179,\t\t0\t\t],\n\t\t[1163,\t\t180,\t\t0\t\t],\n\t\t[1164,\t\t181,\t\t0\t\t],\n\t\t[1165,\t\t182,\t\t0\t\t],\n\t\t[1166,\t\t183,\t\t0\t\t],\n\t\t[1167,\t\t185,\t\t0\t\t],\n\t\t[1168,\t\t186,\t\t0\t\t],\n\t\t[1169,\t\t187,\t\t0\t\t],\n\t\t[1170,\t\t188,\t\t0\t\t],\n\t\t[1171,\t\t189,\t\t0\t\t],\n\t\t[1172,\t\t190,\t\t0\t\t],\n\t\t[1173,\t\t192,\t\t0\t\t],\n\t\t[1174,\t\t193,\t\t0\t\t],\n\t\t[1175,\t\t194,\t\t0\t\t],\n\t\t[1176,\t\t196,\t\t0\t\t],\n\t\t[1177,\t\t197,\t\t0\t\t],\n\t\t[1178,\t\t198,\t\t0\t\t],\n\t\t[1179,\t\t199,\t\t0\t\t],\n\t\t[1180,\t\t200,\t\t0\t\t],\n\t\t[1181,\t\t202,\t\t0\t\t],\n\t\t[1182,\t\t203,\t\t0\t\t],\n\t\t[1183,\t\t204,\t\t0\t\t],\n\t\t[1184,\t\t205,\t\t0\t\t],\n\t\t[1185,\t\t206,\t\t0\t\t],\n\t\t[1186,\t\t207,\t\t0\t\t],\n\t\t[1187,\t\t208,\t\t0\t\t],\n\t\t[1188,\t\t209,\t\t0\t\t],\n\t\t[1189,\t\t210,\t\t0\t\t],\n\t\t[1190,\t\t211,\t\t0\t\t],\n\t\t[1191,\t\t212,\t\t0\t\t],\n\t\t[1192,\t\t213,\t\t0\t\t],\n\t\t[1193,\t\t214,\t\t0\t\t],\n\t\t[1194,\t\t215,\t\t0\t\t],\n\t\t[1195,\t\t216,\t\t0\t\t],\n\t\t[1196,\t\t217,\t\t0\t\t],\n\t\t[1197,\t\t218,\t\t0\t\t],\n\t\t[1198,\t\t219,\t\t0\t\t],\n\t\t[1199,\t\t221,\t\t0\t\t],\n\t\t[1200,\t\t222,\t\t0\t\t],\n\t\t[1201,\t\t223,\t\t0\t\t],\n\t\t[1202,\t\t224,\t\t0\t\t],\n\t\t[1203,\t\t225,\t\t0\t\t],\n\t\t[1204,\t\t226,\t\t0\t\t],\n\t\t[1205,\t\t227,\t\t0\t\t],\n\t\t[1206,\t\t228,\t\t0\t\t],\n\t\t[1207,\t\t229,\t\t0\t\t],\n\t\t[1208,\t\t230,\t\t0\t\t],\n\t\t[1209,\t\t234,\t\t0\t\t],\n\t\t[1210,\t\t235,\t\t0\t\t],\n\t\t[1211,\t\t237,\t\t0\t\t],\n\t\t[1212,\t\t238,\t\t0\t\t],\n\t\t[1213,\t\t239,\t\t0\t\t],\n\t\t[1214,\t\t240,\t\t0\t\t],\n\t\t[1215,\t\t241,\t\t0\t\t],\n\t\t[1216,\t\t242,\t\t0\t\t],\n\t\t[1217,\t\t243,\t\t0\t\t],\n\t\t[1218,\t\t244,\t\t0\t\t],\n\t\t[1219,\t\t247,\t\t0\t\t],\n\t\t[1220,\t\t251,\t\t0\t\t],\n\t\t[1221,\t\t252,\t\t0\t\t],\n\t\t[1222,\t\t253,\t\t0\t\t],\n\t\t[1223,\t\t254,\t\t0\t\t],\n\t\t[1224,\t\t255,\t\t0\t\t],\n\t\t[1225,\t\t256,\t\t0\t\t],\n\t\t[1226,\t\t257,\t\t0\t\t],\n\t\t[1227,\t\t258,\t\t0\t\t],\n\t\t[1228,\t\t260,\t\t0\t\t],\n\t\t[1229,\t\t263,\t\t0\t\t],\n\t\t[1230,\t\t264,\t\t0\t\t],\n\t\t[1231,\t\t266,\t\t0\t\t],\n\t\t[1232,\t\t267,\t\t0\t\t],\n\t\t[1233,\t\t268,\t\t0\t\t],\n\t\t[1235,\t\t271,\t\t0\t\t],\n\t\t[1236,\t\t272,\t\t0\t\t],\n\t\t[1237,\t\t273,\t\t0\t\t],\n\t\t[1238,\t\t274,\t\t0\t\t],\n\t\t[1239,\t\t275,\t\t0\t\t],\n\t\t[1240,\t\t276,\t\t0\t\t],\n\t\t[1241,\t\t278,\t\t0\t\t],\n\t\t[1242,\t\t281,\t\t0\t\t],\n\t\t[1243,\t\t282,\t\t0\t\t],\n\t\t[1244,\t\t283,\t\t0\t\t],\n\t\t[1245,\t\t284,\t\t0\t\t],\n\t\t[1246,\t\t285,\t\t0\t\t],\n\t\t[1247,\t\t286,\t\t0\t\t],\n\t\t[1248,\t\t287,\t\t0\t\t],\n\t\t[1249,\t\t288,\t\t0\t\t],\n\t\t[1250,\t\t289,\t\t0\t\t],\n\t\t[1251,\t\t291,\t\t0\t\t],\n\t\t[1252,\t\t292,\t\t0\t\t],\n\t\t[1253,\t\t293,\t\t0\t\t],\n\t\t[1254,\t\
t294,\t\t0\t\t],\n\t\t[1255,\t\t295,\t\t0\t\t],\n\t\t[1256,\t\t296,\t\t0\t\t],\n\t\t[1257,\t\t297,\t\t0\t\t],\n\t\t[1258,\t\t298,\t\t0\t\t],\n\t\t[1259,\t\t299,\t\t0\t\t],\n\t\t[1260,\t\t300,\t\t0\t\t],\n\t\t[1261,\t\t302,\t\t0\t\t],\n\t\t[1262,\t\t303,\t\t0\t\t],\n\t\t[1263,\t\t304,\t\t0\t\t],\n\t\t[1264,\t\t307,\t\t0\t\t],\n\t\t[1265,\t\t308,\t\t0\t\t],\n\t\t[1266,\t\t309,\t\t0\t\t],\n\t\t[1267,\t\t311,\t\t0\t\t],\n\t\t[1268,\t\t312,\t\t0\t\t],\n\t\t[1269,\t\t314,\t\t0\t\t],\n\t\t[1270,\t\t316,\t\t0\t\t],\n\t\t[1271,\t\t317,\t\t0\t\t],\n\t\t[1272,\t\t318,\t\t0\t\t],\n\t\t[1273,\t\t319,\t\t0\t\t],\n\t\t[1274,\t\t321,\t\t0\t\t],\n\t\t[1275,\t\t322,\t\t0\t\t],\n\t\t[1276,\t\t323,\t\t0\t\t],\n\t\t[1277,\t\t324,\t\t0\t\t],\n\t\t[1278,\t\t325,\t\t0\t\t],\n\t\t[1279,\t\t326,\t\t0\t\t],\n\t\t[1280,\t\t327,\t\t0\t\t],\n\t\t[1281,\t\t328,\t\t0\t\t],\n\t\t[1282,\t\t329,\t\t0\t\t],\n\t\t[1283,\t\t331,\t\t0\t\t],\n\t\t[1284,\t\t333,\t\t0\t\t],\n\t\t[1285,\t\t335,\t\t0\t\t],\n\t\t[1286,\t\t337,\t\t0\t\t],\n\t\t[1287,\t\t338,\t\t0\t\t],\n\t\t[1288,\t\t339,\t\t0\t\t],\n\t\t[1289,\t\t340,\t\t0\t\t],\n\t\t[1290,\t\t341,\t\t0\t\t],\n\t\t[1291,\t\t342,\t\t0\t\t],\n\t\t[1292,\t\t343,\t\t0\t\t],\n\t\t[1293,\t\t344,\t\t0\t\t],\n\t\t[1294,\t\t345,\t\t0\t\t],\n\t\t[1295,\t\t346,\t\t0\t\t],\n\t\t[1296,\t\t347,\t\t0\t\t],\n\t\t[1297,\t\t348,\t\t0\t\t],\n\t\t[1298,\t\t350,\t\t0\t\t],\n\t\t[1299,\t\t352,\t\t0\t\t],\n\t\t[1300,\t\t353,\t\t0\t\t],\n\t\t[1301,\t\t354,\t\t0\t\t],\n\t\t[1302,\t\t355,\t\t0\t\t],\n\t\t[1303,\t\t356,\t\t0\t\t],\n\t\t[1304,\t\t357,\t\t0\t\t],\n\t\t[1305,\t\t359,\t\t0\t\t],\n\t\t[1306,\t\t361,\t\t0\t\t],\n\t\t[1307,\t\t362,\t\t0\t\t],\n\t\t[1308,\t\t363,\t\t0\t\t],\n\t\t[1309,\t\t364,\t\t0\t\t],\n\t\t[1310,\t\t365,\t\t0\t\t],\n\t\t[1311,\t\t366,\t\t0\t\t],\n\t\t[1312,\t\t367,\t\t0\t\t],\n\t\t[1313,\t\t368,\t\t0\t\t],\n\t\t[1314,\t\t369,\t\t0\t\t],\n\t\t[1315,\t\t370,\t\t0\t\t],\n\t\t[1316,\t\t371,\t\t0\t\t],\n\t\t[1317,\t\t372,\t\t0\t\t],\n\t\t[1318,\t\t373,\t\t0\t\t],\n\t\t[1319,\t\t374,\t\t0\t\t],\n\t\t[1320,\t\t375,\t\t0\t\t],\n\t\t[1321,\t\t376,\t\t0\t\t],\n\t\t[1322,\t\t377,\t\t0\t\t],\n\t\t[1323,\t\t378,\t\t0\t\t],\n\t\t[1324,\t\t379,\t\t0\t\t],\n\t\t[1325,\t\t381,\t\t0\t\t],\n\t\t[1326,\t\t384,\t\t0\t\t],\n\t\t[1327,\t\t385,\t\t0\t\t],\n\t\t[1328,\t\t386,\t\t0\t\t],\n\t\t[1329,\t\t387,\t\t0\t\t],\n\t\t[1330,\t\t388,\t\t0\t\t],\n\t\t[1331,\t\t390,\t\t0\t\t],\n\t\t[1332,\t\t391,\t\t0\t\t],\n\t\t[1333,\t\t392,\t\t0\t\t],\n\t\t[1334,\t\t393,\t\t0\t\t],\n\t\t[1335,\t\t394,\t\t0\t\t],\n\t\t[1336,\t\t395,\t\t0\t\t],\n\t\t[1337,\t\t396,\t\t0\t\t],\n\t\t[1338,\t\t397,\t\t0\t\t],\n\t\t[1339,\t\t398,\t\t0\t\t],\n\t\t[1340,\t\t399,\t\t0\t\t],\n\t\t[1341,\t\t400,\t\t0\t\t],\n\t\t[1342,\t\t403,\t\t0\t\t],\n\t\t[1343,\t\t404,\t\t0\t\t],\n\t\t[1344,\t\t405,\t\t0\t\t],\n\t\t[1345,\t\t406,\t\t0\t\t],\n\t\t[1346,\t\t407,\t\t0\t\t],\n\t\t[1347,\t\t408,\t\t0\t\t],\n\t\t[1348,\t\t410,\t\t0\t\t],\n\t\t[1349,\t\t411,\t\t0\t\t],\n\t\t[1350,\t\t412,\t\t0\t\t],\n\t\t[1352,\t\t414,\t\t0\t\t],\n\t\t[1355,\t\t418,\t\t0\t\t],\n\t\t[1356,\t\t419,\t\t0\t\t],\n\t\t[1357,\t\t420,\t\t0\t\t],\n\t\t[1358,\t\t421,\t\t0\t\t],\n\t\t[1359,\t\t422,\t\t0\t\t],\n\t\t[1360,\t\t423,\t\t0\t\t],\n\t\t[1361,\t\t424,\t\t0\t\t],\n\t\t[1362,\t\t425,\t\t0\t\t],\n\t\t[1363,\t\t426,\t\t0\t\t],\n\t\t[1364,\t\t427,\t\t0\t\t],\n\t\t[1365,\t\t428,\t\t0\t\t],\n\t\t[1366,\t\t429,\t\t0\t\t],\n\t\t[1367,\t\t430,\t\t0\t\t],\n\t\t[1368,\t\t431,\t\t0\t\t],\n\t\t[1369,\t\t432,\t\t0\t\t],\n\t\t[1370,\t\t433,\t\t0\t\t],\n\t\t[1371,\t\t434,\t\t0\t\t],\n\t\
t[1372,\t\t435,\t\t0\t\t],\n\t\t[1373,\t\t436,\t\t0\t\t],\n\t\t[1374,\t\t437,\t\t0\t\t],\n\t\t[1375,\t\t438,\t\t0\t\t],\n\t\t[1376,\t\t439,\t\t0\t\t],\n\t\t[1377,\t\t440,\t\t0\t\t],\n\t\t[1378,\t\t441,\t\t0\t\t],\n\t\t[1379,\t\t442,\t\t0\t\t],\n\t\t[1380,\t\t443,\t\t0\t\t],\n\t\t[1381,\t\t445,\t\t0\t\t],\n\t\t[1382,\t\t446,\t\t0\t\t],\n\t\t[1383,\t\t447,\t\t0\t\t],\n\t\t[1384,\t\t448,\t\t0\t\t],\n\t\t[1385,\t\t449,\t\t0\t\t],\n\t\t[1386,\t\t450,\t\t0\t\t],\n\t\t[1387,\t\t451,\t\t0\t\t],\n\t\t[1388,\t\t453,\t\t0\t\t],\n\t\t[1389,\t\t454,\t\t0\t\t],\n\t\t[1390,\t\t455,\t\t0\t\t],\n\t\t[1391,\t\t456,\t\t0\t\t],\n\t\t[1392,\t\t457,\t\t0\t\t],\n\t\t[1393,\t\t458,\t\t0\t\t],\n\t\t[1394,\t\t459,\t\t0\t\t],\n\t\t[1395,\t\t460,\t\t0\t\t],\n\t\t[1396,\t\t461,\t\t0\t\t],\n\t\t[1397,\t\t462,\t\t0\t\t],\n\t\t[1398,\t\t463,\t\t0\t\t],\n\t\t[1399,\t\t464,\t\t0\t\t],\n\t\t[1400,\t\t465,\t\t0\t\t],\n\t\t[1401,\t\t466,\t\t0\t\t],\n\t\t[1402,\t\t467,\t\t0\t\t],\n\t\t[1403,\t\t468,\t\t0\t\t],\n\t\t[1404,\t\t469,\t\t0\t\t],\n\t\t[1405,\t\t470,\t\t0\t\t],\n\t\t[1406,\t\t471,\t\t0\t\t],\n\t\t[1407,\t\t472,\t\t0\t\t],\n\t\t[1408,\t\t473,\t\t0\t\t],\n\t\t[1409,\t\t474,\t\t0\t\t],\n\t\t[1410,\t\t475,\t\t0\t\t],\n\t\t[1411,\t\t476,\t\t0\t\t],\n\t\t[1412,\t\t477,\t\t0\t\t],\n\t\t[1413,\t\t478,\t\t0\t\t],\n\t\t[1414,\t\t479,\t\t0\t\t],\n\t\t[1415,\t\t480,\t\t0\t\t],\n\t\t[1416,\t\t481,\t\t0\t\t],\n\t\t[1417,\t\t482,\t\t0\t\t],\n\t\t[1418,\t\t483,\t\t0\t\t],\n\t\t[1419,\t\t484,\t\t0\t\t],\n\t\t[1420,\t\t485,\t\t0\t\t],\n\t\t[1421,\t\t486,\t\t0\t\t],\n\t\t[1422,\t\t487,\t\t0\t\t],\n\t\t[1423,\t\t488,\t\t0\t\t],\n\t\t[1424,\t\t489,\t\t0\t\t],\n\t\t[1425,\t\t490,\t\t0\t\t],\n\t\t[1426,\t\t491,\t\t0\t\t],\n\t\t[1427,\t\t492,\t\t0\t\t],\n\t\t[1428,\t\t493,\t\t0\t\t],\n\t\t[1429,\t\t494,\t\t0\t\t],\n\t\t[1430,\t\t495,\t\t0\t\t],\n\t\t[1431,\t\t496,\t\t0\t\t],\n\t\t[1432,\t\t497,\t\t0\t\t],\n\t\t[1433,\t\t498,\t\t0\t\t],\n\t\t[1434,\t\t499,\t\t0\t\t],\n\t\t[1435,\t\t500,\t\t0\t\t],\n\t\t[1436,\t\t501,\t\t0\t\t],\n\t\t[1437,\t\t502,\t\t0\t\t],\n\t\t[1438,\t\t503,\t\t0\t\t],\n\t\t[1439,\t\t504,\t\t0\t\t],\n\t\t[1440,\t\t505,\t\t0\t\t],\n\t\t[1441,\t\t506,\t\t0\t\t],\n\t\t[1442,\t\t507,\t\t0\t\t],\n\t\t[1443,\t\t508,\t\t0\t\t],\n\t\t[1444,\t\t509,\t\t0\t\t],\n\t\t[1445,\t\t510,\t\t0\t\t],\n\t\t[1446,\t\t511,\t\t0\t\t],\n\t\t[1447,\t\t512,\t\t0\t\t],\n\t\t[1448,\t\t513,\t\t0\t\t],\n\t\t[1449,\t\t514,\t\t0\t\t],\n\t\t[1450,\t\t515,\t\t0\t\t],\n\t\t[1451,\t\t516,\t\t0\t\t],\n\t\t[1452,\t\t517,\t\t0\t\t],\n\t\t[1453,\t\t518,\t\t0\t\t],\n\t\t[1454,\t\t519,\t\t0\t\t],\n\t\t[1455,\t\t520,\t\t0\t\t],\n\t\t[1456,\t\t521,\t\t0\t\t],\n\t\t[1457,\t\t522,\t\t0\t\t],\n\t\t[1458,\t\t523,\t\t0\t\t],\n\t\t[1459,\t\t524,\t\t0\t\t],\n\t\t[1460,\t\t525,\t\t0\t\t],\n\t\t[1461,\t\t526,\t\t0\t\t],\n\t\t[1462,\t\t527,\t\t0\t\t],\n\t\t[1463,\t\t528,\t\t0\t\t],\n\t\t[1464,\t\t529,\t\t0\t\t],\n\t\t[1465,\t\t530,\t\t0\t\t],\n\t\t[1466,\t\t531,\t\t0\t\t],\n\t\t[1467,\t\t532,\t\t0\t\t],\n\t\t[1468,\t\t533,\t\t0\t\t],\n\t\t[1469,\t\t534,\t\t0\t\t],\n\t\t[1470,\t\t535,\t\t0\t\t],\n\t\t[1471,\t\t536,\t\t0\t\t],\n\t\t[1472,\t\t537,\t\t0\t\t],\n\t\t[1473,\t\t538,\t\t0\t\t],\n\t\t[1474,\t\t539,\t\t0\t\t],\n\t\t[1475,\t\t540,\t\t0\t\t],\n\t\t[1476,\t\t541,\t\t0\t\t],\n\t\t[1477,\t\t542,\t\t0\t\t],\n\t\t[1479,\t\t544,\t\t0\t\t],\n\t\t[1480,\t\t545,\t\t0\t\t],\n\t\t[1481,\t\t546,\t\t0\t\t],\n\t\t[1482,\t\t547,\t\t0\t\t],\n\t\t[1483,\t\t548,\t\t0\t\t],\n\t\t[1484,\t\t549,\t\t0\t\t],\n\t\t[1485,\t\t550,\t\t0\t\t],\n\t\t[1486,\t\t551,\t\t0\t\t],\n\t\t[1487,\t\t552,\t\t0\
t\t],\n\t\t[1488,\t\t554,\t\t0\t\t],\n\t\t[1489,\t\t555,\t\t0\t\t],\n\t\t[1490,\t\t556,\t\t0\t\t],\n\t\t[1491,\t\t557,\t\t0\t\t],\n\t\t[1492,\t\t558,\t\t0\t\t],\n\t\t[1493,\t\t559,\t\t0\t\t],\n\t\t[1494,\t\t560,\t\t0\t\t],\n\t\t[1495,\t\t561,\t\t0\t\t],\n\t\t[1496,\t\t562,\t\t0\t\t],\n\t\t[1497,\t\t563,\t\t0\t\t],\n\t\t[1498,\t\t564,\t\t0\t\t],\n\t\t[1499,\t\t565,\t\t0\t\t],\n\t\t[1500,\t\t566,\t\t0\t\t],\n\t\t[1501,\t\t567,\t\t0\t\t],\n\t\t[1502,\t\t568,\t\t0\t\t],\n\t\t[1503,\t\t569,\t\t0\t\t],\n\t\t[1504,\t\t570,\t\t0\t\t],\n\t\t[1505,\t\t571,\t\t0\t\t],\n\t\t[1506,\t\t572,\t\t0\t\t],\n\t\t[1507,\t\t573,\t\t0\t\t],\n\t\t[1508,\t\t574,\t\t0\t\t],\n\t\t[1510,\t\t576,\t\t0\t\t],\n\t\t[1511,\t\t577,\t\t0\t\t],\n\t\t[1512,\t\t578,\t\t0\t\t],\n\t\t[1513,\t\t579,\t\t0\t\t],\n\t\t[1514,\t\t580,\t\t0\t\t],\n\t\t[1516,\t\t582,\t\t0\t\t],\n\t\t[1517,\t\t583,\t\t0\t\t],\n\t\t[1518,\t\t584,\t\t0\t\t],\n\t\t[1519,\t\t585,\t\t0\t\t],\n\t\t[1,\t\t490,\t\t0\t\t],\n\t\t[3,\t\t4,\t\t1\t\t],\n\t\t[491,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t5,\t\t0\t\t],\n\t\t[8,\t\t9,\t\t0\t\t],\n\t\t[492,\t\t11,\t\t0\t\t],\n\t\t[11,\t\t493,\t\t0\t\t],\n\t\t[492,\t\t493,\t\t1\t\t],\n\t\t[494,\t\t14,\t\t0\t\t],\n\t\t[13,\t\t15,\t\t0\t\t],\n\t\t[16,\t\t5,\t\t0\t\t],\n\t\t[17,\t\t18,\t\t1\t\t],\n\t\t[17,\t\t12,\t\t0\t\t],\n\t\t[14,\t\t495,\t\t0\t\t],\n\t\t[494,\t\t19,\t\t0\t\t],\n\t\t[20,\t\t21,\t\t0\t\t],\n\t\t[20,\t\t22,\t\t1\t\t],\n\t\t[497,\t\t23,\t\t0\t\t],\n\t\t[23,\t\t499,\t\t1\t\t],\n\t\t[25,\t\t26,\t\t0\t\t],\n\t\t[25,\t\t22,\t\t0\t\t],\n\t\t[23,\t\t27,\t\t0\t\t],\n\t\t[28,\t\t23,\t\t0\t\t],\n\t\t[8,\t\t21,\t\t0\t\t],\n\t\t[9,\t\t29,\t\t0\t\t],\n\t\t[30,\t\t25,\t\t1\t\t],\n\t\t[31,\t\t32,\t\t1\t\t],\n\t\t[32,\t\t33,\t\t1\t\t],\n\t\t[34,\t\t35,\t\t0\t\t],\n\t\t[35,\t\t36,\t\t0\t\t],\n\t\t[490,\t\t6,\t\t1\t\t],\n\t\t[37,\t\t10,\t\t1\t\t],\n\t\t[10,\t\t38,\t\t0\t\t],\n\t\t[37,\t\t38,\t\t1\t\t],\n\t\t[39,\t\t40,\t\t1\t\t],\n\t\t[39,\t\t41,\t\t1\t\t],\n\t\t[42,\t\t41,\t\t1\t\t],\n\t\t[18,\t\t42,\t\t1\t\t],\n\t\t[492,\t\t43,\t\t1\t\t],\n\t\t[44,\t\t45,\t\t0\t\t],\n\t\t[44,\t\t505,\t\t0\t\t],\n\t\t[46,\t\t12,\t\t0\t\t],\n\t\t[47,\t\t48,\t\t0\t\t],\n\t\t[49,\t\t50,\t\t0\t\t],\n\t\t[31,\t\t33,\t\t1\t\t],\n\t\t[31,\t\t51,\t\t0\t\t],\n\t\t[52,\t\t53,\t\t1\t\t],\n\t\t[52,\t\t54,\t\t0\t\t],\n\t\t[506,\t\t55,\t\t0\t\t],\n\t\t[506,\t\t507,\t\t1\t\t],\n\t\t[57,\t\t506,\t\t0\t\t],\n\t\t[57,\t\t58,\t\t0\t\t],\n\t\t[58,\t\t506,\t\t0\t\t],\n\t\t[59,\t\t60,\t\t1\t\t],\n\t\t[508,\t\t62,\t\t0\t\t],\n\t\t[30,\t\t61,\t\t1\t\t],\n\t\t[63,\t\t506,\t\t0\t\t],\n\t\t[13,\t\t64,\t\t0\t\t],\n\t\t[65,\t\t66,\t\t1\t\t],\n\t\t[59,\t\t67,\t\t0\t\t],\n\t\t[61,\t\t67,\t\t0\t\t],\n\t\t[68,\t\t69,\t\t1\t\t],\n\t\t[70,\t\t69,\t\t1\t\t],\n\t\t[71,\t\t72,\t\t1\t\t],\n\t\t[73,\t\t74,\t\t1\t\t],\n\t\t[37,\t\t75,\t\t1\t\t],\n\t\t[72,\t\t75,\t\t0\t\t],\n\t\t[37,\t\t72,\t\t1\t\t],\n\t\t[76,\t\t77,\t\t1\t\t],\n\t\t[77,\t\t51,\t\t0\t\t],\n\t\t[73,\t\t72,\t\t1\t\t],\n\t\t[18,\t\t40,\t\t1\t\t],\n\t\t[492,\t\t45,\t\t1\t\t],\n\t\t[10,\t\t74,\t\t1\t\t],\n\t\t[45,\t\t511,\t\t1\t\t],\n\t\t[78,\t\t32,\t\t1\t\t],\n\t\t[79,\t\t80,\t\t0\t\t],\n\t\t[81,\t\t79,\t\t1\t\t],\n\t\t[34,\t\t82,\t\t0\t\t],\n\t\t[83,\t\t84,\t\t0\t\t],\n\t\t[83,\t\t499,\t\t0\t\t],\n\t\t[85,\t\t86,\t\t0\t\t],\n\t\t[87,\t\t86,\t\t1\t\t],\n\t\t[88,\t\t89,\t\t0\t\t],\n\t\t[90,\t\t86,\t\t1\t\t],\n\t\t[91,\t\t86,\t\t0\t\t],\n\t\t[86,\t\t92,\t\t0\t\t],\n\t\t[86,\t\t93,\t\t0\t\t],\n\t\t[94,\t\t86,\t\t1\t\t],\n\t\t[86,\t\t95,\t\t1\t\t],\n\t\t[513,\t\t517,\t\t0\t\t],\n\t\t[97,\t\t66,\t\t1\t\t],\n\t\t[42,\t\t98,\t\t0\t\t],\n
\t\t[99,\t\t100,\t\t1\t\t],\n\t\t[42,\t\t101,\t\t0\t\t],\n\t\t[102,\t\t42,\t\t1\t\t],\n\t\t[103,\t\t87,\t\t0\t\t],\n\t\t[104,\t\t103,\t\t0\t\t],\n\t\t[105,\t\t87,\t\t0\t\t],\n\t\t[106,\t\t107,\t\t0\t\t],\n\t\t[108,\t\t107,\t\t0\t\t],\n\t\t[109,\t\t106,\t\t0\t\t],\n\t\t[110,\t\t111,\t\t1\t\t],\n\t\t[87,\t\t112,\t\t0\t\t],\n\t\t[113,\t\t87,\t\t0\t\t],\n\t\t[87,\t\t85,\t\t1\t\t],\n\t\t[110,\t\t114,\t\t1\t\t],\n\t\t[115,\t\t116,\t\t0\t\t],\n\t\t[117,\t\t118,\t\t0\t\t],\n\t\t[117,\t\t119,\t\t0\t\t],\n\t\t[117,\t\t120,\t\t1\t\t],\n\t\t[121,\t\t122,\t\t0\t\t],\n\t\t[123,\t\t124,\t\t0\t\t],\n\t\t[125,\t\t126,\t\t0\t\t],\n\t\t[127,\t\t119,\t\t0\t\t],\n\t\t[118,\t\t128,\t\t0\t\t],\n\t\t[121,\t\t119,\t\t0\t\t],\n\t\t[530,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t130,\t\t0\t\t],\n\t\t[125,\t\t123,\t\t0\t\t],\n\t\t[131,\t\t132,\t\t0\t\t],\n\t\t[133,\t\t123,\t\t0\t\t],\n\t\t[524,\t\t134,\t\t0\t\t],\n\t\t[135,\t\t136,\t\t0\t\t],\n\t\t[123,\t\t131,\t\t0\t\t],\n\t\t[117,\t\t128,\t\t1\t\t],\n\t\t[137,\t\t521,\t\t0\t\t],\n\t\t[531,\t\t514,\t\t0\t\t],\n\t\t[139,\t\t521,\t\t0\t\t],\n\t\t[140,\t\t514,\t\t0\t\t],\n\t\t[522,\t\t141,\t\t0\t\t],\n\t\t[142,\t\t523,\t\t0\t\t],\n\t\t[530,\t\t526,\t\t0\t\t],\n\t\t[140,\t\t532,\t\t0\t\t],\n\t\t[142,\t\t144,\t\t0\t\t],\n\t\t[140,\t\t522,\t\t0\t\t],\n\t\t[145,\t\t146,\t\t0\t\t],\n\t\t[147,\t\t523,\t\t0\t\t],\n\t\t[144,\t\t523,\t\t0\t\t],\n\t\t[139,\t\t523,\t\t0\t\t],\n\t\t[140,\t\t141,\t\t0\t\t],\n\t\t[528,\t\t526,\t\t0\t\t],\n\t\t[528,\t\t148,\t\t0\t\t],\n\t\t[149,\t\t150,\t\t0\t\t],\n\t\t[145,\t\t528,\t\t0\t\t],\n\t\t[530,\t\t151,\t\t0\t\t],\n\t\t[524,\t\t152,\t\t0\t\t],\n\t\t[149,\t\t525,\t\t1\t\t],\n\t\t[139,\t\t514,\t\t0\t\t],\n\t\t[126,\t\t120,\t\t1\t\t],\n\t\t[530,\t\t153,\t\t0\t\t],\n\t\t[528,\t\t147,\t\t1\t\t],\n\t\t[528,\t\t154,\t\t0\t\t],\n\t\t[130,\t\t120,\t\t1\t\t],\n\t\t[528,\t\t155,\t\t1\t\t],\n\t\t[524,\t\t533,\t\t0\t\t],\n\t\t[524,\t\t149,\t\t0\t\t],\n\t\t[154,\t\t150,\t\t0\t\t],\n\t\t[157,\t\t110,\t\t1\t\t],\n\t\t[119,\t\t158,\t\t0\t\t],\n\t\t[159,\t\t60,\t\t0\t\t],\n\t\t[536,\t\t161,\t\t0\t\t],\n\t\t[115,\t\t151,\t\t0\t\t],\n\t\t[162,\t\t134,\t\t0\t\t],\n\t\t[115,\t\t526,\t\t0\t\t],\n\t\t[138,\t\t87,\t\t0\t\t],\n\t\t[123,\t\t163,\t\t0\t\t],\n\t\t[112,\t\t164,\t\t0\t\t],\n\t\t[112,\t\t165,\t\t0\t\t],\n\t\t[166,\t\t165,\t\t0\t\t],\n\t\t[167,\t\t537,\t\t0\t\t],\n\t\t[168,\t\t104,\t\t0\t\t],\n\t\t[531,\t\t520,\t\t0\t\t],\n\t\t[139,\t\t520,\t\t0\t\t],\n\t\t[520,\t\t169,\t\t0\t\t],\n\t\t[168,\t\t105,\t\t0\t\t],\n\t\t[520,\t\t170,\t\t0\t\t],\n\t\t[171,\t\t89,\t\t0\t\t],\n\t\t[521,\t\t172,\t\t0\t\t],\n\t\t[123,\t\t173,\t\t0\t\t],\n\t\t[521,\t\t174,\t\t0\t\t],\n\t\t[37,\t\t39,\t\t0\t\t],\n\t\t[530,\t\t175,\t\t0\t\t],\n\t\t[530,\t\t176,\t\t0\t\t],\n\t\t[88,\t\t530,\t\t0\t\t],\n\t\t[177,\t\t496,\t\t1\t\t],\n\t\t[178,\t\t525,\t\t0\t\t],\n\t\t[179,\t\t493,\t\t1\t\t],\n\t\t[180,\t\t181,\t\t1\t\t],\n\t\t[182,\t\t180,\t\t0\t\t],\n\t\t[179,\t\t181,\t\t0\t\t],\n\t\t[180,\t\t493,\t\t1\t\t],\n\t\t[183,\t\t30,\t\t0\t\t],\n\t\t[183,\t\t21,\t\t0\t\t],\n\t\t[538,\t\t185,\t\t0\t\t],\n\t\t[538,\t\t89,\t\t0\t\t],\n\t\t[184,\t\t186,\t\t0\t\t],\n\t\t[184,\t\t187,\t\t0\t\t],\n\t\t[520,\t\t172,\t\t0\t\t],\n\t\t[89,\t\t175,\t\t0\t\t],\n\t\t[185,\t\t89,\t\t0\t\t],\n\t\t[89,\t\t188,\t\t0\t\t],\n\t\t[189,\t\t190,\t\t0\t\t],\n\t\t[539,\t\t172,\t\t0\t\t],\n\t\t[504,\t\t192,\t\t0\t\t],\n\t\t[105,\t\t186,\t\t0\t\t],\n\t\t[105,\t\t187,\t\t0\t\t],\n\t\t[539,\t\t193,\t\t0\t\t],\n\t\t[187,\t\t194,\t\t0\t\t],\n\t\t[539,\t\t540,\t\t0\t\t],\n\t\t[539,\t\t196,\t\t0\t\t],\n\t\t[197,\t\t540,\t\t0\t\t],\n\t\t[1
10,\t\t198,\t\t0\t\t],\n\t\t[197,\t\t539,\t\t0\t\t],\n\t\t[199,\t\t537,\t\t0\t\t],\n\t\t[134,\t\t526,\t\t0\t\t],\n\t\t[200,\t\t193,\t\t0\t\t],\n\t\t[4,\t\t201,\t\t1\t\t],\n\t\t[202,\t\t86,\t\t0\t\t],\n\t\t[85,\t\t203,\t\t0\t\t],\n\t\t[147,\t\t204,\t\t0\t\t],\n\t\t[147,\t\t205,\t\t0\t\t],\n\t\t[123,\t\t206,\t\t0\t\t],\n\t\t[537,\t\t207,\t\t0\t\t],\n\t\t[165,\t\t208,\t\t0\t\t],\n\t\t[4,\t\t94,\t\t1\t\t],\n\t\t[4,\t\t2,\t\t0\t\t],\n\t\t[209,\t\t4,\t\t0\t\t],\n\t\t[119,\t\t163,\t\t0\t\t],\n\t\t[210,\t\t3,\t\t0\t\t],\n\t\t[99,\t\t211,\t\t0\t\t],\n\t\t[99,\t\t69,\t\t1\t\t],\n\t\t[212,\t\t99,\t\t0\t\t],\n\t\t[213,\t\t214,\t\t0\t\t],\n\t\t[510,\t\t215,\t\t0\t\t],\n\t\t[128,\t\t69,\t\t1\t\t],\n\t\t[216,\t\t69,\t\t1\t\t],\n\t\t[217,\t\t98,\t\t0\t\t],\n\t\t[504,\t\t218,\t\t0\t\t],\n\t\t[177,\t\t504,\t\t1\t\t],\n\t\t[219,\t\t209,\t\t0\t\t],\n\t\t[219,\t\t220,\t\t0\t\t],\n\t\t[94,\t\t95,\t\t1\t\t],\n\t\t[159,\t\t221,\t\t1\t\t],\n\t\t[34,\t\t161,\t\t0\t\t],\n\t\t[222,\t\t221,\t\t0\t\t],\n\t\t[211,\t\t52,\t\t1\t\t],\n\t\t[215,\t\t223,\t\t1\t\t],\n\t\t[224,\t\t215,\t\t0\t\t],\n\t\t[225,\t\t224,\t\t1\t\t],\n\t\t[224,\t\t223,\t\t0\t\t],\n\t\t[226,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t3,\t\t1\t\t],\n\t\t[216,\t\t227,\t\t1\t\t],\n\t\t[228,\t\t229,\t\t0\t\t],\n\t\t[227,\t\t230,\t\t0\t\t],\n\t\t[231,\t\t53,\t\t1\t\t],\n\t\t[544,\t\t545,\t\t0\t\t],\n\t\t[234,\t\t235,\t\t1\t\t],\n\t\t[546,\t\t214,\t\t1\t\t],\n\t\t[233,\t\t227,\t\t0\t\t],\n\t\t[237,\t\t238,\t\t0\t\t],\n\t\t[212,\t\t100,\t\t0\t\t],\n\t\t[519,\t\t239,\t\t0\t\t],\n\t\t[238,\t\t519,\t\t0\t\t],\n\t\t[213,\t\t240,\t\t0\t\t],\n\t\t[241,\t\t242,\t\t1\t\t],\n\t\t[70,\t\t241,\t\t0\t\t],\n\t\t[509,\t\t213,\t\t0\t\t],\n\t\t[68,\t\t243,\t\t0\t\t],\n\t\t[243,\t\t244,\t\t0\t\t],\n\t\t[68,\t\t244,\t\t0\t\t],\n\t\t[544,\t\t547,\t\t1\t\t],\n\t\t[245,\t\t227,\t\t1\t\t],\n\t\t[246,\t\t208,\t\t0\t\t],\n\t\t[112,\t\t208,\t\t0\t\t],\n\t\t[165,\t\t247,\t\t0\t\t],\n\t\t[537,\t\t549,\t\t0\t\t],\n\t\t[537,\t\t550,\t\t0\t\t],\n\t\t[537,\t\t551,\t\t0\t\t],\n\t\t[110,\t\t251,\t\t0\t\t],\n\t\t[510,\t\t252,\t\t1\t\t],\n\t\t[529,\t\t253,\t\t1\t\t],\n\t\t[237,\t\t239,\t\t1\t\t],\n\t\t[254,\t\t238,\t\t1\t\t],\n\t\t[69,\t\t255,\t\t0\t\t],\n\t\t[510,\t\t225,\t\t1\t\t],\n\t\t[256,\t\t257,\t\t0\t\t],\n\t\t[258,\t\t190,\t\t0\t\t],\n\t\t[258,\t\t259,\t\t0\t\t],\n\t\t[260,\t\t261,\t\t1\t\t],\n\t\t[554,\t\t553,\t\t1\t\t],\n\t\t[515,\t\t263,\t\t0\t\t],\n\t\t[14,\t\t264,\t\t1\t\t],\n\t\t[116,\t\t555,\t\t0\t\t],\n\t\t[151,\t\t116,\t\t0\t\t],\n\t\t[111,\t\t114,\t\t1\t\t],\n\t\t[77,\t\t111,\t\t0\t\t],\n\t\t[266,\t\t525,\t\t0\t\t],\n\t\t[267,\t\t120,\t\t1\t\t],\n\t\t[268,\t\t269,\t\t0\t\t],\n\t\t[556,\t\t271,\t\t0\t\t],\n\t\t[556,\t\t272,\t\t0\t\t],\n\t\t[529,\t\t273,\t\t0\t\t],\n\t\t[128,\t\t274,\t\t0\t\t],\n\t\t[34,\t\t275,\t\t0\t\t],\n\t\t[503,\t\t276,\t\t0\t\t],\n\t\t[503,\t\t504,\t\t1\t\t],\n\t\t[177,\t\t218,\t\t1\t\t],\n\t\t[277,\t\t278,\t\t1\t\t],\n\t\t[557,\t\t558,\t\t1\t\t],\n\t\t[557,\t\t559,\t\t1\t\t],\n\t\t[559,\t\t558,\t\t1\t\t],\n\t\t[277,\t\t78,\t\t1\t\t],\n\t\t[277,\t\t279,\t\t1\t\t],\n\t\t[78,\t\t279,\t\t0\t\t],\n\t\t[281,\t\t282,\t\t0\t\t],\n\t\t[283,\t\t161,\t\t1\t\t],\n\t\t[268,\t\t161,\t\t1\t\t],\n\t\t[256,\t\t284,\t\t0\t\t],\n\t\t[515,\t\t516,\t\t1\t\t],\n\t\t[263,\t\t516,\t\t0\t\t],\n\t\t[516,\t\t285,\t\t0\t\t],\n\t\t[63,\t\t286,\t\t0\t\t],\n\t\t[287,\t\t516,\t\t0\t\t],\n\t\t[8,\t\t102,\t\t1\t\t],\n\t\t[8,\t\t101,\t\t1\t\t],\n\t\t[80,\t\t288,\t\t0\t\t],\n\t\t[80,\t\t289,\t\t0\t\t],\n\t\t[276,\t\t560,\t\t0\t\t],\n\t\t[37,\t\t290,\t\t0\t\t],\n\t\t[290,\t\t74,\t\t1\t\t],\n\t\t[512,\t\
t291,\t\t0\t\t],\n\t\t[78,\t\t292,\t\t1\t\t],\n\t\t[199,\t\t548,\t\t0\t\t],\n\t\t[491,\t\t293,\t\t0\t\t],\n\t\t[4,\t\t294,\t\t0\t\t],\n\t\t[490,\t\t541,\t\t1\t\t],\n\t\t[491,\t\t295,\t\t0\t\t],\n\t\t[491,\t\t296,\t\t0\t\t],\n\t\t[295,\t\t297,\t\t0\t\t],\n\t\t[508,\t\t161,\t\t0\t\t],\n\t\t[117,\t\t123,\t\t0\t\t],\n\t\t[133,\t\t117,\t\t0\t\t],\n\t\t[71,\t\t74,\t\t1\t\t],\n\t\t[74,\t\t278,\t\t1\t\t],\n\t\t[298,\t\t515,\t\t0\t\t],\n\t\t[5,\t\t299,\t\t0\t\t],\n\t\t[32,\t\t292,\t\t1\t\t],\n\t\t[5,\t\t29,\t\t1\t\t],\n\t\t[503,\t\t560,\t\t0\t\t],\n\t\t[300,\t\t301,\t\t1\t\t],\n\t\t[51,\t\t300,\t\t0\t\t],\n\t\t[244,\t\t302,\t\t1\t\t],\n\t\t[31,\t\t302,\t\t1\t\t],\n\t\t[51,\t\t282,\t\t1\t\t],\n\t\t[303,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t259,\t\t0\t\t],\n\t\t[306,\t\t307,\t\t1\t\t],\n\t\t[305,\t\t308,\t\t0\t\t],\n\t\t[305,\t\t309,\t\t0\t\t],\n\t\t[310,\t\t309,\t\t1\t\t],\n\t\t[306,\t\t309,\t\t1\t\t],\n\t\t[311,\t\t280,\t\t0\t\t],\n\t\t[280,\t\t278,\t\t1\t\t],\n\t\t[311,\t\t32,\t\t1\t\t],\n\t\t[13,\t\t312,\t\t1\t\t],\n\t\t[313,\t\t314,\t\t0\t\t],\n\t\t[312,\t\t313,\t\t1\t\t],\n\t\t[547,\t\t566,\t\t1\t\t],\n\t\t[245,\t\t315,\t\t1\t\t],\n\t\t[312,\t\t316,\t\t0\t\t],\n\t\t[312,\t\t314,\t\t0\t\t],\n\t\t[554,\t\t546,\t\t1\t\t],\n\t\t[262,\t\t216,\t\t1\t\t],\n\t\t[317,\t\t233,\t\t0\t\t],\n\t\t[318,\t\t317,\t\t0\t\t],\n\t\t[231,\t\t52,\t\t1\t\t],\n\t\t[319,\t\t567,\t\t0\t\t],\n\t\t[557,\t\t321,\t\t0\t\t],\n\t\t[277,\t\t65,\t\t1\t\t],\n\t\t[322,\t\t288,\t\t1\t\t],\n\t\t[322,\t\t323,\t\t0\t\t],\n\t\t[277,\t\t324,\t\t1\t\t],\n\t\t[324,\t\t325,\t\t0\t\t],\n\t\t[277,\t\t325,\t\t0\t\t],\n\t\t[326,\t\t327,\t\t0\t\t],\n\t\t[328,\t\t326,\t\t1\t\t],\n\t\t[328,\t\t327,\t\t1\t\t],\n\t\t[326,\t\t329,\t\t0\t\t],\n\t\t[568,\t\t329,\t\t1\t\t],\n\t\t[568,\t\t326,\t\t0\t\t],\n\t\t[332,\t\t78,\t\t1\t\t],\n\t\t[333,\t\t306,\t\t0\t\t],\n\t\t[332,\t\t333,\t\t0\t\t],\n\t\t[332,\t\t334,\t\t0\t\t],\n\t\t[66,\t\t334,\t\t1\t\t],\n\t\t[330,\t\t335,\t\t1\t\t],\n\t\t[336,\t\t66,\t\t0\t\t],\n\t\t[330,\t\t336,\t\t1\t\t],\n\t\t[68,\t\t70,\t\t0\t\t],\n\t\t[509,\t\t337,\t\t1\t\t],\n\t\t[324,\t\t288,\t\t0\t\t],\n\t\t[338,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t340,\t\t1\t\t],\n\t\t[559,\t\t340,\t\t1\t\t],\n\t\t[341,\t\t292,\t\t0\t\t],\n\t\t[557,\t\t342,\t\t0\t\t],\n\t\t[558,\t\t343,\t\t0\t\t],\n\t\t[502,\t\t340,\t\t1\t\t],\n\t\t[72,\t\t32,\t\t1\t\t],\n\t\t[344,\t\t345,\t\t0\t\t],\n\t\t[346,\t\t47,\t\t0\t\t],\n\t\t[46,\t\t47,\t\t0\t\t],\n\t\t[346,\t\t345,\t\t0\t\t],\n\t\t[347,\t\t328,\t\t0\t\t],\n\t\t[347,\t\t348,\t\t1\t\t],\n\t\t[571,\t\t348,\t\t1\t\t],\n\t\t[347,\t\t572,\t\t0\t\t],\n\t\t[571,\t\t570,\t\t1\t\t],\n\t\t[14,\t\t350,\t\t0\t\t],\n\t\t[350,\t\t573,\t\t0\t\t],\n\t\t[15,\t\t351,\t\t1\t\t],\n\t\t[352,\t\t15,\t\t0\t\t],\n\t\t[15,\t\t335,\t\t1\t\t],\n\t\t[232,\t\t227,\t\t0\t\t],\n\t\t[565,\t\t544,\t\t1\t\t],\n\t\t[235,\t\t567,\t\t1\t\t],\n\t\t[567,\t\t286,\t\t0\t\t],\n\t\t[353,\t\t519,\t\t0\t\t],\n\t\t[354,\t\t353,\t\t0\t\t],\n\t\t[355,\t\t354,\t\t0\t\t],\n\t\t[354,\t\t356,\t\t0\t\t],\n\t\t[357,\t\t358,\t\t0\t\t],\n\t\t[574,\t\t359,\t\t0\t\t],\n\t\t[235,\t\t575,\t\t0\t\t],\n\t\t[167,\t\t361,\t\t0\t\t],\n\t\t[528,\t\t362,\t\t0\t\t],\n\t\t[363,\t\t344,\t\t0\t\t],\n\t\t[259,\t\t364,\t\t1\t\t],\n\t\t[54,\t\t56,\t\t0\t\t],\n\t\t[365,\t\t364,\t\t0\t\t],\n\t\t[231,\t\t366,\t\t0\t\t],\n\t\t[30,\t\t367,\t\t0\t\t],\n\t\t[61,\t\t367,\t\t1\t\t],\n\t\t[254,\t\t368,\t\t0\t\t],\n\t\t[254,\t\t369,\t\t0\t\t],\n\t\t[254,\t\t370,\t\t0\t\t],\n\t\t[99,\t\t358,\t\t0\t\t],\n\t\t[354,\t\t519,\t\t0\t\t],\n\t\t[
571,\t\t371,\t\t0\t\t],\n\t\t[207,\t\t372,\t\t0\t\t],\n\t\t[57,\t\t373,\t\t0\t\t],\n\t\t[209,\t\t374,\t\t0\t\t],\n\t\t[375,\t\t376,\t\t0\t\t],\n\t\t[376,\t\t377,\t\t0\t\t],\n\t\t[16,\t\t49,\t\t0\t\t],\n\t\t[318,\t\t377,\t\t0\t\t],\n\t\t[378,\t\t297,\t\t0\t\t],\n\t\t[562,\t\t379,\t\t0\t\t],\n\t\t[576,\t\t563,\t\t0\t\t],\n\t\t[576,\t\t381,\t\t0\t\t],\n\t\t[577,\t\t576,\t\t1\t\t],\n\t\t[244,\t\t383,\t\t0\t\t],\n\t\t[244,\t\t306,\t\t1\t\t],\n\t\t[383,\t\t306,\t\t1\t\t],\n\t\t[380,\t\t306,\t\t0\t\t],\n\t\t[252,\t\t225,\t\t0\t\t],\n\t\t[220,\t\t76,\t\t0\t\t],\n\t\t[542,\t\t384,\t\t0\t\t],\n\t\t[385,\t\t384,\t\t0\t\t],\n\t\t[542,\t\t385,\t\t0\t\t],\n\t\t[386,\t\t385,\t\t0\t\t],\n\t\t[387,\t\t578,\t\t0\t\t],\n\t\t[332,\t\t388,\t\t1\t\t],\n\t\t[382,\t\t332,\t\t1\t\t],\n\t\t[382,\t\t388,\t\t0\t\t],\n\t\t[579,\t\t578,\t\t0\t\t],\n\t\t[577,\t\t387,\t\t1\t\t],\n\t\t[144,\t\t390,\t\t0\t\t],\n\t\t[37,\t\t49,\t\t0\t\t],\n\t\t[391,\t\t233,\t\t0\t\t],\n\t\t[392,\t\t310,\t\t0\t\t],\n\t\t[260,\t\t393,\t\t0\t\t],\n\t\t[394,\t\t230,\t\t0\t\t],\n\t\t[395,\t\t282,\t\t1\t\t],\n\t\t[395,\t\t244,\t\t0\t\t],\n\t\t[25,\t\t396,\t\t1\t\t],\n\t\t[81,\t\t74,\t\t0\t\t],\n\t\t[278,\t\t80,\t\t1\t\t],\n\t\t[81,\t\t278,\t\t1\t\t],\n\t\t[569,\t\t570,\t\t0\t\t],\n\t\t[397,\t\t552,\t\t0\t\t],\n\t\t[542,\t\t398,\t\t0\t\t],\n\t\t[398,\t\t385,\t\t0\t\t],\n\t\t[399,\t\t499,\t\t0\t\t],\n\t\t[83,\t\t399,\t\t0\t\t],\n\t\t[498,\t\t400,\t\t0\t\t],\n\t\t[518,\t\t239,\t\t1\t\t],\n\t\t[575,\t\t543,\t\t0\t\t],\n\t\t[401,\t\t360,\t\t0\t\t],\n\t\t[580,\t\t581,\t\t0\t\t],\n\t\t[401,\t\t402,\t\t0\t\t],\n\t\t[403,\t\t231,\t\t0\t\t],\n\t\t[189,\t\t360,\t\t1\t\t],\n\t\t[234,\t\t404,\t\t0\t\t],\n\t\t[235,\t\t404,\t\t1\t\t],\n\t\t[235,\t\t580,\t\t0\t\t],\n\t\t[216,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t318,\t\t0\t\t],\n\t\t[406,\t\t230,\t\t0\t\t],\n\t\t[542,\t\t407,\t\t0\t\t],\n\t\t[23,\t\t408,\t\t0\t\t],\n\t\t[577,\t\t348,\t\t0\t\t],\n\t\t[562,\t\t564,\t\t1\t\t],\n\t\t[582,\t\t507,\t\t0\t\t],\n\t\t[27,\t\t410,\t\t0\t\t],\n\t\t[501,\t\t27,\t\t0\t\t],\n\t\t[27,\t\t411,\t\t0\t\t],\n\t\t[411,\t\t410,\t\t0\t\t],\n\t\t[403,\t\t360,\t\t0\t\t],\n\t\t[412,\t\t360,\t\t0\t\t],\n\t\t[326,\t\t413,\t\t0\t\t],\n\t\t[414,\t\t413,\t\t0\t\t],\n\t\t[6,\t\t297,\t\t0\t\t],\n\t\t[554,\t\t580,\t\t1\t\t],\n\t\t[262,\t\t401,\t\t1\t\t],\n\t\t[499,\t\t556,\t\t1\t\t],\n\t\t[224,\t\t229,\t\t0\t\t],\n\t\t[583,\t\t507,\t\t0\t\t],\n\t\t[415,\t\t307,\t\t0\t\t],\n\t\t[416,\t\t507,\t\t0\t\t],\n\t\t[284,\t\t561,\t\t0\t\t],\n\t\t[543,\t\t417,\t\t0\t\t],\n\t\t[418,\t\t506,\t\t0\t\t],\n\t\t[220,\t\t157,\t\t0\t\t],\n\t\t[295,\t\t419,\t\t0\t\t],\n\t\t[295,\t\t420,\t\t0\t\t],\n\t\t[541,\t\t62,\t\t0\t\t],\n\t\t[52,\t\t421,\t\t0\t\t],\n\t\t[60,\t\t160,\t\t0\t\t],\n\t\t[535,\t\t161,\t\t0\t\t],\n\t\t[267,\t\t282,\t\t0\t\t],\n\t\t[52,\t\t365,\t\t0\t\t],\n\t\t[28,\t\t27,\t\t0\t\t],\n\t\t[30,\t\t201,\t\t1\t\t],\n\t\t[422,\t\t81,\t\t0\t\t],\n\t\t[119,\t\t425,\t\t0\t\t],\n\t\t[423,\t\t425,\t\t0\t\t],\n\t\t[424,\t\t425,\t\t0\t\t],\n\t\t[426,\t\t428,\t\t0\t\t],\n\t\t[427,\t\t428,\t\t0\t\t],\n\t\t[19,\t\t428,\t\t1\t\t],\n\t\t[45,\t\t429,\t\t0\t\t],\n\t\t[44,\t\t429,\t\t0\t\t],\n\t\t[505,\t\t429,\t\t0\t\t],\n\t\t[231,\t\t431,\t\t1\t\t],\n\t\t[190,\t\t431,\t\t1\t\t],\n\t\t[430,\t\t431,\t\t0\t\t],\n\t\t[286,\t\t433,\t\t0\t\t],\n\t\t[432,\t\t433,\t\t0\t\t],\n\t\t[506,\t\t433,\t\t0\t\t],\n\t\t[23,\t\t434,\t\t0\t\t],\n\t\t[400,\t\t434,\t\t0\t\t],\n\t\t[500,\t\t434,\t\t0\t\t],\n\t\t[32,\t\t436,\t\t0\t\t],\n\t\t[435,\t\t436,\t\t0\t\t],\n\t\t[78,\t\t436,\t\t1\t\t],\n\t\t[86,\t\t438,\t\t1\
t\t],\n\t\t[437,\t\t438,\t\t0\t\t],\n\t\t[221,\t\t438,\t\t0\t\t],\n\t\t[207,\t\t439,\t\t0\t\t],\n\t\t[516,\t\t439,\t\t0\t\t],\n\t\t[513,\t\t439,\t\t0\t\t],\n\t\t[181,\t\t441,\t\t1\t\t],\n\t\t[440,\t\t441,\t\t0\t\t],\n\t\t[504,\t\t441,\t\t1\t\t],\n\t\t[135,\t\t442,\t\t0\t\t],\n\t\t[109,\t\t442,\t\t0\t\t],\n\t\t[112,\t\t442,\t\t0\t\t],\n\t\t[113,\t\t443,\t\t0\t\t],\n\t\t[132,\t\t443,\t\t0\t\t],\n\t\t[107,\t\t443,\t\t0\t\t],\n\t\t[444,\t\t445,\t\t0\t\t],\n\t\t[112,\t\t445,\t\t0\t\t],\n\t\t[109,\t\t445,\t\t0\t\t],\n\t\t[119,\t\t447,\t\t1\t\t],\n\t\t[100,\t\t447,\t\t1\t\t],\n\t\t[446,\t\t447,\t\t0\t\t],\n\t\t[124,\t\t448,\t\t0\t\t],\n\t\t[125,\t\t448,\t\t0\t\t],\n\t\t[131,\t\t448,\t\t0\t\t],\n\t\t[449,\t\t450,\t\t0\t\t],\n\t\t[173,\t\t450,\t\t0\t\t],\n\t\t[184,\t\t450,\t\t0\t\t],\n\t\t[144,\t\t451,\t\t0\t\t],\n\t\t[140,\t\t451,\t\t0\t\t],\n\t\t[514,\t\t451,\t\t0\t\t],\n\t\t[537,\t\t585,\t\t1\t\t],\n\t\t[141,\t\t585,\t\t0\t\t],\n\t\t[584,\t\t585,\t\t0\t\t],\n\t\t[522,\t\t454,\t\t0\t\t],\n\t\t[144,\t\t454,\t\t0\t\t],\n\t\t[453,\t\t454,\t\t0\t\t],\n\t\t[199,\t\t456,\t\t0\t\t],\n\t\t[140,\t\t456,\t\t0\t\t],\n\t\t[455,\t\t456,\t\t0\t\t],\n\t\t[537,\t\t456,\t\t0\t\t],\n\t\t[538,\t\t457,\t\t0\t\t],\n\t\t[153,\t\t457,\t\t0\t\t],\n\t\t[176,\t\t457,\t\t0\t\t],\n\t\t[524,\t\t459,\t\t0\t\t],\n\t\t[458,\t\t459,\t\t0\t\t],\n\t\t[134,\t\t459,\t\t0\t\t],\n\t\t[460,\t\t461,\t\t0\t\t],\n\t\t[150,\t\t461,\t\t0\t\t],\n\t\t[149,\t\t461,\t\t0\t\t],\n\t\t[521,\t\t463,\t\t0\t\t],\n\t\t[462,\t\t463,\t\t0\t\t],\n\t\t[538,\t\t463,\t\t0\t\t],\n\t\t[110,\t\t464,\t\t0\t\t],\n\t\t[90,\t\t464,\t\t0\t\t],\n\t\t[165,\t\t464,\t\t0\t\t],\n\t\t[458,\t\t465,\t\t0\t\t],\n\t\t[134,\t\t465,\t\t0\t\t],\n\t\t[524,\t\t465,\t\t0\t\t],\n\t\t[466,\t\t467,\t\t0\t\t],\n\t\t[110,\t\t467,\t\t0\t\t],\n\t\t[165,\t\t467,\t\t0\t\t],\n\t\t[468,\t\t469,\t\t0\t\t],\n\t\t[541,\t\t469,\t\t0\t\t],\n\t\t[490,\t\t469,\t\t0\t\t],\n\t\t[263,\t\t471,\t\t0\t\t],\n\t\t[470,\t\t471,\t\t0\t\t],\n\t\t[534,\t\t471,\t\t0\t\t],\n\t\t[136,\t\t472,\t\t0\t\t],\n\t\t[110,\t\t472,\t\t0\t\t],\n\t\t[251,\t\t472,\t\t0\t\t],\n\t\t[226,\t\t474,\t\t0\t\t],\n\t\t[473,\t\t474,\t\t0\t\t],\n\t\t[257,\t\t474,\t\t0\t\t],\n\t\t[6,\t\t474,\t\t1\t\t],\n\t\t[299,\t\t475,\t\t1\t\t],\n\t\t[3,\t\t475,\t\t0\t\t],\n\t\t[210,\t\t475,\t\t0\t\t],\n\t\t[297,\t\t476,\t\t0\t\t],\n\t\t[296,\t\t476,\t\t0\t\t],\n\t\t[295,\t\t476,\t\t0\t\t],\n\t\t[313,\t\t478,\t\t1\t\t],\n\t\t[477,\t\t478,\t\t0\t\t],\n\t\t[245,\t\t478,\t\t0\t\t],\n\t\t[479,\t\t481,\t\t0\t\t],\n\t\t[565,\t\t481,\t\t0\t\t],\n\t\t[480,\t\t481,\t\t0\t\t],\n\t\t[415,\t\t482,\t\t0\t\t],\n\t\t[56,\t\t482,\t\t0\t\t],\n\t\t[409,\t\t482,\t\t0\t\t],\n\t\t[483,\t\t484,\t\t0\t\t],\n\t\t[3,\t\t484,\t\t0\t\t],\n\t\t[301,\t\t484,\t\t0\t\t],\n\t\t[233,\t\t485,\t\t0\t\t],\n\t\t[392,\t\t485,\t\t0\t\t],\n\t\t[391,\t\t485,\t\t0\t\t],\n\t\t[579,\t\t488,\t\t0\t\t],\n\t\t[486,\t\t488,\t\t0\t\t],\n\t\t[487,\t\t488,\t\t0\t\t],\n\t\t[270,\t\t489,\t\t0\t\t],\n\t\t[331,\t\t489,\t\t0\t\t],\n\t\t[396,\t\t489,\t\t1\t\t],\n\t\t[519,\t\t253,\t\t0\t\t],\n\t\t[382,\t\t349,\t\t1\t\t],\n\t\t[349,\t\t351,\t\t0\t\t],\n\t\t[459,\t\t465,\t\t0\t\t],\n\t\t[549,\t\t550,\t\t0\t\t],\n\t\t[550,\t\t551,\t\t0\t\t],\n\t\t[194,\t\t195,\t\t0\t\t],\n\t\t[247,\t\t248,\t\t0\t\t],\n\t\t[2,\t\t294,\t\t0\t\t],\n\t\t[549,\t\t551,\t\t0\t\t],\n\t\t[54,\t\t365,\t\t0\t\t],\n\t\t[131,\t\t265,\t\t0\t\t],\n\t\t[91,\t\t92,\t\t0\t\t],\n\t\t[247,\t\t249,\t\t0\t\t],\n\t\t[186,\t\t191,\t\t0\t\t],\n\t\t[129,\t\t173,\t\t0\t\t],\n\t\t[96,\t\t202,\t\t0\t\t],\n\t\t[53,\t\t320,\t\t0\t\t],\n\t\t[24,\t\t396,\t\t0\t
\t],\n\t\t[133,\t\t156,\t\t0\t\t],\n\t\t[442,\t\t452,\t\t0\t\t],\n\t\t[445,\t\t452,\t\t0\t\t],\n\t\t[247,\t\t250,\t\t0\t\t],\n\t\t[187,\t\t195,\t\t0\t\t],\n\t\t[216,\t\t236,\t\t0\t\t],\n\t\t[244,\t\t389,\t\t0\t\t],\n\t\t[394,\t\t406,\t\t0\t\t],\n\t\t[442,\t\t445,\t\t0\t\t],\n\t\t[442,\t\t444,\t\t0\t\t],\n\t\t[198,\t\t472,\t\t0\t\t],\n\t\t[464,\t\t467,\t\t0\t\t],\n\t\t[198,\t\t251,\t\t0\t\t],\n\t\t[112,\t\t143,\t\t0\t\t],\n\t\t[2,\t\t490,\t\t0\t\t],\n\t\t[5,\t\t491,\t\t0\t\t],\n\t\t[10,\t\t492,\t\t0\t\t],\n\t\t[12,\t\t493,\t\t0\t\t],\n\t\t[13,\t\t494,\t\t0\t\t],\n\t\t[15,\t\t495,\t\t0\t\t],\n\t\t[18,\t\t496,\t\t0\t\t],\n\t\t[20,\t\t497,\t\t0\t\t],\n\t\t[22,\t\t498,\t\t0\t\t],\n\t\t[24,\t\t499,\t\t0\t\t],\n\t\t[26,\t\t500,\t\t0\t\t],\n\t\t[30,\t\t501,\t\t0\t\t],\n\t\t[32,\t\t502,\t\t0\t\t],\n\t\t[37,\t\t503,\t\t0\t\t],\n\t\t[42,\t\t504,\t\t0\t\t],\n\t\t[46,\t\t505,\t\t0\t\t],\n\t\t[52,\t\t506,\t\t0\t\t],\n\t\t[56,\t\t507,\t\t0\t\t],\n\t\t[61,\t\t508,\t\t0\t\t],\n\t\t[68,\t\t509,\t\t0\t\t],\n\t\t[69,\t\t510,\t\t0\t\t],\n\t\t[74,\t\t511,\t\t0\t\t],\n\t\t[78,\t\t512,\t\t0\t\t],\n\t\t[86,\t\t513,\t\t0\t\t],\n\t\t[87,\t\t514,\t\t0\t\t],\n\t\t[94,\t\t515,\t\t0\t\t],\n\t\t[95,\t\t516,\t\t0\t\t],\n\t\t[96,\t\t517,\t\t0\t\t],\n\t\t[99,\t\t518,\t\t0\t\t],\n\t\t[100,\t\t519,\t\t0\t\t],\n\t\t[104,\t\t520,\t\t0\t\t],\n\t\t[105,\t\t521,\t\t0\t\t],\n\t\t[106,\t\t522,\t\t0\t\t],\n\t\t[107,\t\t523,\t\t0\t\t],\n\t\t[117,\t\t524,\t\t0\t\t],\n\t\t[120,\t\t525,\t\t0\t\t],\n\t\t[123,\t\t526,\t\t0\t\t],\n\t\t[124,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t528,\t\t0\t\t],\n\t\t[128,\t\t529,\t\t0\t\t],\n\t\t[129,\t\t530,\t\t0\t\t],\n\t\t[138,\t\t531,\t\t0\t\t],\n\t\t[143,\t\t532,\t\t0\t\t],\n\t\t[156,\t\t533,\t\t0\t\t],\n\t\t[157,\t\t534,\t\t0\t\t],\n\t\t[159,\t\t535,\t\t0\t\t],\n\t\t[160,\t\t536,\t\t0\t\t],\n\t\t[165,\t\t537,\t\t0\t\t],\n\t\t[184,\t\t538,\t\t0\t\t],\n\t\t[191,\t\t539,\t\t0\t\t],\n\t\t[195,\t\t540,\t\t0\t\t],\n\t\t[201,\t\t541,\t\t0\t\t],\n\t\t[220,\t\t542,\t\t0\t\t],\n\t\t[231,\t\t543,\t\t0\t\t],\n\t\t[232,\t\t544,\t\t0\t\t],\n\t\t[233,\t\t545,\t\t0\t\t],\n\t\t[236,\t\t546,\t\t0\t\t],\n\t\t[245,\t\t547,\t\t0\t\t],\n\t\t[246,\t\t548,\t\t0\t\t],\n\t\t[248,\t\t549,\t\t0\t\t],\n\t\t[249,\t\t550,\t\t0\t\t],\n\t\t[250,\t\t551,\t\t0\t\t],\n\t\t[259,\t\t552,\t\t0\t\t],\n\t\t[261,\t\t553,\t\t0\t\t],\n\t\t[262,\t\t554,\t\t0\t\t],\n\t\t[265,\t\t555,\t\t0\t\t],\n\t\t[270,\t\t556,\t\t0\t\t],\n\t\t[277,\t\t557,\t\t0\t\t],\n\t\t[279,\t\t558,\t\t0\t\t],\n\t\t[280,\t\t559,\t\t0\t\t],\n\t\t[290,\t\t560,\t\t0\t\t],\n\t\t[301,\t\t561,\t\t0\t\t],\n\t\t[305,\t\t562,\t\t0\t\t],\n\t\t[306,\t\t563,\t\t0\t\t],\n\t\t[310,\t\t564,\t\t0\t\t],\n\t\t[313,\t\t565,\t\t0\t\t],\n\t\t[315,\t\t566,\t\t0\t\t],\n\t\t[320,\t\t567,\t\t0\t\t],\n\t\t[330,\t\t568,\t\t0\t\t],\n\t\t[332,\t\t569,\t\t0\t\t],\n\t\t[334,\t\t570,\t\t0\t\t],\n\t\t[336,\t\t571,\t\t0\t\t],\n\t\t[349,\t\t572,\t\t0\t\t],\n\t\t[351,\t\t573,\t\t0\t\t],\n\t\t[358,\t\t574,\t\t0\t\t],\n\t\t[360,\t\t575,\t\t0\t\t],\n\t\t[380,\t\t576,\t\t0\t\t],\n\t\t[382,\t\t577,\t\t0\t\t],\n\t\t[383,\t\t578,\t\t0\t\t],\n\t\t[389,\t\t579,\t\t0\t\t],\n\t\t[401,\t\t580,\t\t0\t\t],\n\t\t[402,\t\t581,\t\t0\t\t],\n\t\t[409,\t\t582,\t\t0\t\t],\n\t\t[415,\t\t583,\t\t0\t\t],\n\t\t[444,\t\t584,\t\t0\t\t],\n\t\t[452,\t\t585,\t\t0\t\t]\n\t])\n\tppc[\"parameters\"] = {\n\t\t\"x_trans_sg\": 0.003, \n\t\t\"x_trans_fm\": 0.001, \n\t\t\"x_trans_fl\": 0.001, \n\t\t\"d_l\": 1e-3, \n\t\t\"d_l_perturb\": 1e-5, \n\t\t\"w_1_ij\": 1, \n\t\t\"w_2_ij\": 1, \n\t\t\"w_3_ij\": 1, \n\t\t\"w_4_ij\": 1, \n\t\t\"b_r\": 238, 
\n\t\t\"b_c\": 248 }\n\treturn ppc", "from numpy import array\ndef case_ln_101():\n\tppc = {\"version\": '2'}\n\tppc[\"baseMVA\"] = 100.0\n\tppc[\"bus\"] = array([\n\t\t[1.0, 1.0, 37.358, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[3.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[4.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[5.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[6.0, 1.0, 7.4716, 2.7396, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[7.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[8.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[9.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 11.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[10.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 11.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[11.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[12.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[13.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[14.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[15.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[16.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[17.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[18.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[19.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[20.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[21.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[22.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[23.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[24.0, 2.0, 2.4905, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 6.3, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[25.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[26.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[27.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[28.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[29.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[30.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[31.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[32.0, 2.0, 1.7434, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[33.0, 2.0, 1.4943, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[34.0, 2.0, 1.4943, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[35.0, 2.0, 3.4867, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[36.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[37.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[38.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[39.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[40.0, 1.0, 0.0, 0.0, 0.0, 
0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[41.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[42.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[43.0, 1.0, 42.2395, 1.584, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[44.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[45.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[46.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[47.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[48.0, 1.0, 24.9053, 3.2427, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[49.0, 1.0, 51.8031, 3.2427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[50.0, 1.0, 10.3606, 3.2427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[51.0, 1.0, 29.8864, 9.9621, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[52.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[53.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[54.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[55.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[56.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[57.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[58.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[59.0, 1.0, 49.8107, 3.1181, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[107.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[108.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[109.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[110.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[111.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[112.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[113.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[114.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[115.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[116.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[117.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[118.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[119.0, 1.0, 59.7728, 29.8864, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[120.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[121.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[122.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[123.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[307.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[310.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[315.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 
0.95, 0.6, 10 ],\n\t\t[316.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[482.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[483.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[484.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[499.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[500.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[508.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[539.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[540.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[541.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[542.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[552.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[553.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[556.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[557.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1418.0, 1.0, 69.735, 19.9243, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1454.0, 1.0, 34.3694, 9.464, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1473.0, 1.0, 81.1914, 14.9432, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1545.0, 1.0, 32.3769, 7.4716, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1555.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1556.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1557.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1558.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1559.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1560.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1561.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1562.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1563.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1564.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1565.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1566.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1567.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1568.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1569.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1570.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1571.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1572.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1573.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1574.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 
],\n\t\t[1575.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1576.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1577.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1578.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1579.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1580.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1581.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1582.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1583.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1584.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1585.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1586.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1587.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1588.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1589.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1590.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1591.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1592.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1593.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1594.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1595.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1596.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1597.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1598.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1599.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1600.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1601.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1602.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1603.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1604.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1605.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1606.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1607.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1608.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1609.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1610.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1611.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1612.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1613.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1614.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1615.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 
1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1616.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1617.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1618.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1619.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1620.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1621.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1622.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1623.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1624.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1625.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1626.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1627.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1628.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1629.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1630.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1631.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1632.0, 2.0, 3.4867, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1633.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1634.0, 2.0, 3.4867, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1635.0, 1.0, 149.432, 17.8372, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1641.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1642.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1643.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1644.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1645.0, 2.0, 2.4905, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 6.3, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1646.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1647.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1648.0, 2.0, 3.4867, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1649.0, 2.0, 1.7434, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1650.0, 2.0, 3.4867, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1651.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1652.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1653.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1654.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1655.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1656.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1657.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1658.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1659.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1660.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 
0.6, 10 ],\n\t\t[1661.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1662.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1663.0, 3.0, 29.8864, 5.4792, 0.0, 0.0, 1.0, 1.0, 0.0, 27.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1664.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1665.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1666.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1667.0, 2.0, 21.9665, 6.2761, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1668.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1669.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1670.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1671.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1672.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1673.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1674.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1675.0, 2.0, 7.8452, 2.6151, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1676.0, 2.0, 7.8452, 2.6151, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1677.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1678.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1679.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1680.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1681.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1682.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1683.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1684.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1685.0, 2.0, 4.7071, 2.092, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1686.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1687.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1688.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1689.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1690.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1691.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1692.0, 2.0, 29.8864, 5.4792, 0.0, 0.0, 1.0, 1.0, 0.0, 27.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1693.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1694.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1695.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1696.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1697.0, 2.0, 14.9432, 4.483, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1698.0, 2.0, 14.9432, 4.483, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1699.0, 2.0, 14.9432, 4.483, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1700.0, 2.0, 4.9811, 1.8131, 0.0, 
0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1701.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1702.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1703.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1704.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1705.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1706.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 16.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1707.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1708.0, 2.0, 3.9849, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1709.0, 2.0, 3.9849, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1710.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1711.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1712.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1713.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1714.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1715.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1716.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1717.0, 2.0, 23.9091, 4.4929, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1718.0, 2.0, 23.9091, 4.4929, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1719.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1720.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1721.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1722.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1723.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1724.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1725.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1726.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1727.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1728.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1729.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1730.0, 2.0, 5.9773, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1731.0, 2.0, 5.9773, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1732.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1733.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1734.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1735.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1736.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1737.0, 2.0, 5.9773, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1738.0, 2.0, 5.9773, 1.7932, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1739.0, 1.0, 0.0, 
0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1740.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1741.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1742.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1743.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1744.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 22.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1745.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 22.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1746.0, 2.0, 54.7917, 17.4337, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1747.0, 2.0, 4.9811, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1748.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1749.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1750.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1751.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1752.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1754.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1755.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1756.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1757.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1758.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1759.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1760.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1761.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1762.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1763.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1764.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1765.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1766.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1767.0, 1.0, 49.8107, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1768.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1769.0, 1.0, 49.8107, 3.1181, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1770.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1771.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1772.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1773.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1774.0, 1.0, 27.3959, 3.5764, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1775.0, 1.0, 49.8107, 3.1181, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1776.0, 1.0, 24.9053, 3.2526, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1777.0, 1.0, 42.3391, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1778.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 
],\n\t\t[1779.0, 1.0, 24.9053, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1780.0, 1.0, 74.716, 11.0879, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1781.0, 1.0, 27.3959, 3.5764, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1782.0, 1.0, 25.9016, 3.3822, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1783.0, 1.0, 25.9016, 3.3822, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1784.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1785.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1786.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1787.0, 1.0, 27.3959, 10.9584, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1788.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1789.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1790.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1791.0, 1.0, 166.2382, 50.4632, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1792.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1793.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1794.0, 1.0, 19.9243, 4.9811, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1795.0, 1.0, 19.6304, 2.7794, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1796.0, 1.0, 49.8107, 16.9456, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1797.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1798.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1799.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1800.0, 1.0, 51.8031, 17.623, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1801.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1802.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1803.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1804.0, 1.0, 36.4315, 21.8619, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1805.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1806.0, 1.0, 13.4638, -5.0707, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1807.0, 1.0, 49.8107, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1808.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1809.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1810.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1811.0, 1.0, 0.0, 0.0, 0.0, -2.40000384, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1812.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1813.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1814.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1815.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1816.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1817.0, 1.0, 4.9313, 0.8468, 0.0, 0.0, 1.0, 1.0, 
0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1818.0, 1.0, 41.0241, 6.2313, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1819.0, 1.0, 2.4507, 0.6127, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1820.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1821.0, 1.0, 28.6561, 6.4256, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1822.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1823.0, 1.0, 24.9053, 16.9456, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1824.0, 1.0, 27.0472, 4.6822, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1825.0, 1.0, 4.732, 0.8468, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1826.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1827.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1828.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1829.0, 1.0, 119.5855, 24.9302, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1830.0, 1.0, 13.947, 0.9962, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1831.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1832.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1833.0, 1.0, 54.7917, 17.9318, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1834.0, 1.0, 0.0, 0.0, 0.0, -1.4999925, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1835.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1836.0, 1.0, 23.7248, 6.7792, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1837.0, 1.0, 34.9023, -1.056, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1838.0, 1.0, 3.7507, 0.8966, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1839.0, 1.0, 11.3568, 4.2339, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1840.0, 1.0, 30.8727, 6.321, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1841.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1842.0, 1.0, 38.3542, 6.6846, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1843.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1844.0, 1.0, 14.9432, 16.9456, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1845.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1846.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1847.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1848.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1849.0, 1.0, 0.0, 0.0, 0.0, 5.74999045, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1850.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1851.0, 1.0, 0.0, 0.0, 0.0, -1.20000048, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1852.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1853.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1854.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1855.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 
0.95, 0.6, 10 ],\n\t\t[1856.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1857.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1858.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1859.0, 1.0, 28.3921, 9.464, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1860.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1861.0, 1.0, 49.6264, 10.1514, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1862.0, 1.0, 0.0, 0.0, 0.0, 0.64800415, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1863.0, 1.0, 0.0, 0.0, 0.0, -3.8340098, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1864.0, 1.0, 0.0, 0.0, 0.0, -1.97550375, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1865.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1866.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1867.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1868.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1869.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1870.0, 1.0, 4.2837, 0.6127, 0.0, 0.0, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1871.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1872.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1873.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1874.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1875.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1876.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1877.0, 1.0, 0.0, 0.0, 0.0, -1.7999964, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1878.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1879.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1880.0, 1.0, 0.0, 0.0, 0.0, 0.599988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1881.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1882.0, 1.0, 0.0, 0.0, 0.0, -1.20000048, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1883.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1884.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1885.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1886.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1887.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1888.0, 1.0, 5.9075, 0.8816, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1889.0, 1.0, 0.0, 0.0, 0.0, -0.6000024, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1890.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1891.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1892.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1893.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1894.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 
0.6, 10 ],\n\t\t[1895.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1896.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1897.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1898.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1899.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1900.0, 1.0, 42.882, 2.9388, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1901.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1902.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1903.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1904.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1905.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1906.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1907.0, 1.0, 43.3353, 10.8089, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1908.0, 1.0, 18.43, 4.0845, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1909.0, 1.0, 28.2427, 11.2971, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1910.0, 1.0, 34.8675, 12.2036, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1911.0, 1.0, 56.4853, 11.2971, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1912.0, 1.0, 26.5989, 6.9237, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1913.0, 1.0, 62.3281, -1.7982, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1914.0, 1.0, 12.8661, 4.1642, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1915.0, 1.0, 16.8759, 5.4244, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1916.0, 1.0, 27.3959, 12.4278, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1917.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1918.0, 1.0, 103.6062, 25.3835, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1919.0, 1.0, 32.8252, -20.9703, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1920.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1921.0, 1.0, 37.5423, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1922.0, 1.0, 34.5487, 13.0056, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1923.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1924.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1925.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1926.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1927.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1928.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1929.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1930.0, 1.0, 0.0, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1931.0, 1.0, 54.7917, 3.427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1932.0, 1.0, 29.0944, 10.52, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 
],\n\t\t[1933.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1934.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1935.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1936.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1937.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1938.0, 1.0, 16.5371, 4.8316, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1939.0, 1.0, 82.9896, 12.901, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1940.0, 1.0, 44.4809, 4.6822, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1941.0, 1.0, 52.1119, 12.6419, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1942.0, 1.0, 120.5418, 37.8063, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1943.0, 1.0, 29.8914, 5.1305, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1944.0, 1.0, 74.3375, 5.8029, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1945.0, 1.0, 28.2427, 11.2971, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1946.0, 1.0, 78.0533, 12.1538, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1947.0, 1.0, 73.919, 12.104, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1948.0, 1.0, 95.0388, 31.6796, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1949.0, 1.0, 37.0591, -0.4483, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1950.0, 1.0, 80.8427, 22.0163, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1951.0, 1.0, 66.5172, 16.2134, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1952.0, 1.0, 3.417, 0.6226, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1953.0, 1.0, 19.2369, 5.9424, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1954.0, 1.0, 66.2482, 9.464, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1955.0, 1.0, 49.8107, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1956.0, 1.0, 11.1576, 3.6362, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1957.0, 1.0, 0.0, 0.0, 0.0, -2.3999952, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1958.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1959.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1960.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1961.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1962.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1963.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1964.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1965.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1966.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1967.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1968.0, 1.0, 86.5909, 5.2799, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1969.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1970.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 
10 ],\n\t\t[1971.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1972.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1973.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1974.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1975.0, 1.0, 0.0, 0.0, 0.0, -1.08843537, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1976.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1977.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1978.0, 1.0, 109.0953, 12.6918, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1979.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1980.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1981.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1982.0, 1.0, 9.3146, 3.3373, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1983.0, 1.0, 23.8593, 10.51, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1984.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1985.0, 1.0, 145.2479, 58.6371, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1986.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1987.0, 1.0, 0.0, 0.0, 0.0, -1.23967967, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1988.0, 1.0, 97.081, 18.3801, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1989.0, 1.0, 36.8599, 12.9508, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1990.0, 1.0, 59.7778, 22.0811, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1991.0, 1.0, 78.407, 30.7382, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1992.0, 1.0, 62.2633, 7.5712, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1993.0, 1.0, 27.4457, 12.8512, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1994.0, 1.0, 58.2287, 9.9123, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1995.0, 1.0, 53.4967, 17.0851, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1996.0, 1.0, 0.0, 0.0, 0.0, -2.999994, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1997.0, 1.0, 0.0, 0.0, 0.0, -1.7999964, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1998.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[1999.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2000.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2001.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2002.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2003.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2004.0, 1.0, 53.7457, 12.7515, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2005.0, 1.0, 18.8284, 3.1879, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2006.0, 1.0, 86.1725, 25.4533, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2007.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2008.0, 1.0, 62.1139, 7.6161, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 
],\n\t\t[2009.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2010.0, 1.0, 0.0, 0.0, 0.0, 13.8608871, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2011.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2012.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2013.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2014.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2015.0, 1.0, 68.6889, 2.3311, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2016.0, 1.0, 40.0428, 7.1478, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2017.0, 1.0, 0.0, 0.0, 0.0, 0.599988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2018.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2019.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2020.0, 1.0, 23.1769, 7.1777, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2021.0, 1.0, 54.5726, 8.4927, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2022.0, 1.0, 0.0, 0.0, 0.0, 1.29600829, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2023.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2024.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2025.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2026.0, 1.0, 47.9179, 4.8814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2027.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2028.0, 1.0, 89.3105, 14.993, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2029.0, 1.0, 39.8485, 12.7515, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2030.0, 1.0, 55.788, 1.4943, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2031.0, 1.0, 0.0, 0.0, 0.0, -0.9000009, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2032.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2033.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2034.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2035.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2036.0, 1.0, 58.3283, 11.5561, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2037.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2038.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2039.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2040.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2041.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2042.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2043.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2044.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2045.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2046.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2047.0, 1.0, 64.7041, -8.9609, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 
0.95, 0.6, 10 ],\n\t\t[2048.0, 1.0, 7.4068, 1.7085, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2049.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2050.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2051.0, 1.0, 64.7539, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2052.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2053.0, 1.0, 157.6508, 30.8826, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2054.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2055.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2056.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2057.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2058.0, 1.0, 49.0735, 6.0669, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2059.0, 1.0, 41.049, 7.9647, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2060.0, 1.0, 121.4335, 42.2146, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2061.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2062.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2063.0, 1.0, 55.1902, 10.7093, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2064.0, 1.0, 27.7794, 5.6435, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2065.0, 1.0, 53.148, 14.7938, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2066.0, 1.0, 82.6857, 12.9508, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2067.0, 1.0, 77.6548, 15.0428, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2068.0, 1.0, 54.2936, 6.1765, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2069.0, 1.0, 98.9539, 18.4648, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2070.0, 1.0, 133.5424, 30.6834, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2071.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2072.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2073.0, 1.0, 67.7077, 29.5776, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2074.0, 1.0, 47.6688, 15.4911, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2075.0, 1.0, 93.146, 23.6601, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2076.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2077.0, 1.0, 0.0, 0.0, 0.0, 0.900009, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2078.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2079.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2080.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2081.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2082.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2083.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2084.0, 1.0, 51.9027, 13.2994, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2085.0, 1.0, 27.6947, 10.4104, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 
1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2086.0, 1.0, 42.1398, 8.8165, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2087.0, 1.0, 70.9304, 21.8171, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2088.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2089.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2090.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2091.0, 1.0, 64.6543, -7.4517, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2092.0, 1.0, 69.3365, 22.8631, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2093.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2094.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2095.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2096.0, 1.0, 5.6485, 2.0522, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2097.0, 1.0, 51.2054, 20.273, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2098.0, 1.0, 48.7647, 16.9855, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2099.0, 1.0, 50.9015, 11.0679, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2100.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2101.0, 1.0, 94.3514, 26.9575, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2102.0, 1.0, 112.9158, 39.5796, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2103.0, 1.0, 81.9137, 8.2736, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2104.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2105.0, 1.0, 164.2607, 52.7993, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2106.0, 1.0, 38.4389, 1.4794, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2107.0, 1.0, 39.8834, 13.8474, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2108.0, 1.0, 190.3764, 34.0207, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2109.0, 1.0, 150.4283, 20.9205, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2110.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2111.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2112.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2113.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2114.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2115.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2116.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2117.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2118.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2119.0, 1.0, 16.4873, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2120.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2121.0, 1.0, 191.7711, 43.8334, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2122.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2123.0, 1.0, 61.5859, 18.7936, 5e-07, -5e-07, 1.0, 1.0, 
0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2124.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2125.0, 1.0, 122.1308, 38.409, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2126.0, 1.0, 151.6237, 23.6601, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2127.0, 1.0, 79.5975, 21.6178, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2128.0, 1.0, 89.2558, 8.9111, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2129.0, 1.0, 8.0942, 3.2128, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2130.0, 1.0, 68.2406, 16.4375, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2131.0, 1.0, 0.3835, 1.2202, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2132.0, 1.0, 59.992, 17.2893, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2133.0, 1.0, 107.3918, 3.2875, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2134.0, 1.0, 44.8296, 11.4565, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2135.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2136.0, 1.0, 0.0, 0.0, 0.0, -1.23967967, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2137.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2138.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2139.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2140.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2141.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2142.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2143.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2144.0, 1.0, 0.0, 0.0, 0.0, -1.500015, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2145.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2146.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2147.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2148.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2149.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2150.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2151.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2152.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2153.0, 1.0, 68.4947, 22.1658, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2154.0, 1.0, 53.4469, 6.1267, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2155.0, 1.0, 103.1579, 20.8209, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2156.0, 1.0, 34.469, 7.9697, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2157.0, 1.0, 19.6254, 11.2572, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2158.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2159.0, 1.0, 25.9514, 5.778, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2160.0, 1.0, 38.9021, 11.2074, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2161.0, 1.0, 131.301, 
20.5718, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2162.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2163.0, 1.0, 92.4187, 11.2423, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2164.0, 1.0, 59.6732, 5.1305, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2165.0, 1.0, 21.9167, 2.0422, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2166.0, 1.0, 88.1151, 21.6178, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2167.0, 1.0, 43.2357, 8.9659, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2168.0, 1.0, 52.8392, 13.7776, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2169.0, 1.0, 101.2253, 7.7705, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2170.0, 1.0, 102.61, 16.9356, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2171.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2172.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2173.0, 1.0, 79.468, 19.1173, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2174.0, 1.0, 160.6394, 41.0938, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2175.0, 1.0, 108.8363, 40.5957, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2176.0, 1.0, 122.5343, 2.5403, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2177.0, 1.0, 103.4568, 16.7862, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2178.0, 1.0, 126.7184, 36.9595, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2179.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2180.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2181.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2182.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2183.0, 1.0, 38.3841, 8.4977, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2184.0, 1.0, 63.7676, 9.8924, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2185.0, 1.0, 64.0565, 24.6563, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2186.0, 1.0, 89.6592, 16.5371, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2187.0, 1.0, 109.5835, 24.6563, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2188.0, 1.0, 108.8861, 26.3996, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2189.0, 1.0, 48.6152, 5.4294, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2190.0, 1.0, 71.0798, 5.0309, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2191.0, 1.0, 73.919, 21.1197, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2192.0, 1.0, 89.4102, -1.6587, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2193.0, 1.0, 121.04, 14.4451, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2194.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2195.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2196.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2197.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 
],\n\t\t[2198.0, 1.0, 139.0913, 33.8264, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2199.0, 1.0, 29.7868, 8.1341, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2200.0, 1.0, 63.0454, 14.9183, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2201.0, 1.0, 130.235, 20.5668, 1e-07, -9.9e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2202.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2203.0, 1.0, 28.8902, 7.9697, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2204.0, 1.0, 131.9983, 46.3239, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2205.0, 1.0, 18.9281, 3.9849, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2206.0, 1.0, 78.402, 29.7868, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2207.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2208.0, 1.0, 28.5415, 9.1153, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2209.0, 1.0, 83.4329, 32.9747, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2210.0, 1.0, 31.7792, 14.993, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2211.0, 1.0, 35.7143, 5.9275, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2212.0, 1.0, 38.4538, 11.1576, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2213.0, 1.0, 15.9892, 4.3335, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2214.0, 1.0, 106.4952, 35.2161, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2215.0, 1.0, 57.083, 16.039, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2216.0, 1.0, 40.4961, 11.6557, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2217.0, 1.0, 160.3904, 52.5503, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2218.0, 1.0, 41.3429, 17.4338, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2219.0, 1.0, 24.9053, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2220.0, 1.0, 67.1946, 14.1462, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2221.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2222.0, 1.0, 73.5604, 10.8288, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2223.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2224.0, 1.0, 69.4859, 10.3108, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2225.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2226.0, 1.0, 90.6554, 41.3428, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2227.0, 1.0, 104.6024, 41.841, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2228.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2229.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2230.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2231.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2232.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2233.0, 1.0, 51.5541, 4.8316, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],\n\t\t[2234.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 220.0, 2.0, 
1.1, 0.95, 0.6, 10 ]\n\t])\n\tppc[\"gen\"] = array([\n\t\t[1634.0, 40.0, 44.7, 68.2, 0.0, 1.07, 100.0, 1.0, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 129.41, 22.0, 33.0, 33.0, 44.0 ],\n\t\t[1632.0, 60.0, 43.6, 68.2, 0.0, 1.07, 100.0, 0.0, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 129.41, 22.0, 33.0, 33.0, 44.0 ],\n\t\t[1629.0, 90.0, 40.8, 77.46, 0.0, 1.07, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 25.0, 37.5, 37.5, 50.0 ],\n\t\t[1685.0, 154.8, 75.3, 80.0, 0.0, 1.07, 100.0, 1.0, 157.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],\n\t\t[1706.0, 282.3, 96.3, 185.9, 0.0, 1.07, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.0, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1747.0, 79.0, 23.2, 41.5, 0.0, 1.0, 100.0, 0.0, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 88.8888, 15.0, 22.5, 22.5, 30.0 ],\n\t\t[1746.0, 77.8, 18.4, 41.5, 0.0, 1.0, 100.0, 0.0, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 88.8888, 15.0, 22.5, 22.5, 30.0 ],\n\t\t[31.0, 100.0, 12.6, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],\n\t\t[30.0, 100.0, 12.6, 62.0, 0.0, 1.0, 100.0, 0.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],\n\t\t[23.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 7.9312, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[4.0, 7.1, 1.8, 62.0, 0.0, 1.0, 100.0, 0.0, 27.9515, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1666.0, 193.0, 107.7, 185.9, 0.0, 1.0, 100.0, 1.0, 367.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.7, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1665.0, 264.8, 115.6, 185.9, 0.0, 1.0, 100.0, 1.0, 367.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.7, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1745.0, 234.1, 26.6, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1744.0, 231.6, 46.9, 216.9, 0.0, 1.02, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1743.0, 258.5, 46.6, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1742.0, 263.3, 101.2, 216.9, 0.0, 1.02, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1664.0, 350.0, 34.0, 216.9, 0.0, 1.015, 100.0, 0.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[26.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 22.0193, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[28.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 20.6181, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[19.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 14.8422, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1741.0, 283.9, 41.3, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1740.0, 262.8, 32.8, 216.9, 0.0, 1.03, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1670.0, 219.8, 92.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 
90.0, 120.0 ],\n\t\t[1669.0, 299.8, 103.9, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1687.0, 297.4, 102.2, 185.9, 0.0, 1.01, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1686.0, 297.7, 86.4, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1729.0, 266.4, 133.3, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1728.0, 225.0, 140.2, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1696.0, 209.0, 112.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1695.0, 209.0, 89.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1690.0, 133.1, 0.0, 88.0, 0.0, 1.0, 100.0, 1.0, 30.695999999999998,0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1659.0, 22.2, -0.9, 62.0, 0.0, 1.0, 100.0, 1.0, 13.7135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1738.0, 134.2, 51.3, 50.0, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1737.0, 155.4, 40.6, 50.0, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1707.0, 264.3, 28.2, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1752.0, 254.3, 31.4, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[13.0, 90.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1703.0, 93.2, 0.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[1702.0, 144.4, 17.6, 123.9, 0.0, 1.0, 100.0, 0.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[1704.0, 107.3, 0.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[1705.0, 107.7, 9.9, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[34.0, 30.0, 20.0, 35.0, 0.0, 1.003, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 40.0, 6.0, 9.0, 9.0, 12.0 ],\n\t\t[33.0, 30.0, 20.0, 35.0, 0.0, 1.0, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 40.0, 6.0, 9.0, 9.0, 12.0 ],\n\t\t[1678.0, 257.9, 99.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1677.0, 128.6, 88.6, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1655.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 0.0, 0.2438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[27.0, 48.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 4.949, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1657.0, 90.0, 19.0, 62.0, 0.0, 
1.0, 100.0, 0.0, 2.2975, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1650.0, 1068.2, 202.5, 600.0, 0.0, 1.0, 100.0, 1.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1278.0, 223.6, 335.4, 335.4, 447.2 ],\n\t\t[1648.0, 1000.0, 300.0, 600.0, 0.0, 1.0, 100.0, 1.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1277.778, 230.0, 345.0, 345.0, 460.0 ],\n\t\t[35.0, 1118.0, 300.0, 600.0, 0.0, 1.0, 100.0, 0.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1278.0, 223.6, 335.4, 335.4, 447.2 ],\n\t\t[1682.0, 246.6, 95.4, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],\n\t\t[1681.0, 275.9, 100.9, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],\n\t\t[2116.0, 58.3, 2.4, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],\n\t\t[2114.0, 67.9, 2.3, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],\n\t\t[2113.0, 67.0, 4.7, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],\n\t\t[2112.0, 32.2, 5.0, 5.0, 0.0, 1.0, 100.0, 0.0, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 41.14, 7.2, 10.8, 10.8, 14.4 ],\n\t\t[2110.0, 32.6, 5.4, 5.0, 0.0, 1.0, 100.0, 0.0, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 41.14, 7.2, 10.8, 10.8, 14.4 ],\n\t\t[1736.0, 30.2, 5.9, 20.0, 0.0, 1.0, 100.0, 0.0, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 49.412, 8.4, 12.6, 12.6, 16.8 ],\n\t\t[1735.0, 30.8, 6.3, 20.0, 0.0, 1.0, 100.0, 0.0, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 49.412, 8.4, 12.6, 12.6, 16.8 ],\n\t\t[1734.0, 200.0, 88.0, 123.9, 0.0, 1.0, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1733.0, 200.0, 123.9, 123.9, 0.0, 1.03, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1732.0, 130.3, 19.7, 123.9, 0.0, 1.0, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1694.0, 212.5, 27.6, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1693.0, 215.3, 38.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[25.0, 48.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 1.3553, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1701.0, 472.5, 159.0, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1700.0, 563.6, 210.1, 290.6, 0.0, 1.03, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1652.0, 50.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2813, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1645.0, 50.0, 20.0, 60.0, 0.0, 1.03, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],\n\t\t[24.0, 50.0, 20.0, 60.0, 0.0, 1.03, 100.0, 0.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],\n\t\t[1656.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 1.0, 3.1519999999999997,0.0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[14.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1679.0, 140.0, 9.6, 62.0, 0.0, 1.0, 100.0, 1.0, 14.6025, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[116.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.3821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[18.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 2.6694, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[17.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 1.217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[16.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 13.5941, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[15.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1612.0, 80.6, 23.4, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],\n\t\t[1609.0, 85.9, 28.5, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],\n\t\t[1691.0, 100.8, 44.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.471, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[1662.0, 106.9, 43.8, 123.9, 0.0, 1.0, 100.0, 0.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.471, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[1731.0, 119.9, 64.6, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1730.0, 121.8, 59.9, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1649.0, 200.0, 180.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[32.0, 200.0, 34.0, 216.9, 0.0, 1.015, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],\n\t\t[1651.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1653.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1654.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1674.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[20.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 2.5111, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1668.0, 600.0, 283.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1727.0, 200.0, 54.0, 130.1, 0.0, 0.98, 100.0, 0.0, 210.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 247.06, 42.0, 63.0, 63.0, 84.0 ],\n\t\t[1726.0, 120.7, 61.9, 123.9, 0.0, 0.98, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1697.0, 450.0, 154.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 
180.0, 240.0 ],\n\t\t[1643.0, 345.0, 100.0, 62.0, 0.0, 1.0, 100.0, 0.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1725.0, 142.8, 36.0, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1724.0, 138.7, 67.0, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1710.0, 128.8, 69.5, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.294, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1672.0, 184.5, 123.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1671.0, 181.3, 127.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1723.0, 34.9, 3.9, 20.0, 0.0, 1.0, 100.0, 0.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],\n\t\t[1722.0, 90.0, 1.0, 50.0, 0.0, 1.01, 100.0, 1.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],\n\t\t[1721.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],\n\t\t[1720.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],\n\t\t[1719.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],\n\t\t[1646.0, 125.0, 40.0, 80.0, 0.0, 1.03, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],\n\t\t[1647.0, 125.0, 40.0, 80.0, 0.0, 1.03, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],\n\t\t[1676.0, 159.5, 85.5, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1675.0, 159.5, 79.9, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],\n\t\t[1718.0, 610.2, 90.7, 387.5, 0.0, 1.0, 100.0, 1.0, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 888.89, 160.0, 240.0, 240.0, 320.0 ],\n\t\t[1717.0, 574.5, 167.0, 387.5, 0.0, 1.0, 100.0, 1.0, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 888.89, 160.0, 240.0, 240.0, 320.0 ],\n\t\t[1692.0, 1004.3, 224.5, 484.0, 0.0, 1.0, 100.0, 1.0, 1000.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1120.0, 201.6, 302.4, 302.4, 403.2 ],\n\t\t[1663.0, 814.4, 190.8, 484.0, 0.0, 1.0, 100.0, 1.0, 1000.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1120.0, 201.6, 302.4, 302.4, 403.2 ],\n\t\t[1709.0, 105.1, 50.2, 77.46, 0.0, 1.03, 100.0, 1.0, 135.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 27.0, 40.5, 40.5, 54.0 ],\n\t\t[1708.0, 101.3, 47.1, 77.46, 0.0, 1.03, 100.0, 1.0, 135.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 27.0, 40.5, 40.5, 54.0 ],\n\t\t[5.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 1.0, 26.8411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[29.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 9.9352, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[2042.0, 39.5, 8.5, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],\n\t\t[2040.0, 38.7, 4.5, 20.0, 0.0, 1.0, 
100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],\n\t\t[2039.0, 39.0, 4.8, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],\n\t\t[2037.0, 40.1, 6.6, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],\n\t\t[1599.0, 50.0, 27.0, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 60.0, 10.0, 15.0, 15.0, 20.0 ],\n\t\t[1597.0, 50.0, 27.0, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 60.0, 10.0, 15.0, 15.0, 20.0 ],\n\t\t[1661.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 8.0792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1699.0, 597.1, 168.2, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1698.0, 551.0, 167.2, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1714.0, 213.5, 57.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1713.0, 235.0, 71.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1716.0, 222.7, 53.2, 185.9, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1715.0, 202.3, 59.3, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1680.0, 20.6, 6.6, 4.95, -0.0, 1.0, 100.0, 1.0, 13.9289, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 49.5, 9.9, 14.85, 14.85, 19.8 ],\n\t\t[1658.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 25.5205, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[21.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 14.9237, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1667.0, 594.9, 157.8, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1673.0, 600.0, 137.0, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1712.0, 256.7, 92.1, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1711.0, 256.7, 75.7, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],\n\t\t[1749.0, 564.0, 103.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1748.0, 543.0, 116.0, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1684.0, 235.0, 80.0, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],\n\t\t[1683.0, 234.4, 74.8, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],\n\t\t[22.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 1.0, 14.9237, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1660.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 27.6718, 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1689.0, 114.9, -7.7, 62.0, 0.0, 1.0, 100.0, 1.0, 7.0619, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[117.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 28.0952, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[110.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 20.2656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[108.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 12.3172, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1688.0, 91.2, -3.3, 62.0, 0.0, 1.0, 100.0, 1.0, 6.6153, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[118.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.5386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[111.0, 50.0, 10.0, 62.0, 0.0, 1.0, 100.0, 0.0, 5.6076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[107.0, 50.0, 10.0, 62.0, 0.0, 1.0, 100.0, 0.0, 11.3177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],\n\t\t[1751.0, 497.9, 119.0, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],\n\t\t[1750.0, 506.0, 142.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ]\n\t])\n\tppc[\"branch\"] = array([\n\t\t[1418.0, 2021.0, 0.000709, 0.03936, 0.0061, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[541.0, 2024.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[540.0, 2024.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1545.0, 1418.0, 0.00764, 0.040964, 0.06498, 70.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1545.0, 1418.0, 0.007179, 0.042257, 0.064288, 70.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1545.0, 2021.0, 0.0124, 0.0812, 0.1232, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[542.0, 1960.0, 0.001528, 0.02064, 2.0724, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[539.0, 1960.0, 0.00172, 0.02296, 2.21372, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2234.0, 2233.0, 0.0, 0.187, 0.281, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1870.0, 1871.0, 0.0055, 0.2, 0.3, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1821.0, 1804.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1821.0, 1804.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1821.0, 1913.0, 0.002785, 0.020342, 0.06345, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1821.0, 1913.0, 0.002804, 0.020317, 0.063616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2194.0, 2193.0, 0.0007, 0.0031, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2194.0, 2193.0, 0.0007, 0.0031, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1869.0, 2170.0, 0.0, 0.0001, 0.0002, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 2231.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1962.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1988.0, 0.00046, 0.003737, 0.012788, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1988.0, 0.000424, 0.003818, 0.01291, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1993.0, 0.001928, 
0.011229, 0.034974, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1993.0, 0.001775, 0.011229, 0.034426, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1824.0, 0.00242, 0.01694, 0.049586, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1824.0, 5e-06, 3.5e-05, 2.4e-05, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1839.0, 0.000545, 0.004212, 0.013316, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2232.0, 1839.0, 0.000541, 0.004268, 0.013416, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1966.0, 1965.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1966.0, 1961.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1966.0, 2034.0, 0.000436, 0.005137, 0.500594, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1763.0, 2099.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2192.0, 1782.0, 0.002004, 0.011367, 0.016964, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2192.0, 1840.0, 0.001859, 0.011245, 0.03521, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2192.0, 1840.0, 0.001995, 0.011437, 0.033768, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1794.0, 2208.0, 0.002049, 0.019073, 0.054854, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1794.0, 2026.0, 0.004879, 0.030837, 0.09544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1796.0, 2220.0, 0.001408, 0.006842, 0.024408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1796.0, 2220.0, 0.001394, 0.006874, 0.024286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 1999.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 1998.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2153.0, 0.008206, 0.048173, 0.133258, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2153.0, 0.007348, 0.042683, 0.114282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2152.0, 0.007455, 0.049655, 0.13954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 1776.0, 0.007141, 0.033921, 0.09508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2065.0, 0.0017, 0.0076, 0.0198, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2065.0, 0.0018, 0.00704, 0.0182, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2004.0, 0.0041, 0.0196, 0.0546, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 1989.0, 0.005358, 0.0248, 0.0503, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 1989.0, 0.004066, 0.021045, 0.057736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 2036.0, 0.0139, 0.0491, 0.1352, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2000.0, 1931.0, 0.001403, 0.007678, 0.020786, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 2002.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 2001.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 115.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 1970.0, 0.000812, 0.015612, 1.68775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 1972.0, 0.000816, 0.015984, 1.68775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 1789.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2003.0, 483.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[115.0, 109.0, 0.001236, 0.013293, 
1.480528, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2191.0, 1837.0, 0.001635, 0.012705, 0.037662, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2191.0, 1818.0, 0.01022, 0.042629, 0.06611, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2226.0, 2210.0, 0.001173, 0.005248, 0.008748, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2226.0, 2190.0, 0.00036, 0.0073, 0.0134, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2189.0, 2188.0, 0.0023, 0.0078, 0.0138, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2189.0, 1907.0, 0.002424, 0.014193, 0.040774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2189.0, 2187.0, 0.007996, 0.039339, 0.110062, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2186.0, 2217.0, 0.0055, 0.0238, 0.0364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2186.0, 1956.0, 0.002, 0.01, 0.016, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2186.0, 2185.0, 0.0028, 0.0141, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2219.0, 2218.0, 0.002676, 0.015582, 0.050366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2219.0, 2218.0, 0.002791, 0.015447, 0.050366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1796.0, 0.001819, 0.009567, 0.03228, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1796.0, 0.00179, 0.009574, 0.03228, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2219.0, 0.001167, 0.006646, 0.023698, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2219.0, 0.001154, 0.006607, 0.023536, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2215.0, 0.0029, 0.0172, 0.0498, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2215.0, 0.003, 0.0174, 0.0496, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1947.0, 0.00434, 0.02042, 0.09428, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2216.0, 0.0005, 0.00293, 0.008814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2216.0, 0.0005, 0.00293, 0.008814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1938.0, 0.001983, 0.0125, 0.038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2217.0, 0.0026, 0.0159, 0.045, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2217.0, 0.0025, 0.0156, 0.04604, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1956.0, 0.001996, 0.015004, 0.049722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1956.0, 0.001942, 0.015223, 0.048658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 2214.0, 0.00705, 0.0366, 0.0638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1970.0, 122.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1970.0, 2032.0, 0.001038, 0.010782, 0.99978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1972.0, 112.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1972.0, 1970.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1972.0, 1971.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1972.0, 2034.0, 0.000863, 0.008857, 0.583716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[122.0, 121.0, 0.000863, 0.008857, 0.583716, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1898.0, 1970.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1898.0, 122.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1898.0, 120.0, 0.001351, 0.015445, 
1.51142, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1896.0, 1972.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1896.0, 1897.0, 0.001355, 0.017948, 1.76, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2184.0, 2169.0, 0.002551, 0.012, 0.032826, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2184.0, 2169.0, 0.002288, 0.012288, 0.051244, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2203.0, 2134.0, 0.0149, 0.0858, 0.1412, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2203.0, 1949.0, 0.0105, 0.05925, 0.0525, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2203.0, 2208.0, 0.00447, 0.02537, 0.03784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2183.0, 2222.0, 0.001446, 0.009469, 0.030074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2212.0, 1473.0, 0.0218, 0.0638, 0.066, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2212.0, 1831.0, 0.004731, 0.023671, 0.047954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2212.0, 2097.0, 0.003778, 0.017949, 0.05031, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2212.0, 2182.0, 0.0035, 0.0205, 0.0556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2212.0, 2182.0, 0.007552, 0.0302, 0.046742, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2212.0, 1909.0, 0.004017, 0.028224, 0.081516, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2181.0, 57.0, 1e-06, 1e-06, 2e-06, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2181.0, 2209.0, 0.0143, 0.075, 0.1148, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2181.0, 2180.0, 0.0006, 0.0032, 0.005, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2181.0, 2179.0, 0.0052, 0.0259, 0.038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 1912.0, 0.0004, 0.003044, 0.009322, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 1912.0, 0.0004, 0.003044, 0.009322, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 2155.0, 0.000856, 0.006515, 0.019094, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 2155.0, 0.000856, 0.006515, 0.019094, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 2224.0, 0.00164, 0.012482, 0.036582, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 2224.0, 0.00164, 0.012482, 0.036582, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 2030.0, 0.001344, 0.010229, 0.02998, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 2030.0, 0.001344, 0.010229, 0.02998, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 1940.0, 0.001313, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 1940.0, 0.001313, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1772.0, 1771.0, 0.000697, 0.008904, 0.966246, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1772.0, 1771.0, 0.000697, 0.008904, 0.966246, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1944.0, 42.0, 0.003347, 0.019091, 0.05291, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1944.0, 1888.0, 0.00452, 0.021267, 0.06035, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1944.0, 1888.0, 0.0033, 0.021, 0.061034, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[40.0, 2157.0, 0.002254, 0.015419, 0.044362, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1988.0, 1985.0, 0.0004, 0.0018, 0.0044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1988.0, 1985.0, 0.0004, 0.0018, 0.0044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 
],\n\t\t[1988.0, 2193.0, 0.0003, 0.0017, 0.004, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1988.0, 2193.0, 0.0003, 0.0025, 0.005, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1988.0, 2090.0, 0.0019, 0.0086, 0.0214, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1988.0, 2087.0, 0.0008, 0.0055, 0.0142, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 2226.0, 0.002291, 0.017079, 0.050654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 2226.0, 0.00258, 0.018126, 0.05235, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 1856.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 2227.0, 0.004044, 0.029321, 0.090328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 2227.0, 0.003984, 0.029357, 0.09127, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 2074.0, 0.001113, 0.006391, 0.02179, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1924.0, 2074.0, 0.001088, 0.006441, 0.021698, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1813.0, 1928.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1812.0, 1924.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 1970.0, 0.0012, 0.015315, 1.662034, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 1972.0, 0.0012, 0.015315, 1.662034, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 1855.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 1790.0, 0.0005, 0.009109, 0.977482, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 1790.0, 0.000499, 0.009108, 0.977482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 2034.0, 0.000494, 0.009033, 0.96659, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1928.0, 2024.0, 0.000363, 0.006412, 0.672766, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1912.0, 2155.0, 0.000721, 0.003805, 0.023416, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2177.0, 2175.0, 0.0018, 0.0107, 0.0208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2177.0, 2175.0, 0.0013, 0.0109, 0.0364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2177.0, 2174.0, 0.003659, 0.01587, 0.045896, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2177.0, 2176.0, 0.001, 0.004, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2177.0, 2176.0, 0.0009, 0.0039, 0.00888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2173.0, 2171.0, 0.0049, 0.0203, 0.0352, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2173.0, 2172.0, 0.0014, 0.0089, 0.0272, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1810.0, 1939.0, 0.000764, 0.005558, 0.06534, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1810.0, 2202.0, 0.001198, 0.009194, 0.095348, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2171.0, 2168.0, 0.002645, 0.016233, 0.122918, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2171.0, 1829.0, 0.000831, 0.007075, 0.049208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2171.0, 2169.0, 0.0006, 0.0048, 0.0144, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2171.0, 2169.0, 0.0007, 0.005, 0.0146, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2171.0, 1941.0, 0.0005, 0.003, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1809.0, 2218.0, 0.000453, 0.005, 0.0074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1809.0, 2218.0, 0.000453, 0.005, 0.0074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 
],\n\t\t[53.0, 1909.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[55.0, 1909.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[36.0, 1831.0, 0.001722, 0.010968, 0.017098, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2167.0, 1982.0, 0.0036, 0.0317, 0.0886, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2167.0, 1983.0, 0.00206, 0.01115, 0.01946, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2162.0, 1908.0, 0.000426, 0.002537, 0.00866, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2162.0, 1908.0, 0.00045, 0.002581, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2162.0, 2161.0, 0.001, 0.006138, 0.017238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2162.0, 2161.0, 0.001, 0.00539, 0.01767, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 1794.0, 0.004382, 0.027697, 0.085722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 1794.0, 0.003049, 0.028391, 0.081652, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 1887.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 2166.0, 0.003412, 0.01859, 0.035532, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 2209.0, 0.005598, 0.030473, 0.051208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 2209.0, 0.005475, 0.032322, 0.077422, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 1908.0, 0.005469, 0.034514, 0.10096, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 1908.0, 0.005539, 0.034934, 0.100658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 2164.0, 0.00228, 0.015838, 0.046554, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 2208.0, 0.005808, 0.044554, 0.131736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 2026.0, 0.014736, 0.08342, 0.159408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1927.0, 1928.0, 0.001024, 0.01164, 1.045364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1927.0, 1928.0, 0.00083, 0.011237, 1.038556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1927.0, 1886.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1927.0, 1814.0, 0.00049, 0.005109, 0.49856, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2166.0, 2164.0, 0.0019, 0.0094, 0.0118, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2166.0, 2165.0, 0.0011, 0.006921, 0.0214, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2166.0, 2165.0, 0.001254, 0.006957, 0.020732, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2166.0, 1783.0, 0.018061, 0.104849, 0.16225, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2166.0, 2163.0, 0.02, 0.128, 0.184, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1841.0, 1925.0, 0.002005, 0.015458, 0.048382, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1841.0, 1925.0, 0.001952, 0.015406, 0.048262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2160.0, 1842.0, 0.009545, 0.050416, 0.0775, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2160.0, 1910.0, 0.001505, 0.00955, 0.029252, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2159.0, 2156.0, 0.0024, 0.0141, 0.0394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2159.0, 2156.0, 0.002467, 0.012564, 0.036174, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2159.0, 2158.0, 0.0036, 0.0224, 0.0614, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2159.0, 2157.0, 0.0066, 0.0357, 0.056, 
100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2159.0, 2157.0, 0.0066, 0.0357, 0.066724, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1906.0, 2156.0, 0.001131, 0.010327, 0.03263, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1906.0, 2156.0, 0.00134, 0.010137, 0.032934, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2155.0, 2232.0, 0.002, 0.011176, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2155.0, 2232.0, 0.002, 0.011176, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2155.0, 2154.0, 0.000957, 0.004942, 0.015, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2155.0, 1940.0, 0.0013, 0.0068, 0.06552, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[45.0, 1995.0, 0.007107, 0.034738, 0.060772, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[45.0, 1995.0, 0.004876, 0.023832, 0.041692, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[45.0, 2185.0, 0.002149, 0.010502, 0.018372, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[45.0, 2185.0, 0.00157, 0.007675, 0.013426, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2188.0, 2228.0, 0.0032, 0.0124, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2188.0, 2228.0, 0.003, 0.0143, 0.0408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2153.0, 2152.0, 0.0053, 0.0319, 0.0654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1987.0, 2003.0, 0.00057, 0.005567, 0.51967, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2151.0, 2150.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2151.0, 2149.0, 0.0003, 0.0024, 0.0064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2151.0, 2149.0, 0.0003, 0.0024, 0.0064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2148.0, 2147.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2148.0, 2146.0, 0.0003, 0.0024, 0.0062, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2145.0, 2143.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2145.0, 2142.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2145.0, 2141.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2145.0, 2144.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2142.0, 1987.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2142.0, 2139.0, 0.0016, 0.0178, 1.672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2142.0, 2140.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2141.0, 2138.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2137.0, 2142.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2137.0, 2141.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2137.0, 2135.0, 0.0015, 0.0181, 1.6626, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2137.0, 2136.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1807.0, 2106.0, 0.001225, 0.00965, 0.029664, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2156.0, 51.0, 0.00113, 0.008562, 0.02454, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2156.0, 51.0, 0.001024, 0.007755, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2156.0, 2130.0, 0.008293, 0.046318, 0.129332, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2175.0, 2207.0, 0.001095, 0.007076, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2175.0, 2207.0, 0.001116, 0.007079, 0.019756, 
100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2175.0, 1784.0, 0.000787, 0.004344, 0.014244, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2175.0, 1784.0, 0.000787, 0.004344, 0.014244, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1947.0, 2220.0, 0.000603, 0.003376, 0.009118, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1947.0, 2220.0, 0.000475, 0.00314, 0.009422, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2209.0, 2134.0, 0.0137, 0.0773, 0.1374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2209.0, 2208.0, 0.00517, 0.0294, 0.04392, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1791.0, 0.000869, 0.007208, 0.024548, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1791.0, 0.000738, 0.007235, 0.024668, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1990.0, 0.001151, 0.007729, 0.026286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1990.0, 0.000871, 0.007813, 0.026216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 48.0, 0.005823, 0.027349, 0.07467, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 48.0, 0.005823, 0.027349, 0.07467, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1842.0, 0.001531, 0.010085, 0.030386, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1842.0, 0.001531, 0.010085, 0.030386, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 2228.0, 0.007567, 0.040931, 0.114362, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 2228.0, 0.006829, 0.035599, 0.10737, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 2228.0, 0.010092, 0.044787, 0.083766, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1.0, 0.006166, 0.027296, 0.045504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1937.0, 1792.0, 0.0, 1e-06, 0.0, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1937.0, 2133.0, 0.00124, 0.008152, 0.014254, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1937.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1937.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1937.0, 1774.0, 0.005207, 0.03944, 0.113034, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1792.0, 2123.0, 0.00124, 0.01052, 0.018254, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1792.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1792.0, 1774.0, 0.005207, 0.03944, 0.113034, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1901.0, 1913.0, 0.0037, 0.0294, 0.085666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1802.0, 1913.0, 0.002304, 0.015628, 0.04459, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2152.0, 2132.0, 0.002, 0.0066, 0.0096, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2152.0, 2131.0, 0.002, 0.0084, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2152.0, 2131.0, 0.0027, 0.009, 0.0144, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1820.0, 1821.0, 0.003241, 0.020126, 0.057066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[59.0, 1804.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[58.0, 1804.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2227.0, 2226.0, 0.0006, 0.00225, 0.007, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2227.0, 2226.0, 0.0006, 0.00225, 0.007, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2227.0, 
1955.0, 0.000528, 0.005104, 0.00836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2227.0, 1955.0, 0.000528, 0.005104, 0.00836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2216.0, 2214.0, 0.0072, 0.0325, 0.047, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1854.0, 2128.0, 0.00069, 0.004434, 0.014444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1854.0, 2198.0, 0.002688, 0.016159, 0.048504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1854.0, 2172.0, 0.000758, 0.004368, 0.015356, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1854.0, 2172.0, 0.000706, 0.004367, 0.015052, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2200.0, 1943.0, 0.0003, 0.0029, 0.00475, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2010.0, 557.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2010.0, 556.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2010.0, 553.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2010.0, 552.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2010.0, 2009.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2130.0, 51.0, 0.006325, 0.047909, 0.137306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2130.0, 2156.0, 0.006231, 0.047431, 0.139012, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2130.0, 2129.0, 0.008403, 0.052574, 0.08514, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2130.0, 2129.0, 0.008106, 0.03814, 0.0886, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2128.0, 1840.0, 0.001822, 0.010859, 0.032462, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2211.0, 2210.0, 0.0043, 0.0204, 0.0302, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[46.0, 1925.0, 0.007438, 0.056343, 0.161476, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[46.0, 2166.0, 0.005702, 0.043196, 0.123798, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[46.0, 1783.0, 0.005678, 0.043008, 0.12326, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2210.0, 1910.0, 0.004774, 0.033037, 0.094882, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2127.0, 2225.0, 0.0016, 0.0087, 0.0092, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2127.0, 1824.0, 0.002094, 0.01628, 0.048262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1837.0, 43.0, 0.002851, 0.021598, 0.0619, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1837.0, 43.0, 0.002851, 0.021598, 0.0619, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1837.0, 3.0, 0.007298, 0.023277, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1826.0, 1827.0, 0.002963, 0.017781, 0.051432, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2168.0, 2172.0, 0.001353, 0.007979, 0.09775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2126.0, 2177.0, 0.001083, 0.006426, 0.017174, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2125.0, 2133.0, 0.001, 0.0066, 0.01932, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2125.0, 2133.0, 0.0011, 0.0066, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2125.0, 2124.0, 0.001048, 0.007655, 0.021428, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2125.0, 2124.0, 0.001064, 0.007566, 0.02158, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1806.0, 1968.0, 0.004027, 0.025987, 0.06444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1806.0, 1968.0, 0.006024, 0.031897, 0.07314, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 
],\n\t\t[41.0, 1777.0, 0.002361, 0.01109, 0.030276, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[41.0, 1777.0, 0.002361, 0.01109, 0.030276, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[41.0, 2036.0, 0.001453, 0.011009, 0.031552, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[41.0, 2036.0, 0.001453, 0.011009, 0.031552, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[41.0, 1817.0, 0.002715, 0.020567, 0.058944, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[41.0, 1817.0, 0.002715, 0.020567, 0.058944, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[54.0, 2064.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1800.0, 1944.0, 0.00362, 0.02356, 0.070238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1800.0, 1944.0, 0.00362, 0.02356, 0.070238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1917.0, 1978.0, 0.001756, 0.012722, 0.039038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1917.0, 1978.0, 0.001756, 0.012768, 0.039174, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2193.0, 2232.0, 0.00036, 0.00247, 0.008304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2193.0, 2232.0, 0.00036, 0.002473, 0.008404, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1793.0, 1831.0, 0.004018, 0.02119, 0.031322, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1952.0, 1951.0, 0.00445, 0.02678, 0.0424, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1834.0, 1973.0, 0.001166, 0.01489, 1.616022, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1834.0, 1897.0, 0.000188, 0.003424, 0.356704, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1834.0, 1897.0, 0.000184, 0.003403, 0.358824, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1834.0, 1897.0, 0.000222, 0.003421, 0.351524, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1858.0, 1859.0, 0.0011, 0.0097, 0.030288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1858.0, 1859.0, 0.0011, 0.0097, 0.030288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2174.0, 2126.0, 0.0016, 0.0111, 0.0326, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2174.0, 2126.0, 0.002435, 0.013008, 0.039056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2174.0, 2121.0, 0.0012, 0.0051, 0.017, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2174.0, 2182.0, 0.01269, 0.070386, 0.213056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2174.0, 2120.0, 0.0205, 0.0676, 0.291, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2174.0, 44.0, 0.005062, 0.023775, 0.064912, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2015.0, 2196.0, 0.0006, 0.0031, 0.0436, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1861.0, 2196.0, 0.0006, 0.0031, 0.0436, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2118.0, 1780.0, 0.014222, 0.06951, 0.121602, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2118.0, 1780.0, 0.014222, 0.06951, 0.121602, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2116.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2114.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2113.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 1924.0, 0.024837, 0.137353, 0.21539, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 2118.0, 0.0018, 0.0039, 0.0067, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 1780.0, 0.013636, 0.077335, 0.11541, 100.0, 
0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 1780.0, 0.013636, 0.077335, 0.11541, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 2117.0, 0.00714, 0.021, 0.0326, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 1992.0, 0.015847, 0.094112, 0.149088, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2119.0, 1992.0, 0.0163, 0.097, 0.1432, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1977.0, 1927.0, 0.000918, 0.012759, 1.2575, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1977.0, 1927.0, 0.000926, 0.012736, 1.256638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1977.0, 1883.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1977.0, 1976.0, 0.001129, 0.015209, 1.424948, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1977.0, 1902.0, 0.000146, 0.001874, 0.18991, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1977.0, 1903.0, 0.000172, 0.001884, 0.195408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1780.0, 1992.0, 0.004254, 0.024125, 0.036002, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1780.0, 1992.0, 0.004254, 0.024125, 0.036002, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1975.0, 1977.0, 0.001129, 0.015209, 0.142494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1975.0, 1974.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2112.0, 2111.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2110.0, 2111.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 1844.0, 0.002676, 0.015397, 0.031688, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 2207.0, 0.0017, 0.0107, 0.0284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 2207.0, 0.0006, 0.0105, 0.0286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 1769.0, 0.003999, 0.030444, 0.089226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 1769.0, 0.003999, 0.030444, 0.089226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 2005.0, 0.0016, 0.0048, 0.1224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 2204.0, 0.001983, 0.011962, 0.03345, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 2108.0, 0.0017, 0.0091, 0.0272, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2109.0, 2108.0, 0.002178, 0.011857, 0.128572, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2107.0, 1948.0, 0.01167, 0.052547, 0.12149, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2107.0, 1953.0, 0.0086, 0.0528, 0.15631, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2106.0, 1948.0, 0.004412, 0.025837, 0.072956, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2106.0, 1921.0, 0.0041, 0.0339, 0.104598, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2106.0, 2105.0, 0.005559, 0.034409, 0.034118, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2106.0, 2105.0, 0.006452, 0.030781, 0.04556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 1939.0, 0.001728, 0.014502, 0.11525, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 1939.0, 0.001774, 0.014573, 0.113328, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2200.0, 0.000613, 0.004558, 0.02771, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2200.0, 0.000609, 0.004555, 0.027656, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 1943.0, 0.000486, 0.004698, 0.007696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 
1943.0, 0.000486, 0.004698, 0.007696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 1874.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2223.0, 0.00323, 0.013, 0.04, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2223.0, 0.00323, 0.013, 0.04, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2199.0, 0.00423, 0.0233, 0.06904, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2199.0, 0.002383, 0.018144, 0.053178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2201.0, 0.000809, 0.006324, 0.084454, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 2201.0, 0.0008, 0.0063, 0.01612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1976.0, 1875.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1976.0, 1974.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1976.0, 1897.0, 0.001027, 0.013427, 1.31672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1976.0, 1897.0, 0.001027, 0.013427, 1.31672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1976.0, 1926.0, 0.00054, 0.007314, 0.736074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1974.0, 1973.0, 0.001798, 0.017107, 0.320912, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1984.0, 2153.0, 0.0013, 0.0098, 0.0296, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1984.0, 2153.0, 0.0013, 0.0098, 0.0298, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2104.0, 2119.0, 0.0099, 0.035083, 0.048204, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2012.0, 2011.0, 0.043836, 0.178923, 0.032564, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2102.0, 1930.0, 0.00553, 0.029104, 0.081816, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2102.0, 1930.0, 0.003466, 0.018151, 0.05141, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2102.0, 2101.0, 0.0019, 0.012, 0.0332, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2102.0, 2100.0, 0.0098, 0.0256, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2146.0, 2149.0, 0.0, 1e-06, 2e-06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2146.0, 2075.0, 0.004, 0.0362, 0.0958, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2146.0, 2098.0, 0.0042, 0.0213, 0.0612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2146.0, 2098.0, 0.00376, 0.021467, 0.060712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2146.0, 1931.0, 0.005604, 0.031448, 0.087188, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2149.0, 2099.0, 0.0023, 0.0112, 0.03, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2149.0, 2099.0, 0.0026, 0.013, 0.03, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2149.0, 1915.0, 0.001405, 0.006673, 0.0208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2149.0, 1915.0, 0.001368, 0.00666, 0.020638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2103.0, 1806.0, 0.009481, 0.05461, 0.09703, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2103.0, 1942.0, 0.00216, 0.01062, 0.0171, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2103.0, 1942.0, 0.00216, 0.01062, 0.0171, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2103.0, 1915.0, 0.002927, 0.011569, 0.03306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2103.0, 1915.0, 0.002199, 0.011585, 0.0324, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1936.0, 2069.0, 0.001533, 0.01167, 0.03418, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1936.0, 2069.0, 
0.001405, 0.01136, 0.03412, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1938.0, 2217.0, 0.000413, 0.002459, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[52.0, 2098.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1948.0, 1838.0, 0.004812, 0.029932, 0.088632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1948.0, 1838.0, 0.004831, 0.030014, 0.0893, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1948.0, 2105.0, 0.004686, 0.03165, 0.96246, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1948.0, 2105.0, 0.004761, 0.03174, 0.945046, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2097.0, 2182.0, 0.0012, 0.0056, 0.0108, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1959.0, 1876.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2164.0, 2179.0, 0.0053, 0.0326, 0.0446, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2134.0, 2096.0, 0.0064, 0.061, 0.0914, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1949.0, 1795.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1949.0, 1795.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1949.0, 2211.0, 0.00437, 0.0184, 0.0161, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1788.0, 2098.0, 0.008655, 0.03852, 0.0579, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2187.0, 1991.0, 0.00095, 0.00498, 0.008738, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2187.0, 1842.0, 0.001028, 0.005377, 0.008848, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2187.0, 1842.0, 0.001367, 0.007231, 0.011618, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2187.0, 1774.0, 0.000967, 0.008013, 0.027288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2187.0, 1774.0, 0.000967, 0.008013, 0.027288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1778.0, 1948.0, 0.001734, 0.013202, 0.038696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1778.0, 1948.0, 0.001734, 0.013202, 0.038696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1778.0, 2105.0, 0.00244, 0.018575, 0.05444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1778.0, 2105.0, 0.00244, 0.018575, 0.05444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2093.0, 2092.0, 0.0021, 0.009, 0.0162, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2093.0, 2092.0, 0.0021, 0.0092, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2122.0, 2091.0, 0.0018, 0.0107, 0.0316, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2122.0, 1.0, 0.0025, 0.01318, 0.01978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 2089.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 2088.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 1993.0, 0.001073, 0.006678, 0.020362, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 1993.0, 0.001068, 0.006721, 0.020362, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 2087.0, 0.0007, 0.004, 0.0106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 2087.0, 0.0007, 0.004, 0.0106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 2086.0, 0.0014, 0.0061, 0.0178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2090.0, 2086.0, 0.0015, 0.0062, 0.0178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 2092.0, 0.000577, 0.004153, 0.012844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 
],\n\t\t[2088.0, 2092.0, 0.000577, 0.004153, 0.013046, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 2084.0, 0.0085, 0.0302, 0.0566, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 2084.0, 0.0085, 0.0393, 0.0566, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 2085.0, 0.0019, 0.0104, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 2085.0, 0.0016, 0.008, 0.022, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 1779.0, 0.001312, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 1779.0, 0.001312, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 1859.0, 0.002117, 0.014224, 0.044428, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 1859.0, 0.014442, 0.014442, 0.04484, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2083.0, 2082.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2083.0, 2135.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2083.0, 2139.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2083.0, 1771.0, 0.000327, 0.00455, 0.448486, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2135.0, 1966.0, 0.000205, 0.002384, 0.23393, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2135.0, 1966.0, 0.000168, 0.00234, 0.237148, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2135.0, 2081.0, 0.0006, 0.0071, 0.697466, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2080.0, 2135.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2080.0, 2139.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2080.0, 2079.0, 0.0007, 0.0071, 0.6752, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1767.0, 1795.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1767.0, 1795.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[114.0, 109.0, 0.001236, 0.013293, 1.480528, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[114.0, 1786.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[113.0, 112.0, 0.001641, 0.01764, 1.964682, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[113.0, 1786.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 2205.0, 0.001323, 0.013531, 0.041808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 2205.0, 0.001323, 0.013531, 0.041808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 2084.0, 9.8e-05, 0.001366, 0.134654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 2084.0, 9.8e-05, 0.001366, 0.134654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 119.0, 0.003842, 0.035772, 0.102888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 119.0, 0.003842, 0.035772, 0.102888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1929.0, 1932.0, 0.00352, 0.01739, 0.027392, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2099.0, 2075.0, 0.0075, 0.0333, 0.0862, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2099.0, 1932.0, 0.000571, 0.003917, 0.011298, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2099.0, 1932.0, 0.000625, 0.004002, 0.011024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2198.0, 2192.0, 0.005799, 0.044143, 0.129376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2198.0, 2192.0, 0.005799, 0.044143, 0.129376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 
],\n\t\t[2198.0, 2197.0, 0.000333, 0.001914, 0.010434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2198.0, 2197.0, 0.000335, 0.001915, 0.010716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2198.0, 2195.0, 0.000709, 0.004256, 0.014632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2198.0, 2196.0, 0.001161, 0.006866, 0.02572, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1934.0, 1933.0, 0.006777, 0.036325, 0.099522, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1766.0, 2098.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1968.0, 1948.0, 0.007335, 0.040468, 0.132678, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1968.0, 1948.0, 0.007335, 0.040468, 0.132678, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2123.0, 1986.0, 0.0014, 0.008, 0.012, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2123.0, 2133.0, 0.0024, 0.0152, 0.0254, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2123.0, 2133.0, 0.0028, 0.0165, 0.0256, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2123.0, 2122.0, 0.0014, 0.008, 0.0134, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2123.0, 2122.0, 0.0007, 0.0052, 0.0224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2123.0, 2021.0, 0.012484, 0.069281, 0.11486, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2132.0, 2131.0, 0.0015, 0.0066, 0.012, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2178.0, 2191.0, 0.006813, 0.043, 0.06108, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2178.0, 1818.0, 0.001267, 0.006536, 0.0117, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2178.0, 1818.0, 0.001185, 0.006504, 0.010946, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[12.0, 1679.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[12.0, 116.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[11.0, 18.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[11.0, 17.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[11.0, 16.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[11.0, 15.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1857.0, 51.0, 0.002531, 0.019174, 0.05495, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1857.0, 2156.0, 0.003173, 0.027163, 0.078504, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1982.0, 1911.0, 0.004746, 0.035379, 0.105292, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1918.0, 1917.0, 0.00248, 0.01851, 0.055088, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1918.0, 1917.0, 0.002438, 0.01845, 0.055446, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1918.0, 2202.0, 0.001864, 0.014205, 0.044768, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1918.0, 2202.0, 0.001869, 0.014081, 0.044908, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1914.0, 2107.0, 0.0036, 0.019, 0.051544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1914.0, 2058.0, 0.0061, 0.0313, 0.0847, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1914.0, 1953.0, 0.0113, 0.0675, 0.199492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[49.0, 2171.0, 0.001603, 0.012145, 0.034808, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[49.0, 2169.0, 0.001099, 0.008326, 0.023862, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2218.0, 2185.0, 0.001653, 0.010407, 0.0294, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1849.0, 1966.0, 
0.000152, 0.001935, 0.20991, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1849.0, 1966.0, 0.000124, 0.001938, 0.209752, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1849.0, 1848.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1849.0, 1847.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1849.0, 1846.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1849.0, 1845.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2074.0, 2233.0, 0.0045, 0.0226, 0.0614, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 2198.0, 0.003409, 0.020465, 0.11888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 1829.0, 0.000246, 0.001611, 0.03219, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 1829.0, 0.000222, 0.001538, 0.032516, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 1867.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 1865.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 1840.0, 0.002366, 0.01494, 0.043588, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 2073.0, 0.001, 0.0068, 0.0192, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 2073.0, 0.001, 0.0072, 0.0196, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 2169.0, 0.0016, 0.008, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2172.0, 2169.0, 0.002, 0.0121, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1973.0, 1868.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1973.0, 1866.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1973.0, 1897.0, 0.0014, 0.0163, 1.604962, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1973.0, 1926.0, 0.000371, 0.004039, 0.2452, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 2221.0, 0.002538, 0.018658, 0.057658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1947.0, 0.000244, 0.001883, 0.006854, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1947.0, 0.000319, 0.001779, 0.007006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1947.0, 0.000316, 0.001744, 0.006838, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 2216.0, 0.0032, 0.01325, 0.0247, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 2220.0, 0.000283, 0.001786, 0.007918, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 2220.0, 0.000276, 0.001786, 0.00784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1823.0, 0.006105, 0.032408, 0.092494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1823.0, 0.006105, 0.032408, 0.092494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 2214.0, 0.00572, 0.02325, 0.0247, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1799.0, 1970.0, 0.000271, 0.002947, 0.303246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1799.0, 1798.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1799.0, 1897.0, 0.000631, 0.009242, 0.194064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1799.0, 1969.0, 9.4e-05, 0.000882, 0.09577, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1798.0, 1972.0, 0.00026, 0.00296, 0.303556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1798.0, 1897.0, 0.000581, 0.009148, 0.197, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1798.0, 1969.0, 9.5e-05, 0.000894, 0.096712, 
100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1776.0, 2066.0, 0.000748, 0.003551, 0.009954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1776.0, 2066.0, 0.000748, 0.003551, 0.009954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2013.0, 1806.0, 0.004027, 0.025987, 0.06444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2013.0, 1819.0, 0.000878, 0.008242, 0.022352, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2013.0, 1819.0, 0.001401, 0.008357, 0.023872, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2069.0, 1930.0, 0.003186, 0.016051, 0.046862, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2069.0, 1930.0, 0.003638, 0.018825, 0.052778, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2069.0, 1942.0, 0.001495, 0.008215, 0.023988, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2069.0, 1932.0, 0.003694, 0.020963, 0.05775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2095.0, 1991.0, 0.0038, 0.0265, 0.0452, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2095.0, 1774.0, 0.002207, 0.016799, 0.049234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2095.0, 1774.0, 0.002207, 0.016799, 0.049234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2206.0, 1954.0, 0.000436, 0.003126, 0.010554, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2206.0, 1954.0, 0.00048, 0.003156, 0.010722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2206.0, 2205.0, 0.0035, 0.0208, 0.0568, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2154.0, 2232.0, 0.001636, 0.007686, 0.020984, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2154.0, 2232.0, 0.001636, 0.007686, 0.020984, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2154.0, 1824.0, 0.001747, 0.011028, 0.02, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2068.0, 2174.0, 0.0053, 0.0356, 0.1608, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1995.0, 2127.0, 0.002277, 0.013038, 0.02106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1995.0, 2185.0, 0.009767, 0.035062, 0.048936, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1995.0, 2185.0, 0.005959, 0.032066, 0.049696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1819.0, 2062.0, 0.003176, 0.015785, 0.043182, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1819.0, 1953.0, 0.004039, 0.022981, 0.066948, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1775.0, 1817.0, 0.00056, 0.004262, 0.012492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1775.0, 1817.0, 0.00056, 0.004262, 0.012492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2067.0, 2004.0, 0.0011, 0.0053, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2067.0, 2066.0, 0.0035, 0.01357, 0.0193, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2205.0, 2130.0, 0.005, 0.0289, 0.081, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2205.0, 2130.0, 0.003152, 0.02578, 0.0731, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2177.0, 0.002603, 0.021498, 0.07278, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2177.0, 0.002582, 0.021425, 0.0731, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1919.0, 0.001405, 0.011326, 0.219716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1919.0, 0.00139, 0.011124, 0.22341, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2156.0, 0.005768, 0.043001, 0.127542, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2156.0, 0.005768, 0.043001, 0.127542, 400.0, 
0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2175.0, 0.002549, 0.017938, 0.059848, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2175.0, 0.002488, 0.01794, 0.059848, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2126.0, 0.002403, 0.02124, 0.071276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2126.0, 0.002353, 0.021196, 0.072128, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1832.0, 0.000607, 0.004514, 0.015152, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 2.0, 0.000607, 0.004504, 0.015044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 1790.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 1790.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 1786.0, 0.000733, 0.009358, 1.015624, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 1786.0, 0.000733, 0.009358, 1.015624, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 123.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 2079.0, 0.000508, 0.0044, 0.4396, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 2081.0, 0.000464, 0.00536, 0.5338, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[123.0, 1959.0, 0.000968, 0.01148, 1.1461, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1978.0, 2183.0, 0.0019, 0.0102, 0.0276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1978.0, 1888.0, 0.0035, 0.0221, 0.064074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1978.0, 1888.0, 0.0036, 0.0222, 0.064304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2121.0, 2071.0, 0.0028, 0.0171, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[37.0, 2149.0, 0.001399, 0.00713, 0.021124, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1791.0, 2187.0, 0.000547, 0.004293, 0.012496, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1791.0, 2187.0, 0.000564, 0.003571, 0.010164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2087.0, 2203.0, 0.01588, 0.0793, 0.1166, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1840.0, 1782.0, 0.002004, 0.011367, 0.016964, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1888.0, 42.0, 0.001897, 0.010818, 0.029982, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2065.0, 2064.0, 0.0047, 0.0232, 0.0596, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2065.0, 1825.0, 0.010653, 0.057707, 0.104974, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2182.0, 1831.0, 0.006864, 0.041913, 0.08442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2182.0, 2097.0, 0.001925, 0.009143, 0.02563, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2182.0, 2120.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2182.0, 44.0, 0.007721, 0.036266, 0.099012, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2120.0, 1454.0, 0.0152, 0.069, 0.1232, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2120.0, 2068.0, 0.0076, 0.0355, 0.1318, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2120.0, 2124.0, 0.0107, 0.0548, 0.1562, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2120.0, 
2063.0, 0.0078, 0.0253, 0.08, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1958.0, 2230.0, 0.000968, 0.01148, 1.2124, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1765.0, 2212.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1765.0, 1909.0, 0.009008, 0.044028, 0.077024, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2102.0, 0.0019, 0.0088, 0.0194, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2102.0, 0.0016, 0.0072, 0.021, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2102.0, 0.001246, 0.007242, 0.0218, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 1942.0, 0.0066, 0.03245, 0.0523, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2061.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2058.0, 0.0101, 0.0509, 0.141, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2060.0, 0.0013, 0.0092, 0.025, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2060.0, 0.00201, 0.01179, 0.0338, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 2059.0, 0.0034, 0.01617, 0.044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 1953.0, 0.0025, 0.014, 0.036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 1953.0, 0.0025, 0.014, 0.036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 2003.0, 0.001561, 0.014418, 1.393376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 2141.0, 0.000512, 0.008616, 0.84623, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 2010.0, 0.000932, 0.01154, 1.07545, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 2009.0, 0.001, 0.0116, 1.0912, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 2140.0, 0.0007, 0.008796, 0.873706, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 2056.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2206.0, 0.00062, 0.00339, 0.00774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2206.0, 0.00054, 0.00357, 0.00774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2205.0, 0.003, 0.0161, 0.0416, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2054.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2052.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2018.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 1784.0, 0.00052, 0.00287, 0.00941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 1784.0, 0.00052, 0.00287, 0.00941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2207.0, 2053.0, 0.0015, 0.0078, 0.022, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2052.0, 2051.0, 0.0013, 0.0078, 0.0226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2079.0, 315.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2079.0, 2050.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2079.0, 2019.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2079.0, 2081.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2079.0, 2230.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2081.0, 307.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2081.0, 2230.0, 0.00054, 0.00738, 0.766086, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 
],\n\t\t[2124.0, 2187.0, 0.00126, 0.007397, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 1916.0, 0.000818, 0.0061, 0.001808, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 1916.0, 0.000818, 0.0061, 0.001808, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 6.0, 0.000717, 0.002597, 0.003648, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 2121.0, 0.002019, 0.0095, 0.046, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 2014.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 2006.0, 0.0087, 0.0339, 0.2008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 1774.0, 0.001156, 0.006379, 0.020912, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 1774.0, 0.001156, 0.006379, 0.020912, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2014.0, 2174.0, 0.0026, 0.0129, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2014.0, 2174.0, 0.0023, 0.0129, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2014.0, 2121.0, 0.002312, 0.016324, 0.04676, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2014.0, 2063.0, 0.0081, 0.0314, 0.0662, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 1773.0, 0.000279, 0.003874, 0.381812, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 1773.0, 0.000279, 0.003874, 0.381812, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 2229.0, 0.000612, 0.007548, 0.76969, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 2229.0, 0.000684, 0.007548, 0.761836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 2024.0, 0.000436, 0.006384, 0.62015, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 2024.0, 0.00044, 0.00638, 0.6202, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 2024.0, 0.00044, 0.00638, 0.6202, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2071.0, 2070.0, 0.0004, 0.0025, 0.0666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2071.0, 2070.0, 0.0003, 0.0013, 0.0666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2071.0, 2108.0, 0.0025, 0.0133, 0.0396, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1769.0, 1844.0, 0.003178, 0.024071, 0.068986, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1769.0, 1844.0, 0.003178, 0.024071, 0.068986, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1773.0, 2024.0, 0.000296, 0.004117, 0.40581, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1773.0, 2024.0, 0.000296, 0.004117, 0.40581, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1843.0, 1954.0, 0.000196, 0.001444, 0.005702, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1843.0, 1954.0, 0.00017, 0.001475, 0.00593, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1990.0, 1781.0, 0.002351, 0.017893, 0.052442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1990.0, 1781.0, 0.002515, 0.019148, 0.05612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1990.0, 1791.0, 0.001184, 0.005796, 0.016876, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1990.0, 1791.0, 0.000773, 0.005178, 0.014792, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1990.0, 2091.0, 0.002873, 0.014873, 0.026988, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1990.0, 2091.0, 0.001843, 0.012695, 0.028906, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2092.0, 1949.0, 0.000576, 0.005568, 0.00912, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2075.0, 1776.0, 0.003123, 
0.014847, 0.041616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2075.0, 1776.0, 0.003123, 0.014847, 0.041616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2075.0, 2066.0, 0.003, 0.0162, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2075.0, 2066.0, 0.003, 0.0162, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1909.0, 1831.0, 0.000425, 0.002347, 0.007694, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1909.0, 1831.0, 0.000425, 0.002347, 0.007694, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2004.0, 2000.0, 0.0043, 0.0189, 0.0516, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[50.0, 1894.0, 0.007438, 0.037376, 0.062508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[50.0, 1894.0, 0.007438, 0.037376, 0.062508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2180.0, 2166.0, 0.011111, 0.065754, 0.098978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2180.0, 2134.0, 0.0056, 0.0304, 0.0504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2131.0, 2000.0, 0.0109, 0.0472, 0.1306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2131.0, 2064.0, 0.00604, 0.037441, 0.111652, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2131.0, 2064.0, 0.006511, 0.037267, 0.111562, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2131.0, 2065.0, 0.015, 0.0413, 0.0936, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2048.0, 2047.0, 0.0049, 0.021, 0.034, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2048.0, 2214.0, 0.0132, 0.0474, 0.074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1913.0, 2153.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1913.0, 2153.0, 0.0017, 0.0123, 0.038104, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1913.0, 2132.0, 0.0015, 0.0104, 0.03276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1913.0, 2132.0, 0.0014, 0.0105, 0.03257, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1850.0, 2204.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1850.0, 2204.0, 0.00068, 0.003595, 0.011282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1935.0, 1934.0, 0.00093, 0.005165, 0.014484, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2046.0, 2010.0, 0.00011, 0.0016, 0.157, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2046.0, 2010.0, 0.000112, 0.001608, 0.1727, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2046.0, 2045.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2045.0, 2010.0, 0.00011, 0.0016, 0.157, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2044.0, 2045.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2058.0, 1933.0, 0.001967, 0.011025, 0.032296, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2058.0, 1934.0, 0.00524, 0.028022, 0.078426, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2084.0, 1779.0, 0.003284, 0.025003, 0.07328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2084.0, 1779.0, 0.003284, 0.025003, 0.07328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2195.0, 2196.0, 0.0006, 0.0034, 0.016282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1764.0, 1831.0, 4.9e-05, 0.000287, 0.001824, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[56.0, 2153.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2042.0, 2041.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2040.0, 2041.0, 0.0, 
1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2039.0, 2038.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2037.0, 2038.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2006.0, 1769.0, 0.005199, 0.039577, 0.115992, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2028.0, 1907.0, 0.001632, 0.014674, 0.046224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2028.0, 1955.0, 1e-06, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2028.0, 2228.0, 0.0022, 0.016793, 0.049218, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1805.0, 2064.0, 0.004105, 0.025004, 0.073654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1989.0, 2075.0, 0.002775, 0.01195, 0.031086, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1989.0, 2075.0, 0.002042, 0.009724, 0.0056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2036.0, 1777.0, 0.001686, 0.01625, 0.028548, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2036.0, 1776.0, 0.002319, 0.017657, 0.05175, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2036.0, 1776.0, 0.002319, 0.017657, 0.05175, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2158.0, 2159.0, 0.003785, 0.035893, 0.102126, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2158.0, 1832.0, 0.003733, 0.026363, 0.08693, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2158.0, 2.0, 0.003679, 0.026454, 0.08693, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2063.0, 2068.0, 0.0013, 0.0076, 0.1, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2085.0, 1949.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2060.0, 2101.0, 0.001194, 0.006769, 0.02107, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2060.0, 2101.0, 0.00123, 0.00755, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1828.0, 1827.0, 0.002291, 0.013129, 0.037544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1951.0, 0.000967, 0.005386, 0.015858, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1951.0, 0.00083, 0.005543, 0.015894, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1800.0, 0.0032, 0.0256, 0.050238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1800.0, 0.0032, 0.0256, 0.050238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1952.0, 0.0053, 0.0287, 0.043366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1888.0, 0.0046, 0.0265, 0.07574, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1888.0, 0.0049, 0.0281, 0.076512, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1893.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1891.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 2047.0, 0.003, 0.0182, 0.052822, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 2047.0, 0.003, 0.0183, 0.052868, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1827.0, 0.000858, 0.005166, 0.015054, 10.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1894.0, 1827.0, 0.000914, 0.005525, 0.01506, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1897.0, 1895.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1897.0, 1892.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[120.0, 1897.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2047.0, 1917.0, 0.006735, 0.04502, 0.1218, 
100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2047.0, 1978.0, 0.005, 0.0273, 0.0742, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2047.0, 2048.0, 0.011661, 0.047648, 0.068356, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2047.0, 2163.0, 0.0157, 0.0776, 0.1892, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1762.0, 1921.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1912.0, 0.0035, 0.0199, 0.055758, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 2167.0, 0.0014, 0.0093, 0.02272, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 2167.0, 0.0026, 0.0129, 0.0206, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 2224.0, 0.0008, 0.00608, 0.018, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 2224.0, 0.0007, 0.0061, 0.01778, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1982.0, 0.004371, 0.036771, 0.102082, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1911.0, 0.000587, 0.005466, 0.015722, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1911.0, 0.001272, 0.011845, 0.034066, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1995.0, 0.0032, 0.0166, 0.0476, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 2035.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1980.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2225.0, 1983.0, 0.005, 0.0147, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 1966.0, 0.000356, 0.005065, 0.51967, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 2003.0, 0.00121, 0.01355, 1.2482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 1772.0, 0.000317, 0.00405, 0.439468, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 1772.0, 0.000309, 0.004298, 0.42362, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 2033.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 1981.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 2032.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2034.0, 1771.0, 0.000759, 0.010812, 1.0325, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[121.0, 2034.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1801.0, 2131.0, 0.0037, 0.0294, 0.085666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2220.0, 2170.0, 0.000467, 0.004897, 0.015144, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2220.0, 2170.0, 0.000467, 0.0049, 0.015136, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2030.0, 1940.0, 0.000667, 0.003612, 0.055194, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2204.0, 1844.0, 0.001053, 0.007978, 0.022864, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2204.0, 1844.0, 0.001053, 0.007978, 0.022864, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2204.0, 2206.0, 0.0023, 0.0127, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2233.0, 1992.0, 0.0055, 0.0269, 0.044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2233.0, 1871.0, 0.0055, 0.0269, 0.044, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2233.0, 2190.0, 0.0017, 0.0128, 0.0398, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2233.0, 2228.0, 0.001919, 0.010339, 0.029802, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2233.0, 2228.0, 0.003985, 0.013988, 0.035304, 100.0, 0.0,0.0,0.0, 
0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2223.0, 2169.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2223.0, 2222.0, 0.003, 0.0199, 0.0546, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2223.0, 2222.0, 0.002477, 0.015386, 0.086506, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1946.0, 2124.0, 0.002181, 0.012442, 0.034482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1946.0, 1769.0, 0.004399, 0.033488, 0.098148, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2213.0, 2212.0, 0.00872, 0.0415, 0.0603, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1823.0, 1822.0, 0.001557, 0.008831, 0.013178, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1823.0, 1822.0, 0.001557, 0.008831, 0.013178, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1992.0, 47.0, 0.008124, 0.030296, 0.05087, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1992.0, 1871.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[38.0, 1921.0, 0.005421, 0.030248, 0.044896, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1832.0, 2.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2199.0, 2163.0, 0.012972, 0.060245, 0.0882, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2029.0, 1825.0, 0.002794, 0.015736, 0.030542, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2029.0, 1825.0, 0.002779, 0.016037, 0.030802, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2029.0, 2004.0, 0.0061, 0.0282, 0.0736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2029.0, 119.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2161.0, 2165.0, 0.002758, 0.017246, 0.05042, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2161.0, 2165.0, 0.00281, 0.017192, 0.050784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2190.0, 1955.0, 0.0015, 0.005, 0.008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2059.0, 1933.0, 0.007141, 0.03759, 0.110426, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2059.0, 2060.0, 0.001137, 0.007726, 0.021632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2066.0, 1777.0, 0.008535, 0.047552, 0.135966, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2066.0, 2036.0, 0.0277, 0.0546, 0.1086, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2066.0, 1817.0, 0.001193, 0.008897, 0.028558, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2066.0, 1817.0, 0.001271, 0.008926, 0.028726, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2214.0, 1822.0, 0.001297, 0.008265, 0.028008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2214.0, 2048.0, 0.004664, 0.019059, 0.027342, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 2188.0, 0.0032, 0.0124, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 47.0, 0.002432, 0.009068, 0.015226, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 1907.0, 0.000749, 0.006419, 0.019036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 1907.0, 0.000404, 0.006082, 0.019234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 48.0, 0.002281, 0.010715, 0.029254, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 48.0, 0.002281, 0.010715, 0.029254, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 2028.0, 0.003431, 0.018104, 0.05278, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 2028.0, 0.002438, 0.018489, 0.053282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 2025.0, 0.0, 1e-05, 
0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 1790.0, 0.000393, 0.006763, 0.725106, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 2139.0, 0.0012, 0.0095, 0.8706, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 2034.0, 0.0009, 0.0131, 1.2058, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 2023.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 1771.0, 0.00041, 0.005233, 0.567852, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 1771.0, 0.000362, 0.005035, 0.496268, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1816.0, 2003.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1816.0, 1899.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1815.0, 2003.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1815.0, 1899.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1807.0, 0.004043, 0.031502, 0.092992, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1837.0, 0.00419, 0.032116, 0.097538, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1837.0, 0.003923, 0.032344, 0.097258, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 2106.0, 0.005601, 0.039221, 0.120638, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 2106.0, 0.00442, 0.04115, 0.118408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1921.0, 0.008033, 0.074789, 0.215092, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1968.0, 8.3e-05, 0.001479, 0.004712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1968.0, 6.2e-05, 0.001495, 0.004682, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 2178.0, 0.001489, 0.009279, 0.019006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 2178.0, 0.0019, 0.008904, 0.019006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1818.0, 0.000639, 0.003844, 0.011098, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1818.0, 0.000629, 0.00385, 0.011346, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1899.0, 2136.0, 0.000834, 0.010243, 0.944442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1899.0, 2144.0, 0.000915, 0.009985, 0.950792, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1899.0, 500.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1899.0, 499.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1836.0, 1968.0, 0.001023, 0.007793, 0.02284, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1836.0, 1968.0, 0.001023, 0.007793, 0.02284, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1835.0, 1899.0, 3.5e-05, 0.000554, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 2160.0, 0.000808, 0.00615, 0.018024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 2160.0, 0.000808, 0.00615, 0.018024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1795.0, 0.002839, 0.021615, 0.06335, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1795.0, 0.002839, 0.021615, 0.06335, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 2210.0, 0.001992, 0.015161, 0.044434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 2210.0, 0.002895, 0.022041, 0.0646, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1844.0, 0.002519, 0.019179, 0.056212, 100.0, 0.0,0.0,0.0, 
0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1994.0, 0.002367, 0.013057, 0.042808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1994.0, 0.001992, 0.015161, 0.044434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1910.0, 0.001432, 0.010899, 0.031942, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1910.0, 0.001432, 0.010899, 0.031942, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2196.0, 2008.0, 0.002104, 0.008588, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2196.0, 2016.0, 0.002104, 0.008588, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2196.0, 1852.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1926.0, 1853.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1830.0, 2159.0, 0.005669, 0.029498, 0.084286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1830.0, 1831.0, 0.005312, 0.030531, 0.088372, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1830.0, 1831.0, 0.005391, 0.030252, 0.088402, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1830.0, 2097.0, 0.003948, 0.020204, 0.05813, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1983.0, 1950.0, 0.0012, 0.0116, 0.019, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2086.0, 2030.0, 0.00086, 0.004229, 0.012674, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2185.0, 2217.0, 0.0024, 0.0101, 0.0152, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2027.0, 1947.0, 0.000579, 0.003409, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2027.0, 1947.0, 0.000579, 0.00341, 0.00806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2027.0, 1822.0, 0.003665, 0.023351, 0.069198, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1860.0, 1956.0, 0.000192, 0.001612, 0.007754, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1860.0, 1956.0, 0.00019, 0.001612, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[39.0, 2146.0, 0.005056, 0.02051, 0.02918, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1994.0, 2160.0, 0.003787, 0.015066, 0.02744, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1994.0, 1844.0, 0.006343, 0.034897, 0.072984, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1994.0, 2088.0, 0.003409, 0.018265, 0.06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1994.0, 2088.0, 0.00339, 0.018097, 0.06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1774.0, 2125.0, 0.000519, 0.002865, 0.009394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1774.0, 2125.0, 0.000519, 0.002865, 0.009394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2053.0, 2051.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1900.0, 2196.0, 0.00048, 0.0046, 0.0076, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2091.0, 1781.0, 0.000508, 0.003865, 0.011328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2091.0, 1787.0, 0.000211, 0.000705, 0.03415, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2091.0, 1.0, 0.0, 1e-06, 2e-06, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1.0, 1781.0, 0.00044, 0.003349, 0.009814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1.0, 1787.0, 0.000216, 0.000738, 0.035304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1803.0, 2153.0, 0.004651, 0.032568, 0.093178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1905.0, 2129.0, 0.004099, 0.034324, 0.09695, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1904.0, 2129.0, 
0.004105, 0.025004, 0.073654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2108.0, 2124.0, 0.004633, 0.02824, 0.08162, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2108.0, 1769.0, 0.003559, 0.027095, 0.07941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2108.0, 1769.0, 0.003559, 0.027095, 0.07941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2108.0, 1945.0, 0.00096, 0.00928, 0.0152, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1941.0, 1829.0, 0.001096, 0.005395, 0.043434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2021.0, 2020.0, 0.00781, 0.0352, 0.0262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2021.0, 2091.0, 0.014, 0.0727, 0.110892, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2163.0, 1783.0, 0.004747, 0.036136, 0.10591, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2163.0, 2026.0, 0.0123, 0.0679, 0.104, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1902.0, 1903.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1859.0, 2204.0, 0.0049, 0.0288, 0.08016, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[2222.0, 1917.0, 0.002438, 0.01471, 0.04222, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1950.0, 2215.0, 0.00095, 0.005619, 0.018094, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1950.0, 2215.0, 0.001591, 0.007644, 0.012924, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1950.0, 2218.0, 0.003325, 0.02037, 0.03325, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[316.0, 315.0, 0.001572, 0.02166, 3.44616, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[310.0, 307.0, 0.001592, 0.021628, 3.43046, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1922.0, 1921.0, 0.0055, 0.0332, 0.048824, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[482.0, 1789.0, 0.001904, 0.030428, 2.94106, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[484.0, 483.0, 0.001926, 0.030303, 2.93952, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[508.0, 1899.0, 0.001544, 0.016148, 1.54645, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[508.0, 1899.0, 0.00134, 0.014248, 1.32665, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[508.0, 482.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[508.0, 484.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[500.0, 508.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[499.0, 508.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1685.0, 1869.0, 0.00131, 0.072778, 0.0027, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1706.0, 1985.0, 0.0003, 0.019557, 0.0, 360.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1642.0, 1763.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1747.0, 2181.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1746.0, 2181.0, 0.0047, 0.156, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[31.0, 57.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[30.0, 57.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[23.0, 40.0, 0.002828, 0.1393, 0.0011, 100.0, 0.0,0.0,0.940909, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[4.0, 3.0, 0.002083, 0.116667, 0.00156, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1666.0, 1810.0, 0.000508, 0.037, 0.004284, 420.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1665.0, 1810.0, 0.000507, 0.036952, 0.003864, 420.0, 
0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1745.0, 2171.0, 0.000585, 0.034067, 0.006103, 436.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1744.0, 2171.0, 0.000585, 0.034067, 0.061027, 436.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1743.0, 2171.0, 0.000526, 0.030275, 0.00981, 418.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1742.0, 2171.0, 0.000526, 0.030275, 0.00981, 418.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1664.0, 1809.0, 0.0012, 0.074111, 0.0018, 180.0, 0.0,0.0,1.097727, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[26.0, 53.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[28.0, 55.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[19.0, 36.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1741.0, 2162.0, 0.0006, 0.0345, 0.0, 418.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1740.0, 2162.0, 0.0006, 0.0343, 0.0, 418.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1670.0, 1841.0, 0.000544, 0.037838, 0.0148, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1669.0, 1841.0, 0.000544, 0.037838, 0.0148, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1687.0, 1906.0, 0.000791, 0.048433, 0.0033, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1686.0, 1906.0, 0.000791, 0.048433, 0.0033, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1729.0, 1986.0, 0.000659, 0.043486, 0.00189, 430.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1728.0, 2122.0, 0.000659, 0.043486, 0.00189, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1696.0, 1937.0, 0.000802, 0.048833, 0.0051, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1695.0, 1792.0, 0.000802, 0.048833, 0.0051, 370.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1690.0, 1901.0, 0.002669, 0.136, 0.0009, 100.0, 0.0,0.0,1.00625, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1659.0, 1802.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1739.0, 2152.0, 0.0041, 0.0942, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1738.0, 2152.0, 0.001394, 0.0686, 0.005, 240.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1737.0, 2152.0, 0.002018, 0.0757, 0.00184, 240.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1707.0, 2152.0, 0.000659, 0.066286, 0.00819, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1752.0, 2152.0, 0.000659, 0.041543, 0.00945, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[13.0, 1820.0, 0.003265, 0.139, 0.00076, 120.0, 0.0,0.0,0.940909, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1703.0, 1984.0, 0.001884, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1702.0, 1984.0, 0.001871, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1704.0, 1984.0, 0.001876, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1705.0, 1984.0, 0.001867, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[34.0, 59.0, 0.0064, 0.1807, 0.0, 75.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[33.0, 58.0, 0.0064, 0.1807, 0.0, 75.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1678.0, 1854.0, 0.000769, 0.050067, 0.00276, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1677.0, 1854.0, 0.000762, 0.0499, 0.00276, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1655.0, 1826.0, 0.000959, 0.192917, 0.00084, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[27.0, 54.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 
0.1 ],\n\t\t[1657.0, 1793.0, 0.00298, 0.1364, 0.0013, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1650.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1648.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[35.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1682.0, 1858.0, 0.000527, 0.04415, 0.0034, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1681.0, 1858.0, 0.000527, 0.04415, 0.0034, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2115.0, 2118.0, 0.0029, 0.0762, 0.0, 300.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2111.0, 2117.0, 0.0045, 0.1801, 0.0, 90.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2104.0, 2012.0, 0.005505, 0.199524, 0.001512, 63.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1736.0, 2104.0, 0.006292, 0.268, 0.00075, 50.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1735.0, 2104.0, 0.006204, 0.268, 0.00075, 50.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1734.0, 2149.0, 0.002101, 0.056458, 0.014304, 240.0, 0.0,0.0,1.1, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1733.0, 2149.0, 0.001332, 0.059167, 0.008592, 240.0, 0.0,0.0,1.1, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1732.0, 2149.0, 0.001465, 0.057917, 0.009744, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1694.0, 1936.0, 0.000531, 0.036378, 0.00407, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1693.0, 1936.0, 0.000531, 0.036378, 0.00407, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[25.0, 52.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1701.0, 1959.0, 0.000326, 0.0237, 0.0072, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1700.0, 1959.0, 0.000326, 0.0237, 0.0072, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1652.0, 1788.0, 0.003869, 0.14, 0.002, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1645.0, 1767.0, 0.0115, 0.2541, 0.0, 400.0, 0.0,0.0,1.025, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[24.0, 1767.0, 0.0115, 0.2541, 0.0, 400.0, 0.0,0.0,1.025, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1656.0, 1929.0, 0.002209, 0.100333, 2.4e-05, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[14.0, 1929.0, 0.002431, 0.116667, 6e-05, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1644.0, 1766.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[12.0, 1857.0, 0.000929, 0.054167, 0.00648, 240.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[11.0, 1857.0, 0.000948, 0.054167, 0.00648, 240.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[11.0, 1857.0, 0.003124, 0.133, 0.0022, 100.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1691.0, 2013.0, 0.004251, 0.1313, 0.0015, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1662.0, 2013.0, 0.001786, 0.099067, 0.003675, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1731.0, 2095.0, 0.001658, 0.068, 0.0046, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1730.0, 2095.0, 0.001598, 0.0681, 0.004, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1649.0, 1775.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[32.0, 1775.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1651.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1653.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1654.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 
0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1674.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[20.0, 37.0, 0.002851, 0.13, 0.00066, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1668.0, 2182.0, 0.0029, 0.0694, 0.0107, 720.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1727.0, 2120.0, 0.000367, 0.023333, 0.0321, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1726.0, 2120.0, 0.000367, 0.023333, 0.0321, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1697.0, 1958.0, 0.000117, 0.023367, 0.01176, 720.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1643.0, 1765.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1725.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1724.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1710.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.06818, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1672.0, 1843.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1671.0, 1843.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1723.0, 2011.0, 0.005759, 0.207937, 0.001512, 32.0, 0.0,0.0,1.0375, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1722.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1721.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1720.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1719.0, 2180.0, 0.0054, 0.116, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1676.0, 1850.0, 0.000178, 0.053846, 0.0, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1675.0, 1850.0, 0.000178, 0.053846, 0.0, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1718.0, 2045.0, 0.000218, 0.01863, 0.0, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1717.0, 2046.0, 0.000218, 0.01827, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1692.0, 2045.0, 0.000175, 0.015526, 0.013338, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1663.0, 2045.0, 0.000175, 0.015526, 0.013338, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1709.0, 2195.0, 0.001558, 0.08475, 0.00336, 160.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1708.0, 2195.0, 0.001879, 0.088667, 0.00435, 160.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[5.0, 1764.0, 0.002083, 0.116667, 0.00156, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[29.0, 56.0, 0.002914, 0.127, 0.0012, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2038.0, 2096.0, 0.0022, 0.114, 0.0, 120.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1661.0, 1805.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1699.0, 2229.0, 0.000375, 0.022667, 0.00294, 720.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1698.0, 2229.0, 0.001028, 0.046333, 0.0054, 720.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1714.0, 2158.0, 0.0008, 0.0461, 0.0, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1713.0, 2158.0, 0.0008, 0.0463, 0.0, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1716.0, 2229.0, 0.0008, 0.0451, 0.0, 370.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1715.0, 2229.0, 0.0007, 0.0411, 0.0, 370.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1680.0, 1828.0, 0.002439, 0.111755, 0.000752, 120.0, 0.0,0.0,0.988943, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1641.0, 1762.0, 0.003175, 0.1308, 
0.00239, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1658.0, 1801.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[21.0, 38.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1667.0, 1836.0, 0.000318, 0.02355, 0.00108, 720.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1673.0, 1835.0, 0.000328, 0.023833, 0.00168, 720.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1712.0, 2027.0, 0.0006, 0.0348, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1711.0, 2027.0, 0.0006, 0.0348, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1749.0, 1969.0, 0.000223, 0.0195, 0.004392, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1748.0, 1969.0, 0.000228, 0.019319, 0.004248, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1684.0, 1860.0, 0.000526, 0.037775, 0.0028, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1683.0, 1860.0, 0.000528, 0.0378, 0.00236, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[22.0, 39.0, 0.000706, 0.0772, 0.00092, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1660.0, 1803.0, 0.003032, 0.14, 0.0013, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1689.0, 1905.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[117.0, 1905.0, 0.002828, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[110.0, 1905.0, 0.002841, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[108.0, 1905.0, 0.002828, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1688.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[118.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[111.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[107.0, 1904.0, 0.00297, 0.137, 0.0027, 50.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1751.0, 1902.0, 0.000223, 0.0195, 0.004176, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1750.0, 1902.0, 0.000219, 0.019278, 0.00432, 720.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2194.0, 1633.0, 0.002, 0.0983, 0.0, 150.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1635.0, 1633.0, 0.0014, 0.0563, 0.0, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1634.0, 1633.0, 0.0009, -0.003, 0.0, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2194.0, 1631.0, 0.002, 0.0997, 0.0, 150.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1635.0, 1631.0, 0.0014, 0.0567, 0.0, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1632.0, 1631.0, 0.0008, -0.0033, 0.0, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2194.0, 1628.0, 0.001271, 0.096333, 0.00115, 150.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1630.0, 1628.0, 0.001185, 0.057, 0.00115, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1629.0, 1628.0, 0.001033, -0.005, 0.00115, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1965.0, 1587.0, 6.7e-05, 0.018139, 0.00103533, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2231.0, 1587.0, 5.6e-05, -0.00171, 0.00103533, 1002.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1964.0, 1587.0, 0.000397, 0.03773, 0.00103533, 270.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1961.0, 1586.0, 6.4e-05, 0.01821, 0.00103533, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1962.0, 1586.0, 5.9e-05, -0.00176, 0.00103533, 1002.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 
],\n\t\t[1963.0, 1586.0, 0.000397, 0.037788, 0.00103533, 270.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2002.0, 1627.0, 8.6e-05, 0.01918, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1999.0, 1627.0, 8.8e-05, -0.00199, 0.0, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1997.0, 1627.0, 0.000652, 0.04874, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2001.0, 1626.0, 8.6e-05, 0.01918, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1998.0, 1626.0, 8.8e-05, -0.00199, 0.0, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1996.0, 1626.0, 0.000652, 0.04874, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1970.0, 1592.0, 6.6e-05, 0.018757, 0.00120233, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1592.0, 5.9e-05, -0.00301, 0.00120233, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1864.0, 1592.0, 0.000397, 0.038328, 0.00120233, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1972.0, 1591.0, 6.6e-05, 0.018757, 0.00126933, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2221.0, 1591.0, 5.9e-05, -0.00301, 0.00126933, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1863.0, 1591.0, 0.000397, 0.038328, 0.00126933, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1772.0, 1556.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 1556.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1759.0, 1556.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1772.0, 1555.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1770.0, 1555.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1758.0, 1555.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,0.0,-30.0, 30.0, 0.1 ],\n\t\t[1855.0, 1584.0, 8.3e-05, 0.021439, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1856.0, 1584.0, 6.5e-05, -0.00326, 0.0, 400.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1957.0, 1584.0, 0.000454, 0.038229, 0.0, 400.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1813.0, 1570.0, 7.8e-05, 0.018807, 0.001336, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1812.0, 1570.0, 5.7e-05, -0.00212, 0.001336, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1811.0, 1570.0, 0.000428, 0.033328, 0.001336, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1886.0, 1573.0, 6.3e-05, 0.018623, 0.00153633, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1887.0, 1573.0, 6.3e-05, -0.00257, 0.00153633, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1884.0, 1573.0, 0.000381, 0.035269, 0.00153633, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1927.0, 1578.0, 5.8e-05, 0.017275, 0.002004, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1925.0, 1578.0, 6.9e-05, -0.00173, 0.002004, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1885.0, 1578.0, 0.000349, 0.039152, 0.002004, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2143.0, 1624.0, 0.000125, 0.02587, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2150.0, 1624.0, 9.2e-05, -0.00513, 0.0, 750.0, 0.0,0.0,1.07273, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1625.0, 1624.0, 0.000505, 0.04532, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2138.0, 1622.0, 0.000228, 0.02372, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2147.0, 1622.0, 0.000123, -0.00264, 0.0, 750.0, 
0.0,0.0,1.06818, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1623.0, 1622.0, 0.000586, 0.02816, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1790.0, 1564.0, 9.6e-05, 0.0209, 0.002, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1564.0, 7.9e-05, -0.00277, 0.002, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1565.0, 1564.0, 0.000524, 0.052407, 0.002, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1790.0, 1563.0, 9.6e-05, 0.0209, 0.002, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2094.0, 1563.0, 7.9e-05, -0.00277, 0.002, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1565.0, 1563.0, 0.000524, 0.052407, 0.002, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2152.0, 1619.0, 0.00085, 0.01, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1621.0, 1619.0, 0.0048, 0.1195, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1620.0, 1619.0, 0.0027, 0.1195, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1875.0, 1590.0, 8e-05, 0.01881, 0.0, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1874.0, 1590.0, 0.00277, -0.00232, 0.0, 1002.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1873.0, 1590.0, 0.0004, 0.037, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1974.0, 1572.0, 8e-06, 0.018685, 0.00153333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2202.0, 1572.0, -1e-05, -0.0033, 0.00153333, 10000.0, 0.0,0.0,1.01932, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1872.0, 1572.0, 0.000442, 0.039535, 0.00153333, 300.0, 0.0,0.0,0.978409, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2082.0, 1618.0, 0.000117, 0.02364, 0.00205, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2089.0, 1618.0, 4.2e-05, -0.00236, 0.00205, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2078.0, 1618.0, 0.000345, 0.031, 0.00205, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2083.0, 1617.0, 6.6e-05, 0.022113, 0.001075, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 1617.0, 9e-05, -0.00185, 0.001075, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2077.0, 1617.0, 0.000509, 0.047513, 0.001075, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2080.0, 1616.0, 0.000115, 0.022847, 0.00225, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2088.0, 1616.0, 0.000118, -0.00186, 0.00225, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2076.0, 1616.0, 0.000507, 0.03022, 0.00225, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1786.0, 1562.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 1562.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1755.0, 1562.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1786.0, 1561.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1785.0, 1561.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1754.0, 1561.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1868.0, 1615.0, 0.000105, 0.01782, 0.003375, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1867.0, 1615.0, 5.8e-05, -0.00247, 0.003375, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2072.0, 1615.0, 0.000494, 0.030927, 0.003375, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1866.0, 1614.0, 7.9e-05, 0.019153, 0.00145, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1865.0, 1614.0, 6.4e-05, -0.00314, 0.00145, 750.0, 
0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2007.0, 1614.0, 0.000335, 0.030553, 0.00145, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1799.0, 1568.0, 7.8e-05, 0.018079, 0.001336, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1568.0, 4.9e-05, -0.00241, 0.001336, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1569.0, 1568.0, 0.000403, 0.038458, 0.001336, 300.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1798.0, 1566.0, 7.4e-05, 0.018598, 0.001837, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1797.0, 1566.0, 5.3e-05, -0.00316, 0.001837, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1567.0, 1566.0, 0.000378, 0.039316, 0.001837, 300.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2013.0, 1611.0, 0.001709, 0.13125, 0.000972, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1613.0, 1611.0, 0.001024, 0.070417, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1612.0, 1611.0, 0.001075, -0.00625, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2013.0, 1608.0, 0.0021, 0.1588, 0.000972, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1610.0, 1608.0, 0.0012, 0.0852, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1609.0, 1608.0, 0.0013, 0.0063, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1960.0, 1585.0, 7.3e-05, 0.018815, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1585.0, 6e-05, -0.00139, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1881.0, 1585.0, 0.000405, 0.037565, 0.00096667, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[123.0, 1583.0, 7.4e-05, 0.018955, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1920.0, 1583.0, 6.1e-05, -0.00145, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1808.0, 1583.0, 0.000406, 0.037395, 0.00096667, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2056.0, 1607.0, 8.6e-05, 0.012, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2061.0, 1607.0, 8.4e-05, 0.0052, 0.0, 750.0, 0.0,0.0,1.07045, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2055.0, 1607.0, 0.00064, 0.0098, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2057.0, 1588.0, 8.2e-05, 0.01899, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2062.0, 1588.0, 9.5e-05, 0.00187, 0.0, 750.0, 0.0,0.0,1.07045, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1967.0, 1588.0, 0.000595, 0.04896, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2050.0, 1606.0, 0.000124, 0.026467, 0.003, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2054.0, 1606.0, 8.8e-05, -0.00659, 0.003, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2049.0, 1606.0, 0.000433, 0.03668, 0.003, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2019.0, 1605.0, 6.9e-05, 0.01806, 0.000725, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2018.0, 1605.0, 8.7e-05, -0.00197, 0.000725, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2017.0, 1605.0, 0.000344, 0.03106, 0.000725, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2081.0, 1576.0, 5.9e-05, 0.017137, 0.0009, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2052.0, 1576.0, 7.4e-05, -0.0013, 0.0009, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1880.0, 1576.0, 0.000392, 0.036947, 0.0009, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 1604.0, 8.3e-05, 0.019047, 0.001425, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2124.0, 
1604.0, 6.1e-05, -0.00317, 0.001425, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1878.0, 1604.0, 0.000339, 0.031247, 0.001425, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2230.0, 1582.0, 6e-05, 0.017225, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2014.0, 1582.0, 7.3e-05, -0.00129, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1877.0, 1582.0, 0.000392, 0.036925, 0.00096667, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1773.0, 1558.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1769.0, 1558.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1761.0, 1558.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1773.0, 1557.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1769.0, 1557.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1760.0, 1557.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1787.0, 8.0, 0.000881, 0.085611, 0.000444, 180.0, 0.0,0.0,1.0625, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1646.0, 8.0, 0.000767, -0.00617, 0.000444, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[10.0, 8.0, 9.1e-05, 0.051056, 0.000444, 90.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1787.0, 7.0, 0.000881, 0.085611, 0.000444, 180.0, 0.0,0.0,1.0625, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1647.0, 7.0, 0.000767, -0.00617, 0.000444, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[9.0, 7.0, 9.1e-05, 0.051056, 0.000444, 90.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2046.0, 1603.0, 0.0, 0.04475, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1935.0, 1603.0, 0.0, -0.00462, 0.0, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2043.0, 1603.0, 0.0, 0.07026, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2096.0, 1601.0, 0.0018, 0.1243, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1602.0, 1601.0, 0.0015, 0.0698, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2041.0, 1601.0, 0.0014, -0.0077, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2179.0, 1598.0, 0.0063, 0.2671, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1600.0, 1598.0, 0.0058, 0.1401, 0.0, 400.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1599.0, 1598.0, 0.003, -0.0097, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2179.0, 1596.0, 0.0063, 0.2652, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1600.0, 1596.0, 0.0059, 0.1419, 0.0, 400.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1597.0, 1596.0, 0.0028, -0.0079, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1895.0, 1575.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1893.0, 1575.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1890.0, 1575.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1892.0, 1574.0, 9.1e-05, 0.02099, 0.0, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1891.0, 1574.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1889.0, 1574.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2033.0, 1595.0, 8.5e-05, 0.01857, 0.00183333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2035.0, 1595.0, 4.7e-05, -0.00287, 0.00183333, 1000.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 
30.0, 0.1 ],\n\t\t[2031.0, 1595.0, 0.000426, 0.03594, 0.00183333, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1981.0, 1593.0, 7.3e-05, 0.0163, 0.001, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1980.0, 1593.0, 5.4e-05, -0.001, 0.001, 1000.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1979.0, 1593.0, 0.000377, 0.03705, 0.001, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2023.0, 1594.0, 0.000116, 0.018433, 0.002075, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2025.0, 1594.0, 7.4e-05, -0.00326, 0.002075, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2022.0, 1594.0, 0.000476, 0.032887, 0.002075, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2024.0, 1589.0, 6.4e-05, 0.016337, 0.00120233, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2228.0, 1589.0, 6.3e-05, -0.0024, 0.00120233, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1862.0, 1589.0, 0.000244, 0.030978, 0.00120233, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1899.0, 1581.0, 8.5e-05, 0.018221, 0.001275, 750.0, 0.0,0.0,1.072, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1581.0, 8.5e-05, -0.00243, 0.001275, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1879.0, 1581.0, -9e-05, 0.041486, 0.001275, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1899.0, 1579.0, 8.4e-05, 0.018087, 0.00135, 750.0, 0.0,0.0,1.072, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1923.0, 1579.0, 8.4e-05, -0.00222, 0.00135, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1580.0, 1579.0, -8e-05, 0.04158, 0.00135, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1771.0, 1560.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1560.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1757.0, 1560.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1771.0, 1559.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1768.0, 1559.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1756.0, 1559.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1853.0, 1571.0, 6.1e-05, 0.01713, 0.00126667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1852.0, 1571.0, 7.3e-05, -0.00142, 0.00126667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1851.0, 1571.0, 0.000408, 0.0376, 0.00126667, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1926.0, 1577.0, 5e-05, 0.01767, 0.00133333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[2196.0, 1577.0, 7e-05, -0.00193, 0.00133333, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],\n\t\t[1882.0, 1577.0, 0.000396, 0.03757, 0.00133333, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ]\n\t])\n\tppc[\"gencost\"] = array([\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 44.0, 0.0, 66.0, 33.0, 52.8, 26.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 44.0, 0.0, 66.0, 33.0, 52.8, 26.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 50.0, 0.0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 30.0, 0.0, 45.0, 22.5, 36.0, 18.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 30.0, 0.0, 45.0, 22.5, 36.0, 18.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 
9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 400.0, 0.0, 600.0, 300.0, 480.0, 240.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 400.0, 0.0, 600.0, 300.0, 480.0, 240.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 14.4, 0.0, 21.6, 10.8, 17.28, 8.64 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 14.4, 0.0, 21.6, 10.8, 17.28, 8.64 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 16.8, 0.0, 25.2, 12.6, 20.16, 10.08 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 16.8, 0.0, 
25.2, 12.6, 20.16, 10.08 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 48.0, 0.0, 72.0, 36.0, 57.6, 28.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 48.0, 0.0, 72.0, 36.0, 57.6, 28.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 60.0, 0.0, 90.0, 45.0, 72.0, 36.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 60.0, 0.0, 90.0, 45.0, 72.0, 36.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 84.0, 0.0, 126.0, 63.0, 100.8, 50.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 39.6, 0.0, 59.4, 29.7, 47.52, 23.76 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 
0.0, 0.0, 3.0, 0.0, 320.0, 0.0, 480.0, 240.0, 384.0, 192.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 320.0, 0.0, 480.0, 240.0, 384.0, 192.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 403.2, 0.0, 604.8, 302.4, 483.84, 241.92 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 403.2, 0.0, 604.8, 302.4, 483.84, 241.92 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 54.0, 0.0, 81.0, 40.5, 64.8, 32.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 54.0, 0.0, 81.0, 40.5, 64.8, 32.4 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],\n\t\t[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ]\n\t])\n\treturn ppc", "from numpy import array\ndef scigrid_2011_01_06_19():\n\tppc = {\"version\": '2'}\n\tppc[\"baseMVA\"] = 100.0\n\tppc[\"bus\"] = 
array([\n\t\t[586,\t\t3,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[589,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[590,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[593,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[594,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[595,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[598,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[599,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[601,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[602,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[603,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[607,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[608,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[609,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[612,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[613,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[614,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[616,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[617,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[618,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[619,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[621,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[624,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[628,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[629,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[631,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[632,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[637,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[638,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[640,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[641,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[642,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[643,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[647,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[650,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[652,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[655,\t\t2,\t\
t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[663,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[666,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[670,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[672,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[676,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[681,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[683,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[687,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[689,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[691,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[694,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[695,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[696,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[697,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[698,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[702,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[705,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[707,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[713,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[714,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[716,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[717,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[719,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[722,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[723,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[724,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[727,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[728,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[730,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[732,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[735,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[738,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[741,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[742,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[743,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[746,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,
\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[747,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[748,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[749,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[750,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[753,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[758,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[760,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[761,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[762,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[763,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[765,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[767,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[769,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[771,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[772,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[774,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[777,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[778,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[781,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[784,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[785,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[787,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[788,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[789,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[791,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[792,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[795,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[800,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[801,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[802,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[805,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[806,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[808,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[809,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[811,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[814,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\
t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[816,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[817,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[821,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[822,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[826,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[830,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[834,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[835,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[836,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[837,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[839,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[841,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[843,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[844,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[845,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[849,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[850,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[851,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[853,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[855,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[856,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[857,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[858,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[859,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[860,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[864,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[865,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[867,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[869,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[870,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[872,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[873,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[874,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[875,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[877,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[881,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\
t\t[882,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[883,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[885,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[886,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[889,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[890,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[893,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[894,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[895,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[896,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[898,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[900,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[902,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[903,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[905,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[906,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[907,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[909,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[915,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[917,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[918,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[920,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[921,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[922,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[923,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[925,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[931,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[935,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[936,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[937,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[939,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[940,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[944,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[950,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[952,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[957,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[958,\t\t2,\t\t0,\t\t0,\
t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[959,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[960,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[963,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[965,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[966,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[967,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[968,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999554,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[969,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999554,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[971,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[973,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[976,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[978,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[981,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[982,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[983,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[984,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[985,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[986,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[987,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[988,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[993,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[994,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[995,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[997,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[999,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1000,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1002,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1003,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1007,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1008,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1010,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1011,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1012,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1014,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1026,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1027,\t\t2,\t\t0,\t\t0,\t\t0,\
t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1028,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1029,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1030,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1031,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1032,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1033,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1034,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1035,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1036,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1037,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1038,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1039,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1040,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1041,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1042,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1043,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1044,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1045,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1046,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1047,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1048,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1049,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1050,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1051,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1052,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1053,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1054,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1055,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1056,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1057,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1058,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1059,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1060,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1061,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1062,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1063,\t\t2,\t\t0,\t\t
0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1064,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1065,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1066,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1067,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1068,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1069,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1070,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1071,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1072,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1073,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1074,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1075,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1076,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1077,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1078,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1079,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1080,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1081,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1082,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1083,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1084,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1085,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1086,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1087,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1088,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1089,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1090,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1091,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1092,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1093,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1094,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1095,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1096,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1097,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1098,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1099,\t\t2,\
t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1100,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1101,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1102,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1103,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1104,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1105,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1106,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1107,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1108,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1109,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1110,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1111,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1112,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1113,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1114,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1115,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1116,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1117,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1118,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1119,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1120,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1121,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1122,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1123,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1124,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1125,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1126,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1127,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1128,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1129,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1130,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1131,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1132,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1133,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1134,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[113
5,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1136,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1137,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1138,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1139,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1140,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1141,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1142,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1143,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1144,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1145,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1146,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1147,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1148,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1149,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1150,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1151,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1152,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1153,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1154,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1155,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1156,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1157,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1158,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1159,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1160,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1161,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1162,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1163,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1164,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1165,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1166,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1167,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1168,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1169,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1170,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\
n\t\t[1171,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1172,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1173,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1174,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1175,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1176,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1177,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1178,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1179,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1180,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1181,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1182,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1183,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1184,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1185,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1186,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1187,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1188,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1189,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1190,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1191,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1192,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1193,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1194,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1195,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1196,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1197,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1198,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1201,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1202,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1203,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1204,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1205,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1206,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1207,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1208,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0
.9\t\t],\n\t\t[1209,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1210,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1211,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1212,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1213,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1214,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1215,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1216,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1217,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1218,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1219,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1220,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1221,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1222,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1223,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1224,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1225,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1226,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1227,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1228,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1229,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1230,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1231,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1232,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1235,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1236,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1237,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1238,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1239,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1240,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1241,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1242,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1243,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1244,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1245,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1246,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t
1.1,\t\t0.9\t\t],\n\t\t[1247,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1248,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1249,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1250,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1251,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1252,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1253,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1254,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1255,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1256,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1257,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1258,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1259,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1260,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1261,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1262,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1263,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1264,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1265,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1266,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1267,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1268,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1269,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1270,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1271,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1272,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1273,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1274,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1275,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1276,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1277,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1278,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1279,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1280,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1281,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1282,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\
t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1283,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1284,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1285,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1286,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1287,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1288,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1289,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1290,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1291,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1292,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1293,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1294,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1295,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1296,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1297,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1298,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1299,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1300,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1301,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1302,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1303,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1304,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1305,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1306,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1307,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1308,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1309,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1310,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1311,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1312,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1313,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1314,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1315,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1316,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1317,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1318,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t
\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1319,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1320,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1321,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1322,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1323,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1324,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1325,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1326,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1327,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1328,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1329,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1330,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1331,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1332,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1333,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1334,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1335,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1336,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1337,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1338,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1339,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1340,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1341,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1342,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1343,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1344,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1345,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1346,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1347,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1348,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1349,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1350,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1351,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1352,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1354,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1355,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0
,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1356,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1357,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1358,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1359,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1360,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1361,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1362,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1363,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1364,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1365,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1366,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1367,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1368,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1369,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1370,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1371,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1372,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1373,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1376,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1377,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1378,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1379,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1380,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1381,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1382,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1383,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1384,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1385,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1386,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1387,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1388,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1389,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1390,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1391,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1392,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1393,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t
0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1394,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1395,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1396,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1397,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1398,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1399,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1400,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1401,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1402,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1403,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1404,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1405,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1406,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1407,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1408,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1409,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1410,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1411,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1412,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1413,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1414,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1415,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1416,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1417,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1418,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1419,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1420,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1421,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999554,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1422,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1423,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1424,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1425,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1426,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1427,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1428,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1429,\t\t2,\t\t0,\t\t0,\t
\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1430,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1431,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1432,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1433,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1434,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1435,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1436,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1437,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1438,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1439,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1440,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1441,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1442,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1443,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1444,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1445,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1446,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1447,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1448,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1449,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1450,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1451,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1452,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1453,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1454,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1455,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1456,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1457,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1458,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1459,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1460,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1461,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1462,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1463,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1464,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1465,\t\t2,\t\t0
,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1466,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1467,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1468,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1469,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1470,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1471,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1472,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1473,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1474,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1475,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1476,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1477,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1478,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1479,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1480,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1481,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1482,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1483,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1484,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1485,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1486,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1487,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1488,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1489,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1490,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1491,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1492,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1493,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1494,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1495,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1496,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1497,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1498,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1499,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1500,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1501,\t
\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1502,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1503,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1504,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1505,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1506,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1507,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1508,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1509,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1510,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1511,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1512,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1513,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1514,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1515,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1516,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1517,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1518,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1519,\t\t2,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[1,\t\t1,\t\t299.357139,\t\t59.871428,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[2,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000011,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[3,\t\t1,\t\t52.469255,\t\t10.493851,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[4,\t\t1,\t\t86.287429,\t\t17.257486,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[5,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999623,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[6,\t\t1,\t\t253.37566,\t\t50.675132,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[7,\t\t1,\t\t190.950068,\t\t38.190014,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[8,\t\t1,\t\t159.773374,\t\t31.954675,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[9,\t\t1,\t\t108.052236,\t\t21.610447,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[10,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000968,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[11,\t\t1,\t\t94.672178,\t\t18.934436,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[12,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000956,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[13,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00017,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[14,\t\t1,\t\t226.42112,\t\t45.284224,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[15,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00024,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[16,\t\t1,\t\t386.152959,\t\t77.230592,\t\t0,\t\t0,\t\t0,\t\
t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[17,\t\t1,\t\t90.949166,\t\t18.189833,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[18,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002568,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[19,\t\t1,\t\t224.701104,\t\t44.940221,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[20,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999285,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[21,\t\t1,\t\t966.249214,\t\t193.249843,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[22,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000582,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[23,\t\t1,\t\t126.514784,\t\t25.302957,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[24,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[25,\t\t1,\t\t60.512903,\t\t12.102581,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[26,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000747,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[27,\t\t1,\t\t74.281264,\t\t14.856253,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[28,\t\t1,\t\t219.47888,\t\t43.895776,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[29,\t\t1,\t\t80.619162,\t\t16.123832,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[30,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000284,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[31,\t\t1,\t\t158.656429,\t\t31.731286,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[32,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.996654,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[33,\t\t1,\t\t198.925347,\t\t39.785069,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[34,\t\t1,\t\t39.465856,\t\t7.893171,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[35,\t\t1,\t\t2.612848,\t\t0.52257,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[36,\t\t1,\t\t8.650765,\t\t1.730153,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[37,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002852,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[38,\t\t1,\t\t208.416239,\t\t41.683248,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[39,\t\t1,\t\t68.245581,\t\t13.649116,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[40,\t\t1,\t\t71.284643,\t\t14.256929,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[41,\t\t1,\t\t76.614835,\t\t15.322967,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[42,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001121,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[43,\t\t1,\t\t117.492302,\t\t23.49846,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[44,\t\t1,\t\t150.313981,\t\t30.062796,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[45,\t\t1,\t\t79.790022,\t\t15.958004,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[46,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000273,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[47,\t\t1,\t\t346.933422,\t\t69.386684,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[48,\t\t1,\t\t238.470527,\t\t47.694105,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\
t],\n\t\t[49,\t\t1,\t\t60.321012,\t\t12.064202,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[50,\t\t1,\t\t87.835536,\t\t17.567107,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[51,\t\t1,\t\t113.829121,\t\t22.765824,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[52,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000133,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[53,\t\t1,\t\t172.71746,\t\t34.543492,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[54,\t\t1,\t\t87.750526,\t\t17.550105,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[55,\t\t1,\t\t86.057622,\t\t17.211524,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[56,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999733,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[57,\t\t1,\t\t102.725918,\t\t20.545184,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[58,\t\t1,\t\t235.309339,\t\t47.061868,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[59,\t\t1,\t\t67.205785,\t\t13.441157,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[60,\t\t1,\t\t35.432755,\t\t7.086551,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[61,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999718,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[62,\t\t1,\t\t270.131503,\t\t54.026301,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[63,\t\t1,\t\t159.456313,\t\t31.891263,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[64,\t\t1,\t\t1692.15463,\t\t338.430926,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[65,\t\t1,\t\t5.638288,\t\t1.127658,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[66,\t\t1,\t\t178.896436,\t\t35.779287,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[67,\t\t1,\t\t383.762992,\t\t76.752598,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[68,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998457,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[69,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00038,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[70,\t\t1,\t\t725.992049,\t\t145.19841,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[71,\t\t1,\t\t168.711201,\t\t33.74224,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[72,\t\t1,\t\t276.325796,\t\t55.265159,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[73,\t\t1,\t\t88.4623,\t\t17.69246,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[74,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.003234,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[75,\t\t1,\t\t110.255161,\t\t22.051032,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[76,\t\t1,\t\t106.420421,\t\t21.284084,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[77,\t\t1,\t\t103.07545,\t\t20.61509,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[78,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.995361,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[79,\t\t1,\t\t106.433346,\t\t21.286669,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[80,\t\t1,\t\t113.048636,\t\t22.609727,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t
[81,\t\t1,\t\t127.616514,\t\t25.523303,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[82,\t\t1,\t\t4.247153,\t\t0.849431,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[83,\t\t1,\t\t284.165823,\t\t56.833165,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[84,\t\t1,\t\t27.974372,\t\t5.594874,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[85,\t\t1,\t\t97.009691,\t\t19.401938,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[86,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000053,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[87,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000327,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[88,\t\t1,\t\t78.299678,\t\t15.659936,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[89,\t\t1,\t\t97.142735,\t\t19.428547,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[90,\t\t1,\t\t112.19557,\t\t22.439114,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[91,\t\t1,\t\t38.971155,\t\t7.794231,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[92,\t\t1,\t\t42.531201,\t\t8.50624,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[93,\t\t1,\t\t41.714588,\t\t8.342918,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[94,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00087,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[95,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001187,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[96,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[97,\t\t1,\t\t5.866845,\t\t1.173369,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[98,\t\t1,\t\t107.867686,\t\t21.573537,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[99,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000716,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[100,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001847,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[101,\t\t1,\t\t76.381322,\t\t15.276264,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[102,\t\t1,\t\t147.839608,\t\t29.567922,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[103,\t\t1,\t\t172.85311,\t\t34.570622,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[104,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999953,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[105,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000152,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[106,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99996,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[107,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000002,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[108,\t\t1,\t\t121.926795,\t\t24.385359,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[109,\t\t1,\t\t49.366079,\t\t9.873216,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[110,\t\t1,\t\t64.079149,\t\t12.81583,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[111,\t\t1,\t\t112.924775,\t\t22.584955,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[112,\t\t1,\t\t57.154171,\t\t11.430834,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[113,\t\t1,\t\t90.095678,\t\t18.019136,\t\t0,\t\t0,\t\
t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[114,\t\t1,\t\t132.688902,\t\t26.53778,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[115,\t\t1,\t\t85.536735,\t\t17.107347,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[116,\t\t1,\t\t143.133962,\t\t28.626792,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[117,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000294,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[118,\t\t1,\t\t221.622459,\t\t44.324492,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[119,\t\t1,\t\t42.959533,\t\t8.591907,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[120,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001148,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[121,\t\t1,\t\t58.339066,\t\t11.667813,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[122,\t\t1,\t\t51.07526,\t\t10.215052,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[123,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000162,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[124,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[125,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999713,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[126,\t\t1,\t\t267.788855,\t\t53.557771,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[127,\t\t1,\t\t207.028957,\t\t41.405791,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[128,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001581,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[129,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[130,\t\t1,\t\t285.45527,\t\t57.091054,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[131,\t\t1,\t\t63.028277,\t\t12.605655,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[132,\t\t1,\t\t164.116103,\t\t32.823221,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[133,\t\t1,\t\t54.972465,\t\t10.994493,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[134,\t\t1,\t\t54.747354,\t\t10.949471,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[135,\t\t1,\t\t54.81994,\t\t10.963988,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[136,\t\t1,\t\t53.105693,\t\t10.621139,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[137,\t\t1,\t\t42.479666,\t\t8.495933,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[138,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000183,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[139,\t\t1,\t\t83.213361,\t\t16.642672,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[140,\t\t1,\t\t57.545602,\t\t11.50912,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[141,\t\t1,\t\t68.181381,\t\t13.636276,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[142,\t\t1,\t\t75.023859,\t\t15.004772,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[143,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999983,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[144,\t\t1,\t\t68.338979,\t\t13.667796,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[145,\t\t1,\t\t198.799897,\t\t39.759979,\t\t0,\t\t0,\t\t0,\t
\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[146,\t\t1,\t\t256.290464,\t\t51.258093,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[147,\t\t1,\t\t157.090963,\t\t31.418193,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[148,\t\t1,\t\t221.684188,\t\t44.336838,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[149,\t\t1,\t\t142.918192,\t\t28.583638,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[150,\t\t1,\t\t186.594538,\t\t37.318908,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[151,\t\t1,\t\t43.970718,\t\t8.794144,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[152,\t\t1,\t\t91.278651,\t\t18.25573,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[153,\t\t1,\t\t162.855958,\t\t32.571192,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[154,\t\t1,\t\t167.285387,\t\t33.457077,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[155,\t\t1,\t\t174.242515,\t\t34.848503,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[156,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[157,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001193,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[158,\t\t1,\t\t45.9071,\t\t9.18142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[159,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999774,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[160,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999991,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[161,\t\t1,\t\t142.515256,\t\t28.503051,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[162,\t\t1,\t\t213.018072,\t\t42.603614,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[163,\t\t1,\t\t42.601603,\t\t8.520321,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[164,\t\t1,\t\t42.772929,\t\t8.554586,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[165,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999992,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[166,\t\t1,\t\t50.008474,\t\t10.001695,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[167,\t\t1,\t\t70.349335,\t\t14.069867,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[168,\t\t1,\t\t48.012523,\t\t9.602505,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[169,\t\t1,\t\t164.360711,\t\t32.872142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[170,\t\t1,\t\t123.503215,\t\t24.700643,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[171,\t\t1,\t\t105.409947,\t\t21.081989,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[172,\t\t1,\t\t51.73233,\t\t10.346466,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[173,\t\t1,\t\t49.419687,\t\t9.883937,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[174,\t\t1,\t\t74.161243,\t\t14.832249,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[175,\t\t1,\t\t49.387297,\t\t9.877459,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[176,\t\t1,\t\t172.096394,\t\t34.419279,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[177,\t\t1,\t\t28.06
2824,\t\t5.612565,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[178,\t\t1,\t\t148.627602,\t\t29.72552,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[179,\t\t1,\t\t54.764142,\t\t10.952828,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[180,\t\t1,\t\t48.139082,\t\t9.627816,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[181,\t\t1,\t\t36.333993,\t\t7.266799,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[182,\t\t1,\t\t1.645947,\t\t0.329189,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[183,\t\t1,\t\t492.683663,\t\t98.536733,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[184,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000023,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[185,\t\t1,\t\t105.357552,\t\t21.07151,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[186,\t\t1,\t\t56.734494,\t\t11.346899,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[187,\t\t1,\t\t33.183902,\t\t6.63678,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[188,\t\t1,\t\t49.387297,\t\t9.877459,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[189,\t\t1,\t\t181.220426,\t\t36.244085,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[190,\t\t1,\t\t239.697919,\t\t47.939584,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[191,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[192,\t\t1,\t\t57.72652,\t\t11.545304,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[193,\t\t1,\t\t49.307632,\t\t9.861526,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[194,\t\t1,\t\t34.037848,\t\t6.80757,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[195,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[196,\t\t1,\t\t47.753116,\t\t9.550623,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[197,\t\t1,\t\t75.658474,\t\t15.131695,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[198,\t\t1,\t\t44.770634,\t\t8.954127,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[199,\t\t1,\t\t57.640701,\t\t11.52814,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[200,\t\t1,\t\t49.388443,\t\t9.877689,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[201,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00096,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[202,\t\t1,\t\t50.609135,\t\t10.121827,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[203,\t\t1,\t\t6.668207,\t\t1.333641,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[204,\t\t1,\t\t195.44382,\t\t39.088764,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[205,\t\t1,\t\t97.730708,\t\t19.546142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[206,\t\t1,\t\t46.90391,\t\t9.380782,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[207,\t\t1,\t\t139.472027,\t\t27.894405,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[208,\t\t1,\t\t41.069012,\t\t8.213802,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\
t\t],\n\t\t[209,\t\t1,\t\t57.071575,\t\t11.414315,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[210,\t\t1,\t\t65.564558,\t\t13.112912,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[211,\t\t1,\t\t230.408553,\t\t46.081711,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[212,\t\t1,\t\t57.748654,\t\t11.549731,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[213,\t\t1,\t\t270.71278,\t\t54.142556,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[214,\t\t1,\t\t182.155387,\t\t36.431077,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[215,\t\t1,\t\t385.176656,\t\t77.035331,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[216,\t\t1,\t\t129.876458,\t\t25.975292,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[217,\t\t1,\t\t41.617029,\t\t8.323406,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[218,\t\t1,\t\t126.787729,\t\t25.357546,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[219,\t\t1,\t\t203.763334,\t\t40.752667,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[220,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000027,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[221,\t\t1,\t\t116.237427,\t\t23.247485,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[222,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[223,\t\t1,\t\t115.198486,\t\t23.039697,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[224,\t\t1,\t\t133.959969,\t\t26.791994,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[225,\t\t1,\t\t240.532809,\t\t48.106562,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[226,\t\t1,\t\t84.025543,\t\t16.805109,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[227,\t\t1,\t\t104.678785,\t\t20.935757,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[228,\t\t1,\t\t102.634351,\t\t20.52687,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[229,\t\t1,\t\t227.112315,\t\t45.422463,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[230,\t\t1,\t\t54.474503,\t\t10.894901,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[231,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000717,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[232,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999969,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[233,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999805,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[234,\t\t1,\t\t194.044238,\t\t38.808848,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[235,\t\t1,\t\t63.1006,\t\t12.62012,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[236,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999975,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[237,\t\t1,\t\t0.522229,\t\t0.104446,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[238,\t\t1,\t\t71.399477,\t\t14.279895,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[239,\t\t1,\t\t98.64733,\t\t19.729466,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[240,\t\t1,\t\t622.248439,\t\t124.449688,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t
0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[241,\t\t1,\t\t460.442229,\t\t92.088446,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[242,\t\t1,\t\t167.655348,\t\t33.53107,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[243,\t\t1,\t\t135.264433,\t\t27.052887,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[244,\t\t1,\t\t161.157525,\t\t32.231505,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[245,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001372,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[246,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999902,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[247,\t\t1,\t\t31.9808,\t\t6.39616,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[248,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[249,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999996,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[250,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999994,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[251,\t\t1,\t\t79.369092,\t\t15.873818,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[252,\t\t1,\t\t203.545412,\t\t40.709082,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[253,\t\t1,\t\t89.364203,\t\t17.872841,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[254,\t\t1,\t\t28.532507,\t\t5.706501,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[255,\t\t1,\t\t140.320492,\t\t28.064098,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[256,\t\t1,\t\t160.923187,\t\t32.184637,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[257,\t\t1,\t\t77.66509,\t\t15.533018,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[258,\t\t1,\t\t253.101148,\t\t50.62023,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[259,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999295,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[260,\t\t1,\t\t157.520213,\t\t31.504043,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[261,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.002014,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[262,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999674,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[263,\t\t1,\t\t225.962539,\t\t45.192508,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[264,\t\t1,\t\t292.520695,\t\t58.504139,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[265,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000009,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[266,\t\t1,\t\t140.975489,\t\t28.195098,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[267,\t\t1,\t\t178.303407,\t\t35.660681,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[268,\t\t1,\t\t62.00365,\t\t12.40073,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[269,\t\t1,\t\t49.791256,\t\t9.958251,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[270,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[271,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[272,\t\t1,\t\t1.015925,\t\t0.203185,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t
0.9\t\t],\n\t\t[273,\t\t1,\t\t138.928224,\t\t27.785645,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[274,\t\t1,\t\t270.058165,\t\t54.011633,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[275,\t\t1,\t\t50.556363,\t\t10.111273,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[276,\t\t1,\t\t197.081554,\t\t39.416311,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[277,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998827,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[278,\t\t1,\t\t153.854372,\t\t30.770874,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[279,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.998808,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[280,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999709,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[281,\t\t1,\t\t203.2232,\t\t40.64464,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[282,\t\t1,\t\t287.389078,\t\t57.477816,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[283,\t\t1,\t\t115.198021,\t\t23.039604,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[284,\t\t1,\t\t174.760733,\t\t34.952147,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[285,\t\t1,\t\t77.937156,\t\t15.587431,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[286,\t\t1,\t\t163.343691,\t\t32.668738,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[287,\t\t1,\t\t100.394621,\t\t20.078924,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[288,\t\t1,\t\t64.573121,\t\t12.914624,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[289,\t\t1,\t\t101.554791,\t\t20.310958,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[290,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.004653,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[291,\t\t1,\t\t66.832008,\t\t13.366402,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[292,\t\t1,\t\t131.756242,\t\t26.351248,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[293,\t\t1,\t\t116.121759,\t\t23.224352,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[294,\t\t1,\t\t30.944694,\t\t6.188939,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[295,\t\t1,\t\t64.747078,\t\t12.949416,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[296,\t\t1,\t\t183.817107,\t\t36.763421,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[297,\t\t1,\t\t193.193842,\t\t38.638768,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[298,\t\t1,\t\t102.010189,\t\t20.402038,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[299,\t\t1,\t\t98.79619,\t\t19.759238,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[300,\t\t1,\t\t269.147572,\t\t53.829514,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[301,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000038,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[302,\t\t1,\t\t226.723905,\t\t45.344781,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[303,\t\t1,\t\t116.451973,\t\t23.290395,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[304,\t\t1,\t\t99.99739,\t\t19.999478,\t\t0,\t\
t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[305,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99962,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[306,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001477,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[307,\t\t1,\t\t118.606198,\t\t23.72124,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[308,\t\t1,\t\t146.22564,\t\t29.245128,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[309,\t\t1,\t\t239.24571,\t\t47.849142,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[310,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000141,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[311,\t\t1,\t\t203.217454,\t\t40.643491,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[312,\t\t1,\t\t91.392543,\t\t18.278509,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[313,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000343,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[314,\t\t1,\t\t283.075925,\t\t56.615185,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[315,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001462,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[316,\t\t1,\t\t110.912827,\t\t22.182565,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[317,\t\t1,\t\t149.340059,\t\t29.868012,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[318,\t\t1,\t\t245.420849,\t\t49.08417,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[319,\t\t1,\t\t8.791956,\t\t1.758391,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[320,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999996,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[321,\t\t1,\t\t207.977107,\t\t41.595421,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[322,\t\t1,\t\t26.476825,\t\t5.295365,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[323,\t\t1,\t\t2.754688,\t\t0.550938,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[324,\t\t1,\t\t486.962231,\t\t97.392446,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[325,\t\t1,\t\t158.630045,\t\t31.726009,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[326,\t\t1,\t\t12.861232,\t\t2.572246,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[327,\t\t1,\t\t110.679681,\t\t22.135936,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[328,\t\t1,\t\t188.615186,\t\t37.723037,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[329,\t\t1,\t\t283.694054,\t\t56.738811,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[330,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001153,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[331,\t\t1,\t\t22.524343,\t\t4.504869,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[332,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.994596,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[333,\t\t1,\t\t236.669238,\t\t47.333848,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[334,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999169,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[335,\t\t1,\t\t241.538812,\t\t48.307762,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[336,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.996
999,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[337,\t\t1,\t\t96.077057,\t\t19.215411,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[338,\t\t1,\t\t260.766786,\t\t52.153357,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[339,\t\t1,\t\t161.28065,\t\t32.25613,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[340,\t\t1,\t\t136.35953,\t\t27.271906,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[341,\t\t1,\t\t123.27174,\t\t24.654348,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[342,\t\t1,\t\t213.835906,\t\t42.767181,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[343,\t\t1,\t\t117.313496,\t\t23.462699,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[344,\t\t1,\t\t294.133032,\t\t58.826606,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[345,\t\t1,\t\t321.622889,\t\t64.324578,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[346,\t\t1,\t\t319.289533,\t\t63.857907,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[347,\t\t1,\t\t111.661092,\t\t22.332218,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[348,\t\t1,\t\t291.889448,\t\t58.37789,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[349,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99991,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[350,\t\t1,\t\t153.129465,\t\t30.625893,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[351,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999667,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[352,\t\t1,\t\t1013.60899,\t\t202.721798,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[353,\t\t1,\t\t3.047247,\t\t0.609449,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[354,\t\t1,\t\t20.702734,\t\t4.140547,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[355,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[356,\t\t1,\t\t0.0,\t\t0.0,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[357,\t\t1,\t\t0.051895,\t\t0.010379,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[358,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.00123,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[359,\t\t1,\t\t3.029978,\t\t0.605996,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[360,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000731,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[361,\t\t1,\t\t77.549559,\t\t15.509912,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[362,\t\t1,\t\t221.056378,\t\t44.211276,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[363,\t\t1,\t\t325.466629,\t\t65.093326,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[364,\t\t1,\t\t76.78937,\t\t15.357874,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[365,\t\t1,\t\t68.922537,\t\t13.784507,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[366,\t\t1,\t\t136.604249,\t\t27.32085,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[367,\t\t1,\t\t66.02882,\t\t13.205764,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[368,\t\t1,\t\t32.513676,\t\t6.50273
5,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[369,\t\t1,\t\t26.717579,\t\t5.343516,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[370,\t\t1,\t\t78.657315,\t\t15.731463,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[371,\t\t1,\t\t395.768976,\t\t79.153795,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[372,\t\t1,\t\t229.512116,\t\t45.902423,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[373,\t\t1,\t\t154.874942,\t\t30.974988,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[374,\t\t1,\t\t79.417248,\t\t15.88345,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[375,\t\t1,\t\t260.516148,\t\t52.10323,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[376,\t\t1,\t\t285.737149,\t\t57.14743,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[377,\t\t1,\t\t204.46909,\t\t40.893818,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[378,\t\t1,\t\t204.07553,\t\t40.815106,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[379,\t\t1,\t\t70.336094,\t\t14.067219,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[380,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.001431,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[381,\t\t1,\t\t235.208187,\t\t47.041637,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[382,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.99931,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[383,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999355,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[384,\t\t1,\t\t82.999126,\t\t16.599825,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[385,\t\t1,\t\t104.761187,\t\t20.952237,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[386,\t\t1,\t\t84.172473,\t\t16.834495,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[387,\t\t1,\t\t171.420679,\t\t34.284136,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[388,\t\t1,\t\t920.52654,\t\t184.105308,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[389,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999927,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[390,\t\t1,\t\t76.005723,\t\t15.201145,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[391,\t\t1,\t\t86.577001,\t\t17.3154,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[392,\t\t1,\t\t166.140394,\t\t33.228079,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[393,\t\t1,\t\t207.478268,\t\t41.495654,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[394,\t\t1,\t\t74.623968,\t\t14.924794,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[395,\t\t1,\t\t103.424209,\t\t20.684842,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[396,\t\t1,\t\t73.254309,\t\t14.650862,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[397,\t\t1,\t\t587.418866,\t\t117.483773,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[398,\t\t1,\t\t254.423799,\t\t50.88476,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[399,\t\t1,\t\t108.403069,\t\t21.680614,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1
,\t\t0.9\t\t],\n\t\t[400,\t\t1,\t\t57.755339,\t\t11.551068,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[401,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000607,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[402,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000402,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[403,\t\t1,\t\t28.676869,\t\t5.735374,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[404,\t\t1,\t\t101.030383,\t\t20.206077,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[405,\t\t1,\t\t761.669209,\t\t152.333842,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[406,\t\t1,\t\t57.709613,\t\t11.541923,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[407,\t\t1,\t\t114.237444,\t\t22.847489,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[408,\t\t1,\t\t330.310625,\t\t66.062125,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[409,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999945,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[410,\t\t1,\t\t42.765284,\t\t8.553057,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[411,\t\t1,\t\t40.436328,\t\t8.087266,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[412,\t\t1,\t\t2.840209,\t\t0.568042,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[413,\t\t1,\t\t141.788379,\t\t28.357676,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[414,\t\t1,\t\t12.039367,\t\t2.407873,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[415,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t1.000216,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[416,\t\t1,\t\t171.453258,\t\t34.290652,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[417,\t\t1,\t\t6.708639,\t\t1.341728,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[418,\t\t1,\t\t139.803992,\t\t27.960798,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[419,\t\t1,\t\t74.72424,\t\t14.944848,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[420,\t\t1,\t\t75.232124,\t\t15.046425,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[421,\t\t1,\t\t108.369957,\t\t21.673991,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[422,\t\t1,\t\t79.395462,\t\t15.879092,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[423,\t\t1,\t\t166.748016,\t\t33.349603,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[424,\t\t1,\t\t12.022102,\t\t2.40442,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[425,\t\t1,\t\t98.731794,\t\t19.746359,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[426,\t\t1,\t\t8.180233,\t\t1.636047,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[427,\t\t1,\t\t68.746812,\t\t13.749362,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[428,\t\t1,\t\t30.823937,\t\t6.164787,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[429,\t\t1,\t\t347.840816,\t\t69.568163,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[430,\t\t1,\t\t185.282839,\t\t37.056568,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[431,\t\t1,\t\t123.901479,\t\t24.780296,\t\t0,\
t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[432,\t\t1,\t\t144.833229,\t\t28.966646,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[433,\t\t1,\t\t74.034885,\t\t14.806977,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[434,\t\t1,\t\t38.53136,\t\t7.706272,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[435,\t\t1,\t\t154.101184,\t\t30.820237,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[436,\t\t1,\t\t82.272037,\t\t16.454407,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[437,\t\t1,\t\t18.736593,\t\t3.747319,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[438,\t\t1,\t\t50.283885,\t\t10.056777,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[439,\t\t1,\t\t93.622094,\t\t18.724419,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[440,\t\t1,\t\t79.120237,\t\t15.824047,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[441,\t\t1,\t\t60.656262,\t\t12.131252,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[442,\t\t1,\t\t80.268768,\t\t16.053754,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[443,\t\t1,\t\t174.030244,\t\t34.806049,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[444,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999997,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[445,\t\t1,\t\t79.077332,\t\t15.815466,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[446,\t\t1,\t\t36.667449,\t\t7.33349,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[447,\t\t1,\t\t69.711986,\t\t13.942397,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[448,\t\t1,\t\t51.231231,\t\t10.246246,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[449,\t\t1,\t\t258.32521,\t\t51.665042,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[450,\t\t1,\t\t158.082702,\t\t31.61654,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[451,\t\t1,\t\t67.549518,\t\t13.509904,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[452,\t\t1,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0,\t\t0.999998,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[453,\t\t1,\t\t45.271283,\t\t9.054257,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[454,\t\t1,\t\t31.584233,\t\t6.316847,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[455,\t\t1,\t\t51.495434,\t\t10.299087,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[456,\t\t1,\t\t51.495434,\t\t10.299087,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[457,\t\t1,\t\t157.923583,\t\t31.584717,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[458,\t\t1,\t\t150.20524,\t\t30.041048,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[459,\t\t1,\t\t182.805366,\t\t36.561073,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[460,\t\t1,\t\t240.243914,\t\t48.048783,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[461,\t\t1,\t\t249.905766,\t\t49.981153,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[462,\t\t1,\t\t76.447469,\t\t15.289494,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t
\t1.1,\t\t0.9\t\t],\n\t\t[463,\t\t1,\t\t39.172162,\t\t7.834432,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[464,\t\t1,\t\t39.219512,\t\t7.843902,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[465,\t\t1,\t\t63.350231,\t\t12.670046,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[466,\t\t1,\t\t51.432373,\t\t10.286475,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[467,\t\t1,\t\t47.463564,\t\t9.492713,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[468,\t\t1,\t\t77.821485,\t\t15.564297,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[469,\t\t1,\t\t48.224415,\t\t9.644883,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[470,\t\t1,\t\t122.809076,\t\t24.561815,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[471,\t\t1,\t\t120.916609,\t\t24.183322,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[472,\t\t1,\t\t42.292985,\t\t8.458597,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[473,\t\t1,\t\t77.660006,\t\t15.532001,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[474,\t\t1,\t\t40.110581,\t\t8.022116,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[475,\t\t1,\t\t39.362455,\t\t7.872491,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[476,\t\t1,\t\t44.48605,\t\t8.89721,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[477,\t\t1,\t\t71.790863,\t\t14.358173,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[478,\t\t1,\t\t90.182408,\t\t18.036482,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[479,\t\t1,\t\t163.430553,\t\t32.686111,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[480,\t\t1,\t\t71.634573,\t\t14.326915,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[481,\t\t1,\t\t62.210779,\t\t12.442156,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[482,\t\t1,\t\t70.637663,\t\t14.127533,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[483,\t\t1,\t\t60.072155,\t\t12.014431,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[484,\t\t1,\t\t47.093648,\t\t9.41873,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[485,\t\t1,\t\t70.345446,\t\t14.069089,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[486,\t\t1,\t\t647.143737,\t\t129.428747,\t\t0,\t\t0,\t\t0,\t\t0.999554,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[487,\t\t1,\t\t163.983232,\t\t32.796646,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[488,\t\t1,\t\t472.509932,\t\t94.501986,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[489,\t\t1,\t\t124.36327,\t\t24.872654,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t380.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[490,\t\t1,\t\t38.697212,\t\t7.739442,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[491,\t\t1,\t\t53.209163,\t\t10.641833,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[492,\t\t1,\t\t82.974923,\t\t16.594985,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[493,\t\t1,\t\t106.944743,\t\t21.388949,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n
\t\t[494,\t\t1,\t\t146.164126,\t\t29.232825,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[495,\t\t1,\t\t115.05729,\t\t23.011458,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[496,\t\t1,\t\t8.1497,\t\t1.62994,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[497,\t\t1,\t\t1019.11742,\t\t203.823484,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[498,\t\t1,\t\t47.795688,\t\t9.559138,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[499,\t\t1,\t\t66.71495,\t\t13.34299,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[500,\t\t1,\t\t36.52565,\t\t7.30513,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[501,\t\t1,\t\t61.795103,\t\t12.359021,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[502,\t\t1,\t\t243.892472,\t\t48.778494,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[503,\t\t1,\t\t74.69475,\t\t14.93895,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[504,\t\t1,\t\t48.913631,\t\t9.782726,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[505,\t\t1,\t\t346.933422,\t\t69.386684,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[506,\t\t1,\t\t108.898132,\t\t21.779626,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[507,\t\t1,\t\t103.585169,\t\t20.717034,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[508,\t\t1,\t\t150.590165,\t\t30.118033,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[509,\t\t1,\t\t198.447968,\t\t39.689594,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[510,\t\t1,\t\t125.371437,\t\t25.074287,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[511,\t\t1,\t\t109.362196,\t\t21.872439,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[512,\t\t1,\t\t72.240483,\t\t14.448097,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[513,\t\t1,\t\t39.796797,\t\t7.959359,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[514,\t\t1,\t\t99.050376,\t\t19.810075,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[515,\t\t1,\t\t88.35882,\t\t17.671764,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[516,\t\t1,\t\t98.852728,\t\t19.770546,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[517,\t\t1,\t\t46.433493,\t\t9.286699,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[518,\t\t1,\t\t261.516372,\t\t52.303274,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[519,\t\t1,\t\t25.737999,\t\t5.1476,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[520,\t\t1,\t\t103.914264,\t\t20.782853,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[521,\t\t1,\t\t93.869868,\t\t18.773974,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[522,\t\t1,\t\t80.372141,\t\t16.074428,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[523,\t\t1,\t\t43.26341,\t\t8.652682,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[524,\t\t1,\t\t125.571667,\t\t25.114333,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[525,\t\t1,\t\t149.598388
,\t\t29.919678,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[526,\t\t1,\t\t45.355417,\t\t9.071083,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[527,\t\t1,\t\t49.797061,\t\t9.959412,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[528,\t\t1,\t\t108.686742,\t\t21.737348,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[529,\t\t1,\t\t139.32031,\t\t27.864062,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[530,\t\t1,\t\t59.038257,\t\t11.807651,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[531,\t\t1,\t\t60.026309,\t\t12.005262,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[532,\t\t1,\t\t57.614792,\t\t11.522958,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[533,\t\t1,\t\t51.629806,\t\t10.325961,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[534,\t\t1,\t\t142.423899,\t\t28.48478,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[535,\t\t1,\t\t178.305581,\t\t35.661116,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[536,\t\t1,\t\t140.543223,\t\t28.108645,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[537,\t\t1,\t\t46.752939,\t\t9.350588,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[538,\t\t1,\t\t34.949307,\t\t6.989861,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[539,\t\t1,\t\t37.083364,\t\t7.416673,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[540,\t\t1,\t\t33.39194,\t\t6.678388,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[541,\t\t1,\t\t86.254263,\t\t17.250853,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[542,\t\t1,\t\t118.486697,\t\t23.697339,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[543,\t\t1,\t\t64.716851,\t\t12.94337,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[544,\t\t1,\t\t120.536044,\t\t24.107209,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[545,\t\t1,\t\t259.53387,\t\t51.906774,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[546,\t\t1,\t\t130.082295,\t\t26.016459,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[547,\t\t1,\t\t168.139914,\t\t33.627983,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[548,\t\t1,\t\t54.427586,\t\t10.885517,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[549,\t\t1,\t\t46.540239,\t\t9.308048,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[550,\t\t1,\t\t38.403612,\t\t7.680722,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[551,\t\t1,\t\t37.020156,\t\t7.404031,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[552,\t\t1,\t\t183.837924,\t\t36.767585,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[553,\t\t1,\t\t1.271874,\t\t0.254375,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[554,\t\t1,\t\t186.247009,\t\t37.249402,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[555,\t\t1,\t\t70.962172,\t\t14.192434,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[556,\t\t1,\t\t109.780842,\t\t21.956168,\t\t0,\t\
t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[557,\t\t1,\t\t233.244794,\t\t46.648959,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[558,\t\t1,\t\t137.534821,\t\t27.506964,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[559,\t\t1,\t\t73.607312,\t\t14.721462,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[560,\t\t1,\t\t114.992034,\t\t22.998407,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[561,\t\t1,\t\t63.058275,\t\t12.611655,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[562,\t\t1,\t\t172.270483,\t\t34.454097,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[563,\t\t1,\t\t121.120189,\t\t24.224038,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[564,\t\t1,\t\t239.15215,\t\t47.83043,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[565,\t\t1,\t\t180.452148,\t\t36.09043,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[566,\t\t1,\t\t0.289845,\t\t0.057969,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[567,\t\t1,\t\t293.333059,\t\t58.666612,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[568,\t\t1,\t\t271.262107,\t\t54.252421,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[569,\t\t1,\t\t190.861924,\t\t38.172385,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[570,\t\t1,\t\t297.969832,\t\t59.593966,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[571,\t\t1,\t\t219.388065,\t\t43.877613,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[572,\t\t1,\t\t386.963917,\t\t77.392783,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[573,\t\t1,\t\t112.640123,\t\t22.528025,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[574,\t\t1,\t\t214.622449,\t\t42.92449,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[575,\t\t1,\t\t4.033141,\t\t0.806628,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[576,\t\t1,\t\t260.979458,\t\t52.195892,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[577,\t\t1,\t\t287.702645,\t\t57.540529,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[578,\t\t1,\t\t274.688852,\t\t54.93777,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[579,\t\t1,\t\t100.21399,\t\t20.042798,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[580,\t\t1,\t\t20.863062,\t\t4.172612,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[581,\t\t1,\t\t0.119881,\t\t0.023976,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[582,\t\t1,\t\t75.482663,\t\t15.096533,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[583,\t\t1,\t\t86.575841,\t\t17.315168,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[584,\t\t1,\t\t49.673071,\t\t9.934614,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t],\n\t\t[585,\t\t1,\t\t86.238563,\t\t17.247713,\t\t0,\t\t0,\t\t0,\t\t1.0,\t\t0,\t\t220.0,\t\t0,\t\t1.1,\t\t0.9\t\t]\n\t])\n\t# gen data columns (standard PYPOWER/MATPOWER caseformat): bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf\n\tppc[\"gen\"] = array([\n\t\t[586,\t\t272.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t272.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[589,\t\t63.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[590,\t\t38.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[593,\t\t11.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[594,\t\t19.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[595,\t\t1510.82619,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4730.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[598,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[599,\t\t9.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[601,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[602,\t\t24.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[603,\t\t1382.09855,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3455.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[607,\t\t1800.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1800.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[608,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[609,\t\t36.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[612,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[613,\t\t85.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[614,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[616,\t\t29.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[617,\t\t137.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[618,\t\t33.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[619,\t\t118.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t118.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[621,\t\t765.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t765.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[624,\t\t27.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[628,\t\t449.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t449.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[629,\t\t75.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[631,\t\t79.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[632,\t\t45.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[637,\t\t53.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[638,\t\t128.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t128.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[640,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[641,\t\t12.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[642,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[643,\t\t857.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t857.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[647,\t\t14.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[650,\t\t650.644627,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1324.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[652,\t\t46.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t46.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[655,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[663,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[666,\t\t28.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[670,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[672,\t\t33.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[676,\t\t370.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t370.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[681,\t\t40.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[683,\t\t27.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[687,\t\t1329.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1329.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[689,\t\t310.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t310.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[691,\t\t26.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[694,\t\t16.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[695,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[696,\t\t721.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t721.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[697,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[698,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[702,\t\t73.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[705,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[707,\t\t34.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[713,\t\t13.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[714,\t\t15.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[716,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[717,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[719,\t\t1254.748674,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1958.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[722,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[723,\t\t19.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[724,\t\t12.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[727,\t\t61.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[728,\t\t510.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t510.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[730,\t\t633.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[732,\t\t14.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[735,\t\t84.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[738,\t\t138.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[741,\t\t214.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[742,\t\t9.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[743,\t\t1410.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[746,\t\t100.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t100.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[747,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[748,\t\t110.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t110.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[749,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[750,\t\t90.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[753,\t\t311.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t311.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[758,\t\t18.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[760,\t\t294.128123,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t794.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[761,\t\t15.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[762,\t\t1105.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1105.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[763,\t\t20.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[765,\t\t59.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[767,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[769,\t\t43.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[771,\t\t684.364258,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t690.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[772,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[774,\t\t33.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[777,\t\t79.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[778,\t\t14.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[781,\t\t971.759122,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1310.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[784,\t\t890.776074,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1275.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[785,\t\t3.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[787,\t\t778.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t778.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[788,\t\t875.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[789,\t\t77.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[791,\t\t10.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[792,\t\t62.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t62.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[795,\t\t13.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[800,\t\t36.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[801,\t\t21.82418,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[802,\t\t500.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t500.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[805,\t\t848.970643,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1410.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[806,\t\t35.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[808,\t\t217.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[809,\t\t12.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[811,\t\t25.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[814,\t\t89.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[816,\t\t80.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t80.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[817,\t\t54.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[821,\t\t82.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[822,\t\t134.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[826,\t\t58.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[830,\t\t55.516834,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[834,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[835,\t\t63.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[836,\t\t25.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[837,\t\t472.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t472.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[839,\t\t73.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[841,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[843,\t\t333.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t333.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[844,\t\t40.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t40.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[845,\t\t318.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t318.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[849,\t\t779.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t779.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[850,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[851,\t\t79.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[853,\t\t11.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[855,\t\t688.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t688.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[856,\t\t36.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[857,\t\t1402.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1402.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[858,\t\t56.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[859,\t\t3.480201,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[860,\t\t25.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[864,\t\t875.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[865,\t\t11.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[867,\t\t769.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t769.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[869,\t\t1360.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1360.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[870,\t\t58.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t58.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[872,\t\t22.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[873,\t\t122.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t122.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[874,\t\t20.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[875,\t\t24.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[877,\t\t24.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[881,\t\t337.281055,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1001.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[882,\t\t17.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[883,\t\t18.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[885,\t\t117.63457,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t490.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[886,\t\t2572.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2572.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[889,\t\t9.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[890,\t\t48.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[893,\t\t60.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[894,\t\t158.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t158.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[895,\t\t19.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[896,\t\t24.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[898,\t\t84.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[900,\t\t112.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[902,\t\t19.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[903,\t\t20.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[905,\t\t137.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t137.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[906,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[907,\t\t67.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[909,\t\t36.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[915,\t\t12.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[917,\t\t17.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[918,\t\t38.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[920,\t\t12.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[921,\t\t124.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t124.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[922,\t\t164.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[923,\t\t146.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t146.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[925,\t\t26.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[931,\t\t217.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t217.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[935,\t\t23.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[936,\t\t104.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t104.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[937,\t\t30.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[939,\t\t0.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[940,\t\t29.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[944,\t\t25.4,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.4,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[950,\t\t16.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[952,\t\t31.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t31.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[957,\t\t6.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[958,\t\t66.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[959,\t\t45.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[960,\t\t26.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[963,\t\t757.748298,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t875.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[965,\t\t352.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t352.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[966,\t\t66.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[967,\t\t37.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[968,\t\t54.0,\t\t0,\t\t9999,\t\t-9999,\t\t0.999554,\t\t100,\t\t1,\t\t54.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[969,\t\t56.9,\t\t0,\t\t9999,\t\t-9999,\t\t0.999554,\t\t100,\t\t1,\t\t56.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[971,\t\t20.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[973,\t\t1347.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1347.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[976,\t\t26.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[978,\t\t4.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[981,\t\t99.016829,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[982,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[983,\t\t44.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t44.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[984,\t\t465.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t465.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[985,\t\t22.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[986,\t\t11.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[987,\t\t164.5,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t164.5,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[988,\t\t5.1,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.1,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[993,\t\t392.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[994,\t\t33.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[995,\t\t4.2,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[997,\t\t18.8,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.8,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[999,\t\t15.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1000,\t\t49.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1002,\t\t9.9,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.9,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1003,\t\t900.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t900.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1007,\t\t23.3,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1008,\t\t49.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1010,\t\t358.638683,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1011,\t\t18.7,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.7,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1012,\t\t1598.86858,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2835.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1014,\t\t750.0,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t750.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1026,\t\t655.6,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t655.6,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1027,\t\t6.608085,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t48.3,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1028,\t\t104.030085,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t400.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1029,\t\t0.659424,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1030,\t\t236.668442,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1018.0,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1031,\t\t178.027662,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1447.2,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1032,\t\t77.598072,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.510391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1033,\t\t36.059975,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.164506,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1034,\t\t26.058195,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.262779,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1035,\t\t33.33615,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.886469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1036,\t\t50.715498,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.223077,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1037,\t\t13.306083,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t94.684044,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1038,\t\t12.955878,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.798525,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1039,\t\t82.927654,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.724114,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1040,\t\t0.006512,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.064179,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1041,\t\t105.224635,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t204.187624,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0],\n\t\t[1042,\t\t33.31008,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.70053,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1043,\t\t0.89438,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.035538,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1044,\t\t1.604991,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.163532,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1045,\t\t2.84692,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t61.836204,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1046,\t\t3.159564,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t106.787063,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1047,\t\t0.11702,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.029581,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1048,\t\t1.874472,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.656883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1049,\t\t135.960004,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t293.755375,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1050,\t\t50.376748,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.781606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1051,\t\t296.97528,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t304.42978,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1052,\t\t0.007977,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.66869,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1053,\t\t0.005683,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.368087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1054,\t\t0.018487,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t273.855776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1055,\t\t1.869334,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.856069,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1056,\t\t328.102405,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t603.943953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1057,\t\t154.862136,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t426.979979,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1058,\t\t590.783379,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1055.735174,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1059,\t\t295.545938,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.871332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1060,\t\t6.855672,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.351632,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1061,\t\t124.441219,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t161.862597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1062,\t\t1.821353,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.878561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1063,\t\t5.38714,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.670916,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1064,\t\t123.682419,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t209.786524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1065,\t\t237.798684,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.421643,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1066,\t\t128.229023,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.399019,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1067,\t\t12.727235,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.653526,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1068,\t\t3.22426,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.009022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0],\n\t\t[1069,\t\t1.824218,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.190759,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1070,\t\t0.473903,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.788599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1071,\t\t2.819964,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.328696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1072,\t\t4.927999,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t112.606433,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1073,\t\t0.016514,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t77.81765,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1074,\t\t6.014763,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t153.592986,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1075,\t\t10.918523,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.783448,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1076,\t\t0.300183,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.29551,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1077,\t\t14.615588,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.120041,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1078,\t\t7.065166,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.413246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1079,\t\t2.798967,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t72.327992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1080,\t\t49.695174,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t132.149983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1081,\t\t347.16283,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t405.642115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1082,\t\t476.885634,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t510.054159,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1083,\t\t495.180028,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t633.681488,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1084,\t\t549.155419,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t602.719371,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1085,\t\t109.258361,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t113.714399,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1086,\t\t209.987466,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t225.59917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1087,\t\t104.908763,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t116.66597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1088,\t\t35.596807,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t36.782492,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1089,\t\t270.799357,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t384.449592,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1090,\t\t0.028127,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.140897,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1091,\t\t1.57761,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.7939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1092,\t\t0.778957,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t54.002032,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1093,\t\t79.950854,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.605298,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1094,\t\t3.677528,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.759038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1095,\t\t0.200242,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.204951,\t\t0.0,\t\t0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0],\n\t\t[1096,\t\t75.731488,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.50612,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1097,\t\t1.18816,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.601122,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1098,\t\t2.597672,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t71.025499,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1099,\t\t0.015673,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t290.937198,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1100,\t\t0.003921,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1101,\t\t18.961359,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.930665,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1102,\t\t140.103409,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t350.979988,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1103,\t\t67.236383,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t245.381701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1104,\t\t0.201805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.206918,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1105,\t\t2.1618,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.178593,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1106,\t\t2.253376,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.289793,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1107,\t\t74.033819,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.221615,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1108,\t\t294.151901,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t320.422751,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1109,\t\t0.773974,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.77821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1110,\t\t1.644838,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.654557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1111,\t\t72.444777,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.637993,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1112,\t\t69.501815,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t69.53429,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1113,\t\t3.517851,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.536361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1114,\t\t12.959909,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.446889,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1115,\t\t50.529108,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.575278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1116,\t\t32.590705,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t32.601142,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1117,\t\t90.740798,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.792541,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1118,\t\t7.238119,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.725012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1119,\t\t43.247173,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.254023,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1120,\t\t2.249546,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.416001,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1121,\t\t0.52564,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.540589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1122,\t\t1.415755,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.462883,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1123,\t\t1.368314,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.464336,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1124,\t\t1.254099,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.288283,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1125,\t\t25.510688,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.818899,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1126,\t\t29.011219,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.154893,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1127,\t\t75.065189,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.296621,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1128,\t\t3.04291,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.06139,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1129,\t\t4.711862,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.738747,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1130,\t\t1.019131,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.025754,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1131,\t\t2.880583,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.897078,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1132,\t\t0.357118,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.359497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1133,\t\t0.699698,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.719597,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1134,\t\t0.494393,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.508453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1135,\t\t7.75526,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.117819,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1136,\t\t0.390993,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.4027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1137,\t\t2.447861,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.669012,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1138,\t\t1.152243,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.254278,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1139,\t\t19.805849,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.822769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1140,\t\t27.354023,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.389457,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1141,\t\t117.761296,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.46456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1142,\t\t1.119845,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1143,\t\t23.95844,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.239356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1144,\t\t52.472607,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t52.527382,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1145,\t\t0.009343,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t175.889627,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1146,\t\t0.837499,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.861317,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1147,\t\t45.670633,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.703707,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1148,\t\t15.193514,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.645529,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1149,\t\t8.516184,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.556784,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1150,\t\t3.565438,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.62256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1151,\t\t12.933916,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.036113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1152,\t\t0.114832,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.116518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1153,\t\t0.066338,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.068788,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1154,\t\t0.154903,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.160625,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1155,\t\t0.603868,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.609451,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1156,\t\t15.943274,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.022334,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1157,\t\t4.319464,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.354147,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1158,\t\t1.020134,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.04304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1159,\t\t13.341573,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.498087,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1160,\t\t61.348753,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.377761,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1161,\t\t13.37679,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.263391,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1162,\t\t295.52795,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t502.409178,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1163,\t\t204.813364,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t330.03194,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1164,\t\t169.080818,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t285.625412,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1165,\t\t32.736142,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.188579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1166,\t\t10.324392,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.277163,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1167,\t\t4.944229,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.05378,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1168,\t\t1.260694,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.345774,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1169,\t\t2.587141,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.721845,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1170,\t\t0.259966,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.26599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1171,\t\t3.699796,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.029885,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1172,\t\t1.223448,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.584043,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1173,\t\t106.980744,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t254.253327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1174,\t\t1.232698,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.260082,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1175,\t\t0.771596,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.855454,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1176,\t\t0.229984,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.23222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1177,\t\t22.538602,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.87401,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1178,\t\t3.150843,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.167999,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1179,\t\t1.299251,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.306293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1180,\t\t0.673136,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.688545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1181,\t\t16.564349,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t85.739557,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1182,\t\t20.777501,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.319579,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1183,\t\t29.900467,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.222575,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1184,\t\t4.094736,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.219005,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1185,\t\t11.338664,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.343971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1186,\t\t37.012936,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.916368,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1187,\t\t9.432298,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.814574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1188,\t\t35.700043,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t179.712741,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1189,\t\t13.755058,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.261805,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1190,\t\t219.178004,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t220.533673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1191,\t\t72.800116,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.079413,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1192,\t\t8.977024,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.454569,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1193,\t\t1.137177,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.399953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1194,\t\t4.479308,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.986036,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1195,\t\t0.083395,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.202359,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1196,\t\t30.791933,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.697956,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1197,\t\t18.077938,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.592266,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1198,\t\t23.334118,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.819157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1201,\t\t19.641372,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.166667,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1202,\t\t27.702181,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t49.89238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1203,\t\t166.58394,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t182.623256,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1204,\t\t35.170656,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.541821,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1205,\t\t0.204675,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.548843,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1206,\t\t1.775686,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1207,\t\t1.694167,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.575453,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1208,\t\t1.819097,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.242031,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1209,\t\t0.108058,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.268261,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1210,\t\t1.703029,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.02599,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1211,\t\t17.394643,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.005229,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1212,\t\t89.204411,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.171888,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1213,\t\t56.865564,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.342704,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1214,\t\t2.103582,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.505907,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1215,\t\t0.750611,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.252965,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1216,\t\t24.438918,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t67.754469,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1217,\t\t24.249023,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.871617,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1218,\t\t0.566199,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.980482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1219,\t\t12.313625,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.33953,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1220,\t\t28.947375,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.597849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1221,\t\t362.770634,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t593.230436,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1222,\t\t209.632122,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t211.057769,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1223,\t\t3.779223,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.806101,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1224,\t\t62.035215,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t160.523778,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1225,\t\t31.599486,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t34.931481,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1226,\t\t3.430153,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.982858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1227,\t\t12.458532,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.482807,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1228,\t\t0.787282,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.021367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1229,\t\t34.545517,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t51.244222,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1230,\t\t0.110943,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.681276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1231,\t\t16.90505,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.55478,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1232,\t\t44.228643,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t75.075088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1235,\t\t0.457018,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.03734,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1236,\t\t3.498451,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.225035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1237,\t\t13.898558,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.605409,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1238,\t\t124.493668,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.691049,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1239,\t\t0.000875,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.267706,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1240,\t\t234.917427,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t339.51051,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1241,\t\t329.977461,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t385.361595,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1242,\t\t16.492805,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.074038,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1243,\t\t73.44164,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.079842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1244,\t\t2.21417,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t323.472536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1245,\t\t7.597448,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.080896,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1246,\t\t10.509823,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t57.127825,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1247,\t\t9.134905,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.833396,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1248,\t\t53.949763,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t91.958275,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1249,\t\t71.087635,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t76.135177,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1250,\t\t26.948774,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.830519,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1251,\t\t21.826987,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.404345,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1252,\t\t13.904591,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t14.887727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1253,\t\t40.004932,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.502694,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1254,\t\t15.914068,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.278695,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1255,\t\t2.91361,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.818419,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1256,\t\t11.486854,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.091842,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1257,\t\t64.291554,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.95288,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1258,\t\t30.398898,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t235.487329,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1259,\t\t77.360255,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.288719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1260,\t\t19.068998,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.168717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1261,\t\t141.137179,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t201.699555,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0],\n\t\t[1262,\t\t0.325949,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.524108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1263,\t\t0.251304,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.352421,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1264,\t\t55.338841,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t82.035361,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1265,\t\t4.605899,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t6.654727,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1266,\t\t95.273884,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.710849,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1267,\t\t37.425318,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.469006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1268,\t\t1.088852,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.4295,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1269,\t\t1.14527,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.105829,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1270,\t\t18.624163,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t38.950511,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1271,\t\t34.874339,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t47.371792,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1272,\t\t0.767634,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.23166,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1273,\t\t0.573609,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.169201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1274,\t\t50.404078,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t53.095629,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1275,\t\t91.873338,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.0753,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1276,\t\t22.042664,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.655641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1277,\t\t54.415572,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.611252,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1278,\t\t137.849688,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t170.437781,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1279,\t\t4.5e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004344,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1280,\t\t0.044687,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.626494,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1281,\t\t0.502718,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.51246,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1282,\t\t0.295983,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.363037,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1283,\t\t69.836828,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1297.764428,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1284,\t\t11.051678,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t28.426322,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1285,\t\t0.459963,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.937048,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1286,\t\t9.739874,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.872201,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1287,\t\t79.244769,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t93.199628,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1288,\t\t141.096543,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t148.402692,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1289,\t\t176.093898,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t184.149235,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1290,\t\t2.87747,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.901974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1291,\t\t85.365953,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.293351,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1292,\t\t34.357008,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.682074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1293,\t\t1.672049,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402107,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1294,\t\t3.454541,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.39743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1295,\t\t3.810908,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.873666,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1296,\t\t5.426327,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t27.356489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1297,\t\t39.649397,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t177.778742,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1298,\t\t0.614727,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.014603,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1299,\t\t0.232699,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.158207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1300,\t\t23.336616,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.74405,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1301,\t\t59.759643,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t60.863304,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1302,\t\t4.74882,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.877299,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1303,\t\t4.215176,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.335516,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1304,\t\t9.191642,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t9.594319,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1305,\t\t0.004544,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.004567,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1306,\t\t1.805996,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.827014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1307,\t\t0.290083,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.29894,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1308,\t\t2.287693,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.278321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1309,\t\t2.080815,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.34909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1310,\t\t1.024479,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.64589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1311,\t\t2.457558,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.854004,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1312,\t\t3.294717,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t262.264924,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1313,\t\t29.677875,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.836748,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1314,\t\t11.648153,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.003987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1315,\t\t7.823385,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.879027,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1316,\t\t0.440241,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.757497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1317,\t\t22.594495,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.958574,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1318,\t\t1.214,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.956332,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1319,\t\t7.726802,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.708276,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1320,\t\t15.658252,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t20.75859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1321,\t\t0.105733,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.161123,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1322,\t\t0.673281,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.929763,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1323,\t\t88.070537,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t199.111909,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1324,\t\t7.462697,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.063258,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1325,\t\t35.850661,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t90.497559,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1326,\t\t43.155673,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.928865,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1327,\t\t32.057922,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.796895,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1328,\t\t7.62562,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t16.063343,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1329,\t\t143.638084,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.675424,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1330,\t\t13.942682,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t30.131028,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1331,\t\t0.287325,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.289238,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1332,\t\t18.971857,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.293088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1333,\t\t39.377002,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.650254,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1334,\t\t0.165238,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.215341,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1335,\t\t1.675624,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.306939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1336,\t\t22.179569,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.773035,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1337,\t\t1.895167,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t121.31241,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1338,\t\t0.343096,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.832524,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1339,\t\t8.094061,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.086482,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1340,\t\t1.719329,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.098327,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1341,\t\t5.747566,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t205.513321,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1342,\t\t0.061568,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.734589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1343,\t\t0.082546,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.102108,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1344,\t\t0.093284,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.226057,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1345,\t\t2.160254,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.971188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1346,\t\t207.05021,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t214.719215,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1347,\t\t30.730149,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t414.115976,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1348,\t\t0.076903,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t22.707927,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1349,\t\t1.054008,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t42.352342,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1350,\t\t0.023105,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.094971,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1351,\t\t0.000591,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.015958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1352,\t\t0.04782,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.83726,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1354,\t\t0.004264,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.147716,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1355,\t\t1.046975,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.688324,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1356,\t\t52.744967,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t73.486231,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1357,\t\t42.848835,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.459913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1358,\t\t0.153292,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.247293,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1359,\t\t60.088697,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t70.633589,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1360,\t\t17.130838,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.135983,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1361,\t\t63.030411,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t63.207173,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1362,\t\t79.073054,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t79.107216,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1363,\t\t0.009694,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.036158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1364,\t\t0.014171,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.061068,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1365,\t\t0.00016,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000456,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1366,\t\t0.790668,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.229992,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1367,\t\t16.844243,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t43.863891,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1368,\t\t0.533701,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.298243,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1369,\t\t5.730756,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.968859,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1370,\t\t0.206567,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.343308,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1371,\t\t2.629506,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t81.767208,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1372,\t\t186.488603,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t192.966588,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1373,\t\t34.443544,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t35.200257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1376,\t\t105.317936,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t176.213655,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1377,\t\t112.728506,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t234.376272,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1378,\t\t128.013354,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t246.029906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1379,\t\t0.782569,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.805984,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1380,\t\t1.205328,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.213356,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1381,\t\t0.986293,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.01257,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1382,\t\t138.007244,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t138.839906,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1383,\t\t109.413691,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t109.821439,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1384,\t\t4.66695,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.669135,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1385,\t\t0.120034,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.124455,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1386,\t\t0.658834,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.673858,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1387,\t\t3.47374,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.493561,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1388,\t\t0.922047,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.928188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1389,\t\t0.212123,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.213536,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1390,\t\t3.711638,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.732816,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1391,\t\t0.518853,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.521719,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1392,\t\t18.867476,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t19.306386,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1393,\t\t1.277892,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.376509,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1394,\t\t1.009398,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.077886,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1395,\t\t0.063961,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.073776,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1396,\t\t0.021924,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.026112,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1397,\t\t22.814948,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.084545,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1398,\t\t2.522882,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.779641,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1399,\t\t17.766171,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.868157,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1400,\t\t1.20352,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.297197,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1401,\t\t84.971313,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.339497,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1402,\t\t25.706285,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.328902,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1403,\t\t25.589062,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t119.651672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1404,\t\t20.034146,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t134.800518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1405,\t\t26.655143,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t29.550802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1406,\t\t7.405844,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t10.763987,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1407,\t\t0.206231,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.211614,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1408,\t\t33.729648,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t41.078698,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1409,\t\t9.208431,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.019786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1410,\t\t28.273097,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t37.466518,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1411,\t\t30.074024,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t39.395367,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1412,\t\t1.056231,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.987601,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1413,\t\t0.921657,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.679791,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1414,\t\t5.84373,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.992489,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1415,\t\t1.354626,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.454501,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1416,\t\t1.195782,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.958002,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1417,\t\t0.000204,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.001311,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1418,\t\t73.547107,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t88.264613,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1419,\t\t29.245381,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t33.260903,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1420,\t\t1.062106,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.399757,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1421,\t\t3.729871,\t\t0,\t\t9999,\t\t-9999,\t\t0.999554,\t\t100,\t\t1,\t\t6.972369,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1422,\t\t2.701524,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t4.730495,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1423,\t\t1.036575,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.931017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1424,\t\t6.870415,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t219.092115,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1425,\t\t4.240743,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t21.366402,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1426,\t\t48.393481,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.762602,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1427,\t\t320.481892,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t480.698671,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1428,\t\t206.447115,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t334.885743,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1429,\t\t1.934629,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t13.279826,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1430,\t\t0.000918,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.034248,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1431,\t\t107.667744,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t227.662022,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1432,\t\t0.512128,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.058931,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1433,\t\t32.437405,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1289.241188,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1434,\t\t0.724661,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.440014,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1435,\t\t2.719421,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t86.713217,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1436,\t\t0.810927,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t98.434116,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1437,\t\t233.547643,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t238.321958,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1438,\t\t262.517934,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t392.815158,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1439,\t\t25.144625,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t99.103164,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1440,\t\t0.527204,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.833609,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1441,\t\t0.10379,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.171578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1442,\t\t0.358625,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.715522,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1443,\t\t0.005191,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t103.005076,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1444,\t\t4.836207,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.981696,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1445,\t\t10.804131,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t25.036799,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1446,\t\t603.424909,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t758.547933,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1447,\t\t77.589286,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.477411,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1448,\t\t1.990229,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t7.523578,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1449,\t\t75.799398,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t95.437673,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1450,\t\t29.696998,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t59.256809,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1451,\t\t38.222514,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t68.198838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1452,\t\t8.496963,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t24.068921,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1453,\t\t64.705767,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.93775,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0],\n\t\t[1454,\t\t153.672244,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.126607,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1455,\t\t0.648449,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.654438,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1456,\t\t36.643758,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t50.054822,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1457,\t\t1.989422,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.002672,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1458,\t\t0.24457,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.246199,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1459,\t\t4.925247,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.309059,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1460,\t\t39.822684,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t101.498473,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1461,\t\t17.933929,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.951737,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1462,\t\t2.401562,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.402686,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1463,\t\t0.689129,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.711207,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1464,\t\t176.333456,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t218.884211,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1465,\t\t5.213592,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.299939,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1466,\t\t5.640449,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.685017,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1467,\t\t2.081537,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.096155,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1468,\t\t23.073463,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.789171,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1469,\t\t61.686727,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t65.007467,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1470,\t\t0.001122,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t78.965265,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1471,\t\t0.038276,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t159.165074,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1472,\t\t11.952646,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t11.980182,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1473,\t\t8.177237,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.362608,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1474,\t\t1.370149,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.398948,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1475,\t\t0.352563,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.39088,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1476,\t\t35.385904,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t250.480113,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1477,\t\t9.087521,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t12.122974,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1478,\t\t0.001027,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.035833,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1479,\t\t3.883098,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.592606,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1480,\t\t12.250601,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t18.681964,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1481,\t\t0.021901,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.053146,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1482,\t\t4.135907,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t17.51083,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1483,\t\t3.580244,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t3.599649,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1484,\t\t0.029847,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02991,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1485,\t\t0.562364,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.563547,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1486,\t\t2.893254,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.89934,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1487,\t\t0.458304,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.142917,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1488,\t\t0.911802,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t5.569856,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1489,\t\t0.115649,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.118938,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1490,\t\t5.673013,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t782.463701,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1491,\t\t76.534474,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t84.622838,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1492,\t\t222.990441,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t229.927503,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1493,\t\t78.147874,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t83.557175,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1494,\t\t326.80152,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t404.486733,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1495,\t\t60.458248,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t66.920717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1496,\t\t4.6e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.000282,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1497,\t\t57.039458,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t89.070006,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1498,\t\t97.421461,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t105.800802,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1499,\t\t0.45968,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t2.286676,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1500,\t\t0.068834,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.154817,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1501,\t\t2.367818,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t8.165333,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1502,\t\t0.111192,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.938928,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1503,\t\t24.40196,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t45.972187,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1504,\t\t126.871686,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t188.822836,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1505,\t\t4.377864,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t26.765913,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1506,\t\t14.359595,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t56.406717,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1507,\t\t3.264294,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t15.438042,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0],\n\t\t[1508,\t\t0.064798,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.065259,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1509,\t\t4.7e-05,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.005193,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1510,\t\t56.12069,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t107.008141,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1511,\t\t95.027293,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t155.22192,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1512,\t\t39.855633,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t64.130052,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1513,\t\t16.938458,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t23.051786,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1514,\t\t0.003798,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.027711,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1515,\t\t0.0001,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.00633,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1516,\t\t0.011036,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.02881,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1517,\t\t0.816063,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t1.286804,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1518,\t\t0.650791,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.670542,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\t\t[1519,\t\t0.045169,\t\t0,\t\t9999,\t\t-9999,\t\t1.0,\t\t100,\t\t1,\t\t0.04654,\t\t0.0,\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n\t])\n\tppc[\"branch\"] = array([\n\t\t[586,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[589,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[590,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[593,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[594,\t\t114,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[595,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[598,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[599,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[601,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[602,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[603,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[607,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[608,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[609,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[612,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[613,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[614,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[616,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[617,\t\
t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[618,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[619,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[621,\t\t136,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[624,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[628,\t\t142,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[629,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[631,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[632,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[637,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[638,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[640,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[641,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[642,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[643,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[647,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[650,\t\t166,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[652,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[655,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[663,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[666,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[670,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[672,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[676,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[681,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[683,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[687,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[689,\t\t204,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[691,\t\t209,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[694,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[695,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[696,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[697,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[6
98,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[702,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[705,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[707,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[713,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[714,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[716,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[717,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[719,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[722,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[723,\t\t235,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[724,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[727,\t\t243,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[728,\t\t244,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[730,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[732,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[735,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[738,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[741,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[742,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[743,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[746,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[747,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[748,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[749,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[750,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[753,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[758,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[760,\t\t287,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[761,\t\t288,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[762,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[763,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[765,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],
\n\t\t[767,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[769,\t\t293,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[771,\t\t297,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[772,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[774,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[777,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[778,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[781,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[784,\t\t563,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[785,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[787,\t\t308,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[788,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[789,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[791,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[792,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[795,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[800,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[801,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[802,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[805,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[806,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[808,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[809,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[811,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[814,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[816,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[817,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[821,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[822,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[826,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[830,\t\t345,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[834,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[835,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t36
0\t\t],\n\t\t[836,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[837,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[839,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[841,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[843,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[844,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[845,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[849,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[850,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[851,\t\t575,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[853,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[855,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[856,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[857,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[858,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[859,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[860,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[864,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[865,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[867,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[869,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[870,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[872,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[873,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[874,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[875,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[877,\t\t578,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[881,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[882,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[883,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[885,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[886,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[889,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-3
60,\t\t360\t\t],\n\t\t[890,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[893,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[894,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[895,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[896,\t\t581,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[898,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[900,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[902,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[903,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[905,\t\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[906,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[907,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[909,\t\t417,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[915,\t\t423,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[917,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[918,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[920,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[921,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[922,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[923,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[925,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[931,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[935,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[936,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[937,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[939,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[940,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[944,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[950,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[952,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[957,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[958,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[959,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t
\t-360,\t\t360\t\t],\n\t\t[960,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[963,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[965,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[966,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[967,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[968,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[969,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[971,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[973,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[976,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[978,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[981,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[982,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[983,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[984,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[985,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[986,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[987,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[988,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[993,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[994,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[995,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[997,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[999,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1000,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1002,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1003,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1007,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1008,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1010,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1011,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1012,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1014,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360
,\t\t360\t\t],\n\t\t[1026,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1027,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1028,\t\t221,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1029,\t\t268,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1030,\t\t269,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1031,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1032,\t\t1,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1033,\t\t3,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1034,\t\t4,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1035,\t\t6,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1036,\t\t7,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1037,\t\t8,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1038,\t\t9,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1039,\t\t11,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1040,\t\t14,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1041,\t\t16,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1042,\t\t17,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1043,\t\t19,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1044,\t\t21,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1045,\t\t23,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1046,\t\t25,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1047,\t\t27,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1048,\t\t28,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1049,\t\t29,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1050,\t\t31,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1051,\t\t33,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1052,\t\t34,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1053,\t\t35,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1054,\t\t36,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1055,\t\t38,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1056,\t\t39,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1057,\t\t40,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1058,\t\t41,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,
\t\t-360,\t\t360\t\t],\n\t\t[1059,\t\t43,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1060,\t\t44,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1061,\t\t45,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1062,\t\t47,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1063,\t\t48,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1064,\t\t49,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1065,\t\t50,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1066,\t\t51,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1067,\t\t53,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1068,\t\t54,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1069,\t\t55,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1070,\t\t57,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1071,\t\t58,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1072,\t\t59,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1073,\t\t60,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1074,\t\t62,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1075,\t\t63,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1076,\t\t64,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1077,\t\t65,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1078,\t\t66,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1079,\t\t67,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1080,\t\t70,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1081,\t\t71,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1082,\t\t72,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1083,\t\t73,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1084,\t\t75,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1085,\t\t76,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1086,\t\t77,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1087,\t\t79,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1088,\t\t80,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1089,\t\t81,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1090,\t\t82,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1091,\t\t83,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\
t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1092,\t\t84,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1093,\t\t85,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1094,\t\t88,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1095,\t\t89,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1096,\t\t90,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1097,\t\t91,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1098,\t\t92,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1099,\t\t93,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1100,\t\t97,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1101,\t\t98,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1102,\t\t101,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1103,\t\t102,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1104,\t\t103,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1105,\t\t108,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1106,\t\t109,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1107,\t\t110,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1108,\t\t111,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1109,\t\t112,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1110,\t\t113,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1111,\t\t114,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1112,\t\t115,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1113,\t\t116,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1114,\t\t118,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1115,\t\t119,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1116,\t\t121,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1117,\t\t122,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1118,\t\t126,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1119,\t\t127,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1120,\t\t130,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1121,\t\t131,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1122,\t\t132,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1123,\t\t133,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1124,\t\t134,\t\t0,\t\t1e-05,\t\t0,\t\t
9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1125,\t\t135,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1126,\t\t136,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1127,\t\t137,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1128,\t\t139,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1129,\t\t140,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1130,\t\t141,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1131,\t\t142,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1132,\t\t144,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1133,\t\t145,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1134,\t\t146,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1135,\t\t147,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1136,\t\t148,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1137,\t\t149,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1138,\t\t150,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1139,\t\t151,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1140,\t\t152,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1141,\t\t153,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1142,\t\t154,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1143,\t\t155,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1144,\t\t158,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1145,\t\t161,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1146,\t\t162,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1147,\t\t163,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1148,\t\t164,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1149,\t\t166,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1150,\t\t167,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1151,\t\t168,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1152,\t\t169,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1153,\t\t170,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1154,\t\t171,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1155,\t\t172,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1156,\t\t173,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t
\t[1157,\t\t174,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1158,\t\t175,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1159,\t\t176,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1160,\t\t177,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1161,\t\t178,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1162,\t\t179,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1163,\t\t180,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1164,\t\t181,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1165,\t\t182,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1166,\t\t183,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1167,\t\t185,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1168,\t\t186,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1169,\t\t187,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1170,\t\t188,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1171,\t\t189,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1172,\t\t190,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1173,\t\t192,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1174,\t\t193,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1175,\t\t194,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1176,\t\t196,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1177,\t\t197,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1178,\t\t198,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1179,\t\t199,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1180,\t\t200,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1181,\t\t202,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1182,\t\t203,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1183,\t\t204,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1184,\t\t205,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1185,\t\t206,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1186,\t\t207,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1187,\t\t208,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1188,\t\t209,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1189,\t\t210,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t
\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1190,\t\t211,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1191,\t\t212,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1192,\t\t213,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1193,\t\t214,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1194,\t\t215,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1195,\t\t216,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1196,\t\t217,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1197,\t\t218,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1198,\t\t219,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1201,\t\t223,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1202,\t\t224,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1203,\t\t225,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1204,\t\t226,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1205,\t\t227,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1206,\t\t228,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1207,\t\t229,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1208,\t\t230,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1209,\t\t234,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1210,\t\t235,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1211,\t\t237,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1212,\t\t238,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1213,\t\t239,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1214,\t\t240,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1215,\t\t241,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1216,\t\t242,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1217,\t\t243,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1218,\t\t244,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1219,\t\t247,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1220,\t\t251,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1221,\t\t252,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1222,\t\t253,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1223,\t\t254,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1224,\t\t255,\t\t0,\t\
t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1225,\t\t256,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1226,\t\t257,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1227,\t\t258,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1228,\t\t260,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1229,\t\t263,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1230,\t\t264,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1231,\t\t266,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1232,\t\t267,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1235,\t\t271,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1236,\t\t272,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1237,\t\t273,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1238,\t\t274,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1239,\t\t275,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1240,\t\t276,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1241,\t\t278,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1242,\t\t281,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1243,\t\t282,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1244,\t\t283,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1245,\t\t284,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1246,\t\t285,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1247,\t\t286,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1248,\t\t287,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1249,\t\t288,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1250,\t\t289,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1251,\t\t291,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1252,\t\t292,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1253,\t\t293,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1254,\t\t294,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1255,\t\t295,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1256,\t\t296,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1257,\t\t297,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1258,\t\t298,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,
\t\t360\t\t],\n\t\t[1259,\t\t299,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1260,\t\t300,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1261,\t\t302,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1262,\t\t303,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1263,\t\t304,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1264,\t\t307,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1265,\t\t308,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1266,\t\t309,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1267,\t\t311,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1268,\t\t312,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1269,\t\t314,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1270,\t\t316,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1271,\t\t317,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1272,\t\t318,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1273,\t\t319,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1274,\t\t321,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1275,\t\t322,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1276,\t\t323,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1277,\t\t324,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1278,\t\t325,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1279,\t\t326,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1280,\t\t327,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1281,\t\t328,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1282,\t\t329,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1283,\t\t331,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1284,\t\t333,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1285,\t\t335,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1286,\t\t337,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1287,\t\t338,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1288,\t\t339,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1289,\t\t340,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1290,\t\t341,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1291,\t\t342,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\
t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1292,\t\t343,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1293,\t\t344,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1294,\t\t345,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1295,\t\t346,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1296,\t\t347,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1297,\t\t348,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1298,\t\t350,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1299,\t\t352,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1300,\t\t353,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1301,\t\t354,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1302,\t\t355,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1303,\t\t356,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1304,\t\t357,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1305,\t\t359,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1306,\t\t361,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1307,\t\t362,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1308,\t\t363,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1309,\t\t364,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1310,\t\t365,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1311,\t\t366,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1312,\t\t367,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1313,\t\t368,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1314,\t\t369,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1315,\t\t370,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1316,\t\t371,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1317,\t\t372,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1318,\t\t373,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1319,\t\t374,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1320,\t\t375,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1321,\t\t376,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1322,\t\t377,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1323,\t\t378,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1324,
\t\t379,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1325,\t\t381,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1326,\t\t384,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1327,\t\t385,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1328,\t\t386,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1329,\t\t387,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1330,\t\t388,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1331,\t\t390,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1332,\t\t391,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1333,\t\t392,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1334,\t\t393,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1335,\t\t394,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1336,\t\t395,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1337,\t\t396,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1338,\t\t397,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1339,\t\t398,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1340,\t\t399,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1341,\t\t400,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1342,\t\t403,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1343,\t\t404,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1344,\t\t405,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1345,\t\t406,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1346,\t\t407,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1347,\t\t408,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1348,\t\t410,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1349,\t\t411,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1350,\t\t412,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1351,\t\t413,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1352,\t\t414,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1354,\t\t417,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1355,\t\t418,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1356,\t\t419,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1357,\t\t420,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t
0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1358,\t\t421,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1359,\t\t422,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1360,\t\t423,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1361,\t\t424,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1362,\t\t425,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1363,\t\t426,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1364,\t\t427,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1365,\t\t428,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1366,\t\t429,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1367,\t\t430,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1368,\t\t431,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1369,\t\t432,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1370,\t\t433,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1371,\t\t434,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1372,\t\t435,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1373,\t\t436,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1376,\t\t439,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1377,\t\t440,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1378,\t\t441,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1379,\t\t442,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1380,\t\t443,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1381,\t\t445,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1382,\t\t446,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1383,\t\t447,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1384,\t\t448,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1385,\t\t449,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1386,\t\t450,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1387,\t\t451,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1388,\t\t453,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1389,\t\t454,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1390,\t\t455,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1391,\t\t456,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1392,\t\t457,\t\t0,\t\t1e-05,\
t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1393,\t\t458,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1394,\t\t459,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1395,\t\t460,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1396,\t\t461,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1397,\t\t462,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1398,\t\t463,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1399,\t\t464,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1400,\t\t465,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1401,\t\t466,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1402,\t\t467,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1403,\t\t468,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1404,\t\t469,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1405,\t\t470,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1406,\t\t471,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1407,\t\t472,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1408,\t\t473,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1409,\t\t474,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1410,\t\t475,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1411,\t\t476,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1412,\t\t477,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1413,\t\t478,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1414,\t\t479,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1415,\t\t480,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1416,\t\t481,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1417,\t\t482,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1418,\t\t483,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1419,\t\t484,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1420,\t\t485,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1421,\t\t486,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1422,\t\t487,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1423,\t\t488,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1424,\t\t489,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\
t\t],\n\t\t[1425,\t\t490,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1426,\t\t491,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1427,\t\t492,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1428,\t\t493,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1429,\t\t494,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1430,\t\t495,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1431,\t\t496,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1432,\t\t497,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1433,\t\t498,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1434,\t\t499,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1435,\t\t500,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1436,\t\t501,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1437,\t\t502,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1438,\t\t503,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1439,\t\t504,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1440,\t\t505,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1441,\t\t506,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1442,\t\t507,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1443,\t\t508,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1444,\t\t509,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1445,\t\t510,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1446,\t\t511,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1447,\t\t512,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1448,\t\t513,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1449,\t\t514,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1450,\t\t515,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1451,\t\t516,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1452,\t\t517,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1453,\t\t518,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1454,\t\t519,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1455,\t\t520,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1456,\t\t521,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1457,\t\t522,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t
\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1458,\t\t523,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1459,\t\t524,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1460,\t\t525,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1461,\t\t526,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1462,\t\t527,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1463,\t\t528,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1464,\t\t529,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1465,\t\t530,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1466,\t\t531,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1467,\t\t532,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1468,\t\t533,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1469,\t\t534,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1470,\t\t535,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1471,\t\t536,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1472,\t\t537,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1473,\t\t538,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1474,\t\t539,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1475,\t\t540,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1476,\t\t541,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1477,\t\t542,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1478,\t\t543,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1479,\t\t544,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1480,\t\t545,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1481,\t\t546,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1482,\t\t547,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1483,\t\t548,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1484,\t\t549,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1485,\t\t550,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1486,\t\t551,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1487,\t\t552,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1488,\t\t554,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1489,\t\t555,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1490,\t\t556,
\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1491,\t\t557,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1492,\t\t558,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1493,\t\t559,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1494,\t\t560,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1495,\t\t561,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1496,\t\t562,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1497,\t\t563,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1498,\t\t564,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1499,\t\t565,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1500,\t\t566,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1501,\t\t567,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1502,\t\t568,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1503,\t\t569,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1504,\t\t570,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1505,\t\t571,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1506,\t\t572,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1507,\t\t573,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1508,\t\t574,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1509,\t\t575,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1510,\t\t576,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1511,\t\t577,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1512,\t\t578,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1513,\t\t579,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1514,\t\t580,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1515,\t\t581,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1516,\t\t582,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1517,\t\t583,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1518,\t\t584,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1519,\t\t585,\t\t0,\t\t1e-05,\t\t0,\t\t9999,\t\t9999,\t\t9999,\t\t0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[1,\t\t490,\t\t0,\t\t0.01433884297520661,\t\t0.151691958358336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.375\t\t],\n\t\t[3,\t\t4,\t\t0,\t\t0.006291637811634348,\t\t0.903417549506624,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t72.681\t\t],\n\t\t[491,\t\t6,\t
\t0,\t\t0.011200661157024791,\t\t0.118492839955776,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.882\t\t],\n\t\t[7,\t\t5,\t\t0,\t\t0.005794840720221606,\t\t0.20802058859584005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.471\t\t],\n\t\t[8,\t\t9,\t\t0,\t\t0.0024379328254847646,\t\t0.350063268897336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.163\t\t],\n\t\t[492,\t\t11,\t\t0,\t\t0.018224793388429753,\t\t0.0482004476327704,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.565\t\t],\n\t\t[11,\t\t493,\t\t0,\t\t0.030286942148760328,\t\t0.08010209706571599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.809\t\t],\n\t\t[492,\t\t493,\t\t0,\t\t0.04521652892561983,\t\t0.11958747011094399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t68.39\t\t],\n\t\t[494,\t\t14,\t\t0,\t\t0.012990743801652892,\t\t0.137430291356512,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.297\t\t],\n\t\t[13,\t\t15,\t\t0,\t\t0.007681959833795014,\t\t0.27576354266704156,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.371\t\t],\n\t\t[16,\t\t5,\t\t0,\t\t0.006275623268698061,\t\t0.22527950450957998,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.248000000000005\t\t],\n\t\t[17,\t\t18,\t\t0,\t\t0.04623522622347646,\t\t0.9335989000302801,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t200.291\t\t],\n\t\t[17,\t\t12,\t\t0,\t\t0.0056020313942728535,\t\t0.113118303398186,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.268\t\t],\n\t\t[14,\t\t495,\t\t0,\t\t0.0017957024793388433,\t\t0.018996904156819597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.432\t\t],\n\t\t[494,\t\t19,\t\t0,\t\t0.010246611570247935,\t\t0.10839986031771602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.996\t\t],\n\t\t[20,\t\t21,\t\t0,\t\t0.005415685595567867,\t\t0.19440984828307922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t31.281\t\t],\n\t\t[20,\t\t22,\t\t0,\t\t0.0049706544321329645,\t\t0.713737278110032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.42100000000001\t\t],\n\t\t[497,\t\t23,\t\t0,\t\t0.002190413223140496,\t\t0.005793146490362,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.313\t\t],\n\t\t[23,\t\t499,\t\t0,\t\t0.020799669421487598,\t\t0.22004164444829602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.919\t\t],\n\t\t[25,\t\t26,\t\t0,\t\t0.00141845567867036,\t\t0.050919084651523595,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.193\t\t],\n\t\t[25,\t\t22,\t\t0,\t\t0.0035578254847645433,\t\t0.0319293051869808,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.275\t\t],\n\t\t[23,\t\t27,\t\t0,\t\t0.027738181818181818,\t\t0.073361203699828,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.95399999999999\t\t],\n\t\t[28,\t\t23,\t\t0,\t\t0.012841652892561981,\t\t0.0339632611780132,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.423\t\t],\n\t\t[8,\t\t21,\t\t0,\t\t0.004948753462603878,\t\t0.17764812836304802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.584\t\t],\n\t\t[9,\t\t29,\t\t0,\t\t0.002212863573407202,\t\t0.31774552934092004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.563000000000002\t\t],\n\t\t[30,\t\t25,\t\t0,\t\t0.019958795013850415,\t\t0.17911796401827998,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.641000000000005\t\t],\n\t\t[31,\t\t32,\
t\t0,\t\t0.0299776084949446,\t\t0.605319030583196,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t129.863\t\t],\n\t\t[32,\t\t33,\t\t0,\t\t0.016762234533725762,\t\t0.33846927983213604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.61399999999999\t\t],\n\t\t[34,\t\t35,\t\t0,\t\t0.001931900826446281,\t\t0.020437759184893597,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.843999999999999\t\t],\n\t\t[35,\t\t36,\t\t0,\t\t0.0008730578512396695,\t\t0.0092361605077588,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.641\t\t],\n\t\t[490,\t\t6,\t\t0,\t\t0.049352066115702475,\t\t0.130525028606764,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.645\t\t],\n\t\t[37,\t\t10,\t\t0,\t\t0.02404639889196676,\t\t0.485553838251812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.169\t\t],\n\t\t[10,\t\t38,\t\t0,\t\t0.006848799630657894,\t\t0.13829351176534158,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.669\t\t],\n\t\t[37,\t\t38,\t\t0,\t\t0.01437834718372576,\t\t1.1613317560186958,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t124.574\t\t],\n\t\t[39,\t\t40,\t\t0,\t\t0.04521629732222991,\t\t0.913024308337812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t195.877\t\t],\n\t\t[39,\t\t41,\t\t0,\t\t0.017466989843005543,\t\t0.35269996139852006,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.667\t\t],\n\t\t[42,\t\t41,\t\t0,\t\t0.031145429362880884,\t\t0.6289001042979919,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t134.922\t\t],\n\t\t[18,\t\t42,\t\t0,\t\t0.03439750692520776,\t\t0.6945672650962679,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t149.01\t\t],\n\t\t[492,\t\t43,\t\t0,\t\t0.01819173553719008,\t\t0.192452068436848,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.03\t\t],\n\t\t[44,\t\t45,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t505,\t\t0,\t\t0.006061487603305785,\t\t0.0160312607980052,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[46,\t\t12,\t\t0,\t\t0.0014741170360110802,\t\t0.2116687641962416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.029\t\t],\n\t\t[47,\t\t48,\t\t0,\t\t0.005344182825484765,\t\t0.01199019212302604,\t\t428.0,\t\t428.0,\t\t428.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.7170000000000005\t\t],\n\t\t[49,\t\t50,\t\t0,\t\t0.0019151662049861494,\t\t0.0171874439892256,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.531000000000001\t\t],\n\t\t[31,\t\t33,\t\t0,\t\t0.013475992613088641,\t\t0.27211225959163604,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.378\t\t],\n\t\t[31,\t\t51,\t\t0,\t\t0.003518611495844875,\t\t0.5052381383693519,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.647\t\t],\n\t\t[52,\t\t53,\t\t0,\t\t0.010464421745152355,\t\t1.5025884408875438,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t120.885\t\t],\n\t\t[52,\t\t54,\t\t0,\t\t0.0076126500461911354,\t\t0.1537174637168,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.978\t\t],\n\t\t[506,\t\t55,\t\t0,\t\t0.012634380165289257,\t\t0.133660287181212,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.219\t\t],\n\t\t[506,\t\t507,\t\t0,\t\t0.044157355371900825,\t\t0.11678619613628,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.788\t\t],\n\t\t[57,\t\t506,\t\t0,\t\t0.0
04687272727272727,\t\t0.049587095736244,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.179\t\t],\n\t\t[57,\t\t58,\t\t0,\t\t0.014436363636363634,\t\t0.0381809096340232,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.835\t\t],\n\t\t[58,\t\t506,\t\t0,\t\t0.019797685950413223,\t\t0.052360391943288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.944000000000003\t\t],\n\t\t[59,\t\t60,\t\t0,\t\t0.019407548476454296,\t\t0.174170863885556,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.049\t\t],\n\t\t[508,\t\t62,\t\t0,\t\t0.051111404958677685,\t\t0.03379452026753001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.653\t\t],\n\t\t[30,\t\t61,\t\t0,\t\t0.03143698060941828,\t\t0.28212765137935203,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.79\t\t],\n\t\t[63,\t\t506,\t\t0,\t\t0.027457190082644623,\t\t0.072618044249872,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.528999999999996\t\t],\n\t\t[13,\t\t64,\t\t0,\t\t0.0014816481994459833,\t\t0.2127501654814608,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.116\t\t],\n\t\t[65,\t\t66,\t\t0,\t\t0.03778185595567867,\t\t0.7629053006222161,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t163.671\t\t],\n\t\t[59,\t\t67,\t\t0,\t\t0.0051880193905817175,\t\t0.046559297286324804,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.982999999999999\t\t],\n\t\t[61,\t\t67,\t\t0,\t\t0.012931440443213295,\t\t0.1160517597580644,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.346\t\t],\n\t\t[68,\t\t69,\t\t0,\t\t0.011149584487534626,\t\t0.4002427745096039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.4\t\t],\n\t\t[70,\t\t69,\t\t0,\t\t0.009625346260387812,\t\t0.345526355460808,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.596000000000004\t\t],\n\t\t[71,\t\t72,\t\t0,\t\t0.008878635734072021,\t\t0.318721276477736,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.283\t\t],\n\t\t[73,\t\t74,\t\t0,\t\t0.012529547553116345,\t\t0.253001288604392,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t54.278\t\t],\n\t\t[37,\t\t75,\t\t0,\t\t0.027459141274238225,\t\t0.5544652029066119,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t118.95299999999999\t\t],\n\t\t[72,\t\t75,\t\t0,\t\t0.006688711911357341,\t\t0.240108375006292,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.634\t\t],\n\t\t[37,\t\t72,\t\t0,\t\t0.036222068328739615,\t\t0.7314094881920841,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t156.914\t\t],\n\t\t[76,\t\t77,\t\t0,\t\t0.004683777700831025,\t\t0.6725445900750401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t54.107\t\t],\n\t\t[77,\t\t51,\t\t0,\t\t0.00363183864265928,\t\t0.5214964473447999,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.955\t\t],\n\t\t[73,\t\t72,\t\t0,\t\t0.025475069252077563,\t\t0.514402082018968,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.35799999999999\t\t],\n\t\t[18,\t\t40,\t\t0,\t\t0.01302770083102493,\t\t0.26306018504072,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.43600000000001\t\t],\n\t\t[492,\t\t45,\t\t0,\t\t0.0308703030303719,\t\t0.18370114733484796,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.03699999999999\t\t],\n\t\t[10,\t\t74,\t\t0,\t\t0.030167359187465374,\t\t0.609150547206812,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t130.685\t\t],
\n\t\t[45,\t\t511,\t\t0,\t\t0.08203371900826446,\t\t0.05424014819960001,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.038000000000004\t\t],\n\t\t[78,\t\t32,\t\t0,\t\t0.013458795013850415,\t\t0.48313777647302397,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.738\t\t],\n\t\t[79,\t\t80,\t\t0,\t\t0.0038086911357340715,\t\t0.1367226831743568,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.999000000000002\t\t],\n\t\t[81,\t\t79,\t\t0,\t\t0.010767832409972299,\t\t0.3865388099484561,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t62.195\t\t],\n\t\t[34,\t\t82,\t\t0,\t\t0.0015497520661157025,\t\t0.00409874294399768,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.344\t\t],\n\t\t[83,\t\t84,\t\t0,\t\t0.00902611570247934,\t\t0.0238720301499152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.652000000000001\t\t],\n\t\t[83,\t\t499,\t\t0,\t\t0.04179570247933885,\t\t0.0276350398834796,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.608\t\t],\n\t\t[85,\t\t86,\t\t0,\t\t0.00802354570637119,\t\t0.28802563884886,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.343999999999994\t\t],\n\t\t[87,\t\t86,\t\t0,\t\t0.01904968836565097,\t\t0.683837154069184,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.031\t\t],\n\t\t[88,\t\t89,\t\t0,\t\t0.00380297520661157,\t\t0.010058007429140002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.752000000000001\t\t],\n\t\t[90,\t\t86,\t\t0,\t\t0.012097818559556786,\t\t0.434282055192244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t69.877\t\t],\n\t\t[91,\t\t86,\t\t0,\t\t9.26246537396122e-05,\t\t0.013299992817559201,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t92,\t\t0,\t\t0.0001852493074792244,\t\t0.0066499964087796005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.07\t\t],\n\t\t[86,\t\t93,\t\t0,\t\t0.008152181440443215,\t\t0.292643346635492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.086999999999996\t\t],\n\t\t[94,\t\t86,\t\t0,\t\t0.012883829639889197,\t\t0.46249792780547194,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.417\t\t],\n\t\t[86,\t\t95,\t\t0,\t\t0.010421052631578947,\t\t0.37409026526870803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t60.192\t\t],\n\t\t[513,\t\t517,\t\t0,\t\t0.0008733884297520661,\t\t0.0023099144321748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.321\t\t],\n\t\t[97,\t\t66,\t\t0,\t\t0.03812777008310249,\t\t0.34217338998058805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t110.113\t\t],\n\t\t[42,\t\t98,\t\t0,\t\t0.003091759002770083,\t\t0.44394630230884,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t35.716\t\t],\n\t\t[99,\t\t100,\t\t0,\t\t0.016371537396121884,\t\t0.587698093837988,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t94.56200000000001\t\t],\n\t\t[42,\t\t101,\t\t0,\t\t0.008165339335180054,\t\t0.29311568282888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.163000000000004\t\t],\n\t\t[102,\t\t42,\t\t0,\t\t0.012403047091412742,\t\t0.44523901189173193,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t71.64\t\t],\n\t\t[103,\t\t87,\t\t0,\t\t0.007073060941828254,\t\t0.25390556381756,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.854\t\t],\n\t\t[104,\t\t103,\t\t0,\t\t0.0028852146814404432,\t\t0.1035721403291428,\t\t1711.0,\t\t1711.0,\t\
t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.665\t\t],\n\t\t[105,\t\t87,\t\t0,\t\t0.006406682825484765,\t\t0.22998422159488002,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.005\t\t],\n\t\t[106,\t\t107,\t\t0,\t\t0.005714219759923823,\t\t0.11538365264216799,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.754\t\t],\n\t\t[108,\t\t107,\t\t0,\t\t0.0025427631578947367,\t\t0.09127896939786201,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.687000000000001\t\t],\n\t\t[109,\t\t106,\t\t0,\t\t0.003030470914127424,\t\t0.10878648330773438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.504\t\t],\n\t\t[110,\t\t111,\t\t0,\t\t0.019821849030470913,\t\t0.7115558306889919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.491\t\t],\n\t\t[87,\t\t112,\t\t0,\t\t0.006135907202216068,\t\t0.220264039928212,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.441\t\t],\n\t\t[113,\t\t87,\t\t0,\t\t0.003981648199445983,\t\t0.14293141813921081,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.998\t\t],\n\t\t[87,\t\t85,\t\t0,\t\t0.011046225761772853,\t\t0.3965324494097,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.803000000000004\t\t],\n\t\t[110,\t\t114,\t\t0,\t\t0.011665339335180056,\t\t0.418757110306188,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.37899999999999\t\t],\n\t\t[115,\t\t116,\t\t0,\t\t0.007048925619834712,\t\t0.07457124214588401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.323\t\t],\n\t\t[117,\t\t118,\t\t0,\t\t0.005987534626038782,\t\t0.21493782785077598,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.584\t\t],\n\t\t[117,\t\t119,\t\t0,\t\t0.0038738746537396117,\t\t0.5562504472696961,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.751000000000005\t\t],\n\t\t[117,\t\t120,\t\t0,\t\t0.005886686288088643,\t\t0.8452704781039522,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t68.003\t\t],\n\t\t[121,\t\t122,\t\t0,\t\t0.0021170360110803325,\t\t0.0759964075574972,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.228\t\t],\n\t\t[123,\t\t124,\t\t0,\t\t0.0018386426592797783,\t\t0.0660027680945204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.62\t\t],\n\t\t[125,\t\t126,\t\t0,\t\t0.004941135734072022,\t\t0.17737467056702802,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.54\t\t],\n\t\t[127,\t\t119,\t\t0,\t\t0.0029027008310249305,\t\t0.1041998502705648,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.766\t\t],\n\t\t[118,\t\t128,\t\t0,\t\t0.007397160664819945,\t\t0.265539950057812,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.726000000000006\t\t],\n\t\t[121,\t\t119,\t\t0,\t\t0.002552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[530,\t\t527,\t\t0,\t\t0.022726611570247933,\t\t0.060106736329903994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.374\t\t],\n\t\t[125,\t\t130,\t\t0,\t\t0.002931440443213297,\t\t0.105231531956442,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.932000000000002\t\t],\n\t\t[125,\t\t123,\t\t0,\t\t0.0019078081717451524,\t\t0.2739425623421336,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.039\t\t],\n\t\t[131,\t\t132,\t\t0,\t\t0.0035744459833795014,\t\t0.12831385593973843,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.646\t\t],\n\t\t[
133,\t\t123,\t\t0,\t\t0.003864439058171745,\t\t0.13872389704704202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.320999999999998\t\t],\n\t\t[524,\t\t134,\t\t0,\t\t0.008092231404958678,\t\t0.08560847143881999,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.479\t\t],\n\t\t[135,\t\t136,\t\t0,\t\t0.005242901662049862,\t\t0.1882073282678,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.283\t\t],\n\t\t[123,\t\t131,\t\t0,\t\t0.003138331024930748,\t\t0.1126583971045252,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.127\t\t],\n\t\t[117,\t\t128,\t\t0,\t\t0.010800034626038782,\t\t0.38769479063117196,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.381\t\t],\n\t\t[137,\t\t521,\t\t0,\t\t0.013832396694214875,\t\t0.14633421587532003,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t41.843\t\t],\n\t\t[531,\t\t514,\t\t0,\t\t0.0059504132231404955,\t\t0.035409362037522,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.5\t\t],\n\t\t[139,\t\t521,\t\t0,\t\t0.021257520661157023,\t\t0.05622132386323199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.152\t\t],\n\t\t[140,\t\t514,\t\t0,\t\t0.018527603305785127,\t\t0.04900131122836401,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.023000000000003\t\t],\n\t\t[522,\t\t141,\t\t0,\t\t0.012168595041322314,\t\t0.032183175718526795,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.405\t\t],\n\t\t[142,\t\t523,\t\t0,\t\t0.007060165289256198,\t\t0.0746901476577608,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.357\t\t],\n\t\t[530,\t\t526,\t\t0,\t\t0.020281652892561983,\t\t0.053640374808152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.676\t\t],\n\t\t[140,\t\t532,\t\t0,\t\t0.004669090909090909,\t\t0.0123486871461184,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.062\t\t],\n\t\t[142,\t\t144,\t\t0,\t\t0.006678126721756199,\t\t0.0397397958689204,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.151\t\t],\n\t\t[140,\t\t522,\t\t0,\t\t0.020450247933884298,\t\t0.05408627047793199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.930999999999997\t\t],\n\t\t[145,\t\t146,\t\t0,\t\t0.028527603305785125,\t\t0.07544904460236,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.148\t\t],\n\t\t[147,\t\t523,\t\t0,\t\t0.02461289256198347,\t\t0.0650955220034416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t37.227\t\t],\n\t\t[144,\t\t523,\t\t0,\t\t0.008479338842975206,\t\t0.0224259292904064,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.825\t\t],\n\t\t[139,\t\t523,\t\t0,\t\t0.029245619834710742,\t\t0.0193370088934308,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.116999999999997\t\t],\n\t\t[140,\t\t141,\t\t0,\t\t0.008362975206611572,\t\t0.022118173847506,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.649000000000001\t\t],\n\t\t[528,\t\t526,\t\t0,\t\t0.015389090909090908,\t\t0.0407006573227188,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.276\t\t],\n\t\t[528,\t\t148,\t\t0,\t\t0.014306115702479338,\t\t0.0378364333712244,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.638\t\t],\n\t\t[149,\t\t150,\t\t0,\t\t0.013604628099173552,\t\t0.035981157661543604,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.576999999999998\t\t],\n\t\t[145,\t\t528,\t\t0,\t\t0.00320595041322314,\t\t0.0084790121737992,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,
\t\t-360,\t\t4.849\t\t],\n\t\t[530,\t\t151,\t\t0,\t\t0.013144462809917355,\t\t0.0347641247737036,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.881\t\t],\n\t\t[524,\t\t152,\t\t0,\t\t0.014598347107438016,\t\t0.03860931919944,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.08\t\t],\n\t\t[149,\t\t525,\t\t0,\t\t0.016897190082644627,\t\t0.17875695122823998,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t51.114\t\t],\n\t\t[139,\t\t514,\t\t0,\t\t0.007824132231404959,\t\t0.020693056313687997,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.834000000000001\t\t],\n\t\t[126,\t\t120,\t\t0,\t\t0.012780297783933518,\t\t0.458781387757004,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.819\t\t],\n\t\t[530,\t\t153,\t\t0,\t\t0.02254545454545455,\t\t0.059627617060924,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.1\t\t],\n\t\t[528,\t\t147,\t\t0,\t\t0.15786710743801652,\t\t0.104380679149868,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t119.387\t\t],\n\t\t[528,\t\t154,\t\t0,\t\t0.006528264462809917,\t\t0.017265779790547203,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.874\t\t],\n\t\t[130,\t\t120,\t\t0,\t\t0.01450502077562327,\t\t0.5206947188067639,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t83.781\t\t],\n\t\t[528,\t\t155,\t\t0,\t\t0.16064132231404957,\t\t0.1062149715341,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t121.485\t\t],\n\t\t[524,\t\t533,\t\t0,\t\t0.004432727272727273,\t\t0.0468942356109744,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.409\t\t],\n\t\t[524,\t\t149,\t\t0,\t\t0.0056413223140495865,\t\t0.05968007537478799,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.065\t\t],\n\t\t[154,\t\t150,\t\t0,\t\t0.007539173553719007,\t\t0.0199394052006688,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t11.402999999999999\t\t],\n\t\t[157,\t\t110,\t\t0,\t\t0.009962084487534625,\t\t0.357614433044424,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t57.541000000000004\t\t],\n\t\t[119,\t\t158,\t\t0,\t\t0.0002490189289012004,\t\t0.08045252664623159,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t3,\t\t1,\t\t-360,\t\t4.315\t\t],\n\t\t[159,\t\t60,\t\t0,\t\t0.010967451523545706,\t\t0.0984261617997728,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.674\t\t],\n\t\t[536,\t\t161,\t\t0,\t\t0.021314380165289255,\t\t0.056371704363524,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.238\t\t],\n\t\t[115,\t\t151,\t\t0,\t\t0.00379404958677686,\t\t0.0401376047510724,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.477\t\t],\n\t\t[162,\t\t134,\t\t0,\t\t0.0015910743801652895,\t\t0.016832124393744,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t4.813\t\t],\n\t\t[115,\t\t526,\t\t0,\t\t0.0037884297520661154,\t\t0.010019537998747198,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.73\t\t],\n\t\t[138,\t\t87,\t\t0,\t\t0.0011838642659279777,\t\t0.16999131006813442,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t13.675999999999998\t\t],\n\t\t[123,\t\t163,\t\t0,\t\t0.0022778739612188364,\t\t0.08177009602828919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.157\t\t],\n\t\t[112,\t\t164,\t\t0,\t\t0.0008672957063711912,\t\t0.12453516639176802,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.019\t\t],\n\t\t[112,\t\t165,\t\t0,\t\t0.005989439058171744,\t\t0.21500619230086396,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t
\t0,\t\t1,\t\t1,\t\t-360,\t\t34.595\t\t],\n\t\t[166,\t\t165,\t\t0,\t\t0.002632790858725762,\t\t0.09451074335350361,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.207\t\t],\n\t\t[167,\t\t537,\t\t0,\t\t0.00832595041322314,\t\t0.08808100664460242,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t25.186\t\t],\n\t\t[168,\t\t104,\t\t0,\t\t0.002552458448753463,\t\t0.0916270065931116,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.743\t\t],\n\t\t[531,\t\t520,\t\t0,\t\t0.016156694214876033,\t\t0.042730794079516396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.436999999999998\t\t],\n\t\t[139,\t\t520,\t\t0,\t\t0.010682314049586776,\t\t0.0282522993797748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.157\t\t],\n\t\t[520,\t\t169,\t\t0,\t\t0.0011328925619834712,\t\t0.0119849761681232,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t3.427\t\t],\n\t\t[168,\t\t105,\t\t0,\t\t0.007340893351800554,\t\t0.26352009133553606,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.401\t\t],\n\t\t[520,\t\t170,\t\t0,\t\t0.005842644628099174,\t\t0.015452470732151198,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t8.837\t\t],\n\t\t[171,\t\t89,\t\t0,\t\t0.005505454545454546,\t\t0.058242717567848004,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.654\t\t],\n\t\t[521,\t\t172,\t\t0,\t\t0.006304793388429752,\t\t0.06669899780522001,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.072\t\t],\n\t\t[123,\t\t173,\t\t0,\t\t0.005247403047091413,\t\t0.18836891696656402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.309\t\t],\n\t\t[521,\t\t174,\t\t0,\t\t0.013300495867768597,\t\t0.035176796844864404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.117\t\t],\n\t\t[37,\t\t39,\t\t0,\t\t0.004338873499549862,\t\t0.35044859579205606,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t37.592\t\t],\n\t\t[530,\t\t175,\t\t0,\t\t0.013128595041322313,\t\t0.0347221581224188,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.857\t\t],\n\t\t[530,\t\t176,\t\t0,\t\t0.005685289256198347,\t\t0.01503630144005,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.599\t\t],\n\t\t[88,\t\t530,\t\t0,\t\t0.006015867768595041,\t\t0.0159106066755372,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.099\t\t],\n\t\t[177,\t\t496,\t\t0,\t\t0.018632066115702478,\t\t0.19711036673178398,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.361999999999995\t\t],\n\t\t[178,\t\t525,\t\t0,\t\t0.03106842975206612,\t\t0.08216895464241199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.99100000000001\t\t],\n\t\t[179,\t\t493,\t\t0,\t\t0.057079669421487594,\t\t0.15096278779194802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.333\t\t],\n\t\t[180,\t\t181,\t\t0,\t\t0.041027438016528923,\t\t0.10850827416682,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.053999999999995\t\t],\n\t\t[182,\t\t180,\t\t0,\t\t0.00866314049586777,\t\t0.09164817200545601,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.206\t\t],\n\t\t[179,\t\t181,\t\t0,\t\t0.01957223140495868,\t\t0.051764115772731996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.603\t\t],\n\t\t[180,\t\t493,\t\t0,\t\t0.06676561983471074,\t\t0.17657993119175203,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t100.98299999999999\t\t],\n\t\t[183,\t\t30,\t\t0,\t\t0.0024804362880886427,\t\t0.356166349712776,\t\t3
423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t28.654\t\t],\n\t\t[183,\t\t21,\t\t0,\t\t0.0025647506925207757,\t\t0.36827307214930394,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t29.628\t\t],\n\t\t[538,\t\t185,\t\t0,\t\t0.018631404958677687,\t\t0.0123189607681008,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.09\t\t],\n\t\t[538,\t\t89,\t\t0,\t\t0.014509752066115702,\t\t0.038375005396288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.945999999999998\t\t],\n\t\t[184,\t\t186,\t\t0,\t\t0.0016554709141274237,\t\t0.059427351084826,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.562000000000001\t\t],\n\t\t[184,\t\t187,\t\t0,\t\t0.002698753462603878,\t\t0.09687863927102919,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.588\t\t],\n\t\t[520,\t\t172,\t\t0,\t\t0.0034188429752066113,\t\t0.0361682589818792,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.342\t\t],\n\t\t[89,\t\t175,\t\t0,\t\t0.0037309090909090903,\t\t0.0098674088877672,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.643\t\t],\n\t\t[185,\t\t89,\t\t0,\t\t0.005812892561983471,\t\t0.0153737832609196,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.792\t\t],\n\t\t[89,\t\t188,\t\t0,\t\t0.003108760330578513,\t\t0.008221966434607202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.702\t\t],\n\t\t[189,\t\t190,\t\t0,\t\t0.008599492151454294,\t\t0.17364414688031998,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.253\t\t],\n\t\t[539,\t\t172,\t\t0,\t\t0.0021570247933884296,\t\t0.022819366646419197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t6.525\t\t],\n\t\t[504,\t\t192,\t\t0,\t\t0.0003084297520661157,\t\t0.00326290713886456,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.9329999999999999\t\t],\n\t\t[105,\t\t186,\t\t0,\t\t0.003273372576177285,\t\t0.1175060580379876,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.907\t\t],\n\t\t[105,\t\t187,\t\t0,\t\t0.0021712257617728533,\t\t0.0779416868808324,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.540999999999999\t\t],\n\t\t[539,\t\t193,\t\t0,\t\t0.005608595041322314,\t\t0.01483346262541,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.482999999999999\t\t],\n\t\t[187,\t\t194,\t\t0,\t\t4.8649584487534626e-05,\t\t0.0069856037041576,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.562\t\t],\n\t\t[539,\t\t540,\t\t0,\t\t0.004394710743801653,\t\t0.0116230138006708,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.647\t\t],\n\t\t[539,\t\t196,\t\t0,\t\t0.00332297520661157,\t\t0.008788516227194,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.026\t\t],\n\t\t[197,\t\t540,\t\t0,\t\t0.004737190082644629,\t\t0.012528794024621601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.165\t\t],\n\t\t[110,\t\t198,\t\t0,\t\t0.00018724030470914128,\t\t0.02688587333118328,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.1630000000000003\t\t],\n\t\t[197,\t\t539,\t\t0,\t\t0.009172231404958677,\t\t0.024258473063998802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.873\t\t],\n\t\t[199,\t\t537,\t\t0,\t\t0.03612826446280991,\t\t0.0238877676441712,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.322\t\t],\n\t\t[134,\t\t526,\t\t0,\t\t0.007771239669421488,\t\t0.020553167475975197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.754000000000001\t\t],\n\t\t[200,\t\t
193,\t\t0,\t\t0.0009322314049586776,\t\t0.009862163056380801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.82\t\t],\n\t\t[4,\t\t201,\t\t0,\t\t0.013726108033240996,\t\t0.49273365914097605,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t79.282\t\t],\n\t\t[202,\t\t86,\t\t0,\t\t0.00013365650969529087,\t\t0.00479794133417816,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.772\t\t],\n\t\t[85,\t\t203,\t\t0,\t\t0.0019011426592797783,\t\t0.2729854600553416,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.962\t\t],\n\t\t[147,\t\t204,\t\t0,\t\t0.0073874380165289254,\t\t0.0781523963903056,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t22.346999999999998\t\t],\n\t\t[147,\t\t205,\t\t0,\t\t0.005959669421487603,\t\t0.00394049369636956,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.507\t\t],\n\t\t[123,\t\t206,\t\t0,\t\t0.0005753116343490305,\t\t0.0826091142668064,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t6.646\t\t],\n\t\t[537,\t\t207,\t\t0,\t\t0.018456198347107437,\t\t0.048812461297776,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.915\t\t],\n\t\t[165,\t\t208,\t\t0,\t\t0.00414612188365651,\t\t0.14883562055771601,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.948\t\t],\n\t\t[4,\t\t94,\t\t0,\t\t0.013687673130193905,\t\t0.49135394025941603,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t79.06\t\t],\n\t\t[4,\t\t2,\t\t0,\t\t5.2054478301015697e-05,\t\t0.016817654469309,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t3,\t\t1,\t\t-360,\t\t0.902\t\t],\n\t\t[209,\t\t4,\t\t0,\t\t0.0022369286703601107,\t\t0.32120104149338397,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.840999999999998\t\t],\n\t\t[119,\t\t163,\t\t0,\t\t0.003535145429362881,\t\t0.12690306230914922,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.419\t\t],\n\t\t[210,\t\t3,\t\t0,\t\t0.0003150969529085873,\t\t0.011311208844832242,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.82\t\t],\n\t\t[99,\t\t211,\t\t0,\t\t0.0035045013850415513,\t\t0.1258030161741948,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.242\t\t],\n\t\t[99,\t\t69,\t\t0,\t\t0.021717970914127423,\t\t0.7796219621557,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t125.443\t\t],\n\t\t[212,\t\t99,\t\t0,\t\t0.008453774238227147,\t\t0.30346978938770003,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.82899999999999\t\t],\n\t\t[213,\t\t214,\t\t0,\t\t0.01490115702479339,\t\t0.15764073118032798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t45.076\t\t],\n\t\t[510,\t\t215,\t\t0,\t\t0.002174710743801653,\t\t0.09202587186721281,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t13.157\t\t],\n\t\t[128,\t\t69,\t\t0,\t\t0.010711651662049862,\t\t1.538088234801848,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t123.741\t\t],\n\t\t[216,\t\t69,\t\t0,\t\t0.009628462603878117,\t\t1.3825528982351443,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t111.228\t\t],\n\t\t[217,\t\t98,\t\t0,\t\t0.0012787396121883656,\t\t0.045903620070299994,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.386\t\t],\n\t\t[504,\t\t218,\t\t0,\t\t0.027480991735537193,\t\t0.072680994226412,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.565\t\t],\n\t\t[177,\t\t504,\t\t0,\t\t0.07054809917355372,\t\t0.18658373169634002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10
6.704\t\t],\n\t\t[219,\t\t209,\t\t0,\t\t0.003938798476454294,\t\t0.5655728721401839,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t45.501000000000005\t\t],\n\t\t[219,\t\t220,\t\t0,\t\t0.0013026315789473684,\t\t0.1870451326342096,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t15.048\t\t],\n\t\t[94,\t\t95,\t\t0,\t\t0.01070740997229917,\t\t0.38436979242743197,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.846000000000004\t\t],\n\t\t[159,\t\t221,\t\t0,\t\t0.009937153739612188,\t\t0.356719480257712,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.397\t\t],\n\t\t[34,\t\t161,\t\t0,\t\t0.010965289256198347,\t\t0.116002818645824,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.17\t\t],\n\t\t[222,\t\t221,\t\t0,\t\t0.0046457756232686975,\t\t0.16677196601221997,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.834\t\t],\n\t\t[211,\t\t52,\t\t0,\t\t0.05267313019390582,\t\t0.472709090515552,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t152.12\t\t],\n\t\t[215,\t\t223,\t\t0,\t\t0.04873190082644628,\t\t0.128884831985184,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.707\t\t],\n\t\t[224,\t\t215,\t\t0,\t\t0.019086280991735535,\t\t0.050478887076288004,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.868000000000002\t\t],\n\t\t[225,\t\t224,\t\t0,\t\t0.04200925619834711,\t\t0.11110496071615601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.538999999999994\t\t],\n\t\t[224,\t\t223,\t\t0,\t\t0.031061818181818183,\t\t0.082151468537468,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.981\t\t],\n\t\t[226,\t\t6,\t\t0,\t\t0.06420099173553719,\t\t0.0424492677936932,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.552\t\t],\n\t\t[7,\t\t3,\t\t0,\t\t0.009332929362880887,\t\t0.335029305054692,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t53.907\t\t],\n\t\t[216,\t\t227,\t\t0,\t\t0.01989941135734072,\t\t0.7143401282507,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.939\t\t],\n\t\t[228,\t\t229,\t\t0,\t\t0.010545454545454545,\t\t0.027890337012274,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.95\t\t],\n\t\t[227,\t\t230,\t\t0,\t\t0.003993074792243767,\t\t0.573366419334696,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t46.128\t\t],\n\t\t[231,\t\t53,\t\t0,\t\t0.007193213296398893,\t\t1.0328749562310842,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t83.096\t\t],\n\t\t[544,\t\t545,\t\t0,\t\t0.013061818181818181,\t\t0.034545548464856,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.756\t\t],\n\t\t[234,\t\t235,\t\t0,\t\t0.04608859504132231,\t\t0.121893887321888,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t69.709\t\t],\n\t\t[546,\t\t214,\t\t0,\t\t0.057025454545454546,\t\t0.15081940173295602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.251\t\t],\n\t\t[233,\t\t227,\t\t0,\t\t0.0029001038781163438,\t\t0.1041066260218888,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.750999999999998\t\t],\n\t\t[237,\t\t238,\t\t0,\t\t0.026324628099173554,\t\t0.06962267451304,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.816\t\t],\n\t\t[212,\t\t100,\t\t0,\t\t0.007955505540166205,\t\t0.285583163531816,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.951\t\t],\n\t\t[519,\t\t239,\t\t0,\t\t0.01740429752066116,\t\t0.046030422038308406,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\
t1,\t\t-360,\t\t26.324\t\t],\n\t\t[238,\t\t519,\t\t0,\t\t0.015166280991735538,\t\t0.040111375593995205,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.939\t\t],\n\t\t[213,\t\t240,\t\t0,\t\t0.01665388429752066,\t\t0.04404574915373599,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.189\t\t],\n\t\t[241,\t\t242,\t\t0,\t\t0.009862015235457064,\t\t0.3540221919932281,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.963\t\t],\n\t\t[70,\t\t241,\t\t0,\t\t0.003819858033240997,\t\t0.5484941897752321,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t44.126999999999995\t\t],\n\t\t[509,\t\t213,\t\t0,\t\t0.011363636363636364,\t\t0.120216969880216,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t34.375\t\t],\n\t\t[68,\t\t243,\t\t0,\t\t0.003611668975069252,\t\t0.1296500701715312,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.861\t\t],\n\t\t[243,\t\t244,\t\t0,\t\t0.0007699099722991691,\t\t0.027637882270859202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.447\t\t],\n\t\t[68,\t\t244,\t\t0,\t\t0.004104051246537396,\t\t0.147325387728876,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.705\t\t],\n\t\t[544,\t\t547,\t\t0,\t\t0.02418776859504132,\t\t0.255884661882476,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.168\t\t],\n\t\t[245,\t\t227,\t\t0,\t\t0.012676419667590028,\t\t0.45505241780707606,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.219\t\t],\n\t\t[246,\t\t208,\t\t0,\t\t0.0010155817174515235,\t\t0.0364568961999408,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.8660000000000005\t\t],\n\t\t[112,\t\t208,\t\t0,\t\t0.0017927631578947367,\t\t0.0643558063672372,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.355\t\t],\n\t\t[165,\t\t247,\t\t0,\t\t0.0002113919667590028,\t\t0.0075884538459086,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.2209999999999999\t\t],\n\t\t[537,\t\t549,\t\t0,\t\t0.00032066115702479337,\t\t0.00084807607842936,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.485\t\t],\n\t\t[537,\t\t550,\t\t0,\t\t0.00032198347107438016,\t\t0.0008515732993697601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.48700000000000004\t\t],\n\t\t[537,\t\t551,\t\t0,\t\t0.0002651239669421488,\t\t0.0007011927988648,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.401\t\t],\n\t\t[110,\t\t251,\t\t0,\t\t0.00023857340720221602,\t\t0.008564200982522441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.3780000000000001\t\t],\n\t\t[510,\t\t252,\t\t0,\t\t0.08467702479338843,\t\t0.055987884365424005,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.03699999999999\t\t],\n\t\t[529,\t\t253,\t\t0,\t\t0.04859504132231405,\t\t0.12852286961777998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.5\t\t],\n\t\t[237,\t\t239,\t\t0,\t\t0.03309421487603306,\t\t0.08752669712542799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.055\t\t],\n\t\t[254,\t\t238,\t\t0,\t\t0.07815008264462811,\t\t0.05167231372274401,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t59.101000000000006\t\t],\n\t\t[69,\t\t255,\t\t0,\t\t0.0009369806094182826,\t\t0.134541235754472,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t10.824000000000002\t\t],\n\t\t[510,\t\t225,\t\t0,\t\t0.021953719008264466,\t\t0.232250442756508,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t66.41\t\t],\n\t\t[256,\t\t257,\t\t0,\t\t
0.010125619834710746,\t\t0.0267799693631888,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.315\t\t],\n\t\t[258,\t\t190,\t\t0,\t\t0.011717451523545707,\t\t0.10515695255750121,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.84\t\t],\n\t\t[258,\t\t259,\t\t0,\t\t0.015782548476454293,\t\t0.1416387085570408,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.58\t\t],\n\t\t[260,\t\t261,\t\t0,\t\t0.006791031855955679,\t\t0.9751256416231477,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t78.45\t\t],\n\t\t[554,\t\t553,\t\t0,\t\t0.17583338842975205,\t\t0.11625986438453201,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t132.974\t\t],\n\t\t[515,\t\t263,\t\t0,\t\t0.006987107438016529,\t\t0.0739172618295936,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t21.136\t\t],\n\t\t[14,\t\t264,\t\t0,\t\t0.01700694214876033,\t\t0.17991802858084,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.446000000000005\t\t],\n\t\t[116,\t\t555,\t\t0,\t\t0.0009768595041322315,\t\t0.0103342878835768,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.955\t\t],\n\t\t[151,\t\t116,\t\t0,\t\t0.007244958677685951,\t\t0.0191612735410668,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.958\t\t],\n\t\t[111,\t\t114,\t\t0,\t\t0.008806613573407202,\t\t0.3161358573133961,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.867\t\t],\n\t\t[77,\t\t111,\t\t0,\t\t0.00288452216066482,\t\t0.41418912211817605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t33.321999999999996\t\t],\n\t\t[266,\t\t525,\t\t0,\t\t0.01042909090909091,\t\t0.027582581569373602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.774000000000001\t\t],\n\t\t[267,\t\t120,\t\t0,\t\t0.013136945983379503,\t\t0.471584184581432,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t75.87899999999999\t\t],\n\t\t[268,\t\t269,\t\t0,\t\t0.0010327272727272726,\t\t0.0027313295556817604,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.5619999999999998\t\t],\n\t\t[556,\t\t271,\t\t0,\t\t0.052289586776859506,\t\t0.0345735262323792,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.544000000000004\t\t],\n\t\t[556,\t\t272,\t\t0,\t\t0.04685355371900827,\t\t0.030979257409249603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.433\t\t],\n\t\t[529,\t\t273,\t\t0,\t\t0.0034604958677685953,\t\t0.009152227205140799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.234\t\t],\n\t\t[128,\t\t274,\t\t0,\t\t0.0029350761772853184,\t\t0.1053620459045884,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.953\t\t],\n\t\t[34,\t\t275,\t\t0,\t\t0.0008290909090909092,\t\t0.00054818938265696,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.627\t\t],\n\t\t[503,\t\t276,\t\t0,\t\t0.006707438016528925,\t\t0.07095861291266,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t20.29\t\t],\n\t\t[503,\t\t504,\t\t0,\t\t0.06432727272727272,\t\t0.680524223098808,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t194.59\t\t],\n\t\t[177,\t\t218,\t\t0,\t\t0.04330380165289256,\t\t0.114528740018308,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t65.497\t\t],\n\t\t[277,\t\t278,\t\t0,\t\t0.007191135734072023,\t\t1.032576638635032,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t83.072\t\t],\n\t\t[557,\t\t558,\t\t0,\t\t0.04341289256198347,\t\t0.258338836678648,\t\t743.0,\t\t743.0,\t\t743.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t98.493\t\t]
,\n\t\t[557,\t\t559,\t\t0,\t\t0.03415867768595042,\t\t0.09034195998366001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.665\t\t],\n\t\t[559,\t\t558,\t\t0,\t\t0.04474314049586777,\t\t0.11833546501370001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.67399999999999\t\t],\n\t\t[277,\t\t78,\t\t0,\t\t0.03585768698060942,\t\t0.32180078416049196,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t103.557\t\t],\n\t\t[277,\t\t279,\t\t0,\t\t0.021390927977839334,\t\t0.191970480441328,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.777\t\t],\n\t\t[78,\t\t279,\t\t0,\t\t0.015811980609418283,\t\t0.1419028439283376,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.665\t\t],\n\t\t[281,\t\t282,\t\t0,\t\t0.0023178670360110803,\t\t0.08320574945862161,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.388\t\t],\n\t\t[283,\t\t161,\t\t0,\t\t0.036741157024793386,\t\t0.09717203248350399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t55.571000000000005\t\t],\n\t\t[268,\t\t161,\t\t0,\t\t0.018883636363636366,\t\t0.199771751868832,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t57.123000000000005\t\t],\n\t\t[256,\t\t284,\t\t0,\t\t0.010755371900826446,\t\t0.113782083346976,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t32.535\t\t],\n\t\t[515,\t\t516,\t\t0,\t\t0.04071140495867769,\t\t0.107672438361532,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.576\t\t],\n\t\t[263,\t\t516,\t\t0,\t\t0.0030355371900826445,\t\t0.128452925198488,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t18.365\t\t],\n\t\t[516,\t\t285,\t\t0,\t\t0.006908429752066116,\t\t0.018271230811372,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.449000000000002\t\t],\n\t\t[63,\t\t286,\t\t0,\t\t0.019088925619834708,\t\t0.050485881518556,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.872\t\t],\n\t\t[287,\t\t516,\t\t0,\t\t0.01732892561983471,\t\t0.011457770111127998,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.105\t\t],\n\t\t[8,\t\t102,\t\t0,\t\t0.015100069252077563,\t\t0.542055501663692,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t87.21799999999999\t\t],\n\t\t[8,\t\t101,\t\t0,\t\t0.019246883656509697,\t\t0.69091598202144,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t111.17\t\t],\n\t\t[80,\t\t288,\t\t0,\t\t0.007984072022160666,\t\t0.2866086302684072,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t46.11600000000001\t\t],\n\t\t[80,\t\t289,\t\t0,\t\t0.0003782317636201524,\t\t0.122198345223416,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t6.553999999999999\t\t],\n\t\t[276,\t\t560,\t\t0,\t\t0.01778314049586777,\t\t0.047032375838192794,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.897\t\t],\n\t\t[37,\t\t290,\t\t0,\t\t0.005629501385041551,\t\t0.4546919507138321,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t48.773999999999994\t\t],\n\t\t[290,\t\t74,\t\t0,\t\t0.02071595106187673,\t\t1.673216783321968,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t179.483\t\t],\n\t\t[512,\t\t291,\t\t0,\t\t0.0053299173553719,\t\t0.056385693247479204,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t16.123\t\t],\n\t\t[78,\t\t292,\t\t0,\t\t0.0058149815327908595,\t\t0.469673087481408,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t50.381\t\t],\n\t\t[199,\t\t548,\t\t0,\t\t0.0015530578512396695,\t\t0.00410748599634868,\t\t495.0,\t\t495.0
,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.349\t\t],\n\t\t[491,\t\t293,\t\t0,\t\t0.014176528925619833,\t\t0.009373426429729999,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.720999999999998\t\t],\n\t\t[4,\t\t294,\t\t0,\t\t9.669321329639889e-05,\t\t0.013884198109531681,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.117\t\t],\n\t\t[490,\t\t541,\t\t0,\t\t0.050580495867768596,\t\t0.133773946861896,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.503\t\t],\n\t\t[491,\t\t295,\t\t0,\t\t0.010613553719008264,\t\t0.028070443890777202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.053\t\t],\n\t\t[491,\t\t296,\t\t0,\t\t0.004400661157024794,\t\t0.0116387512948784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.656000000000001\t\t],\n\t\t[295,\t\t297,\t\t0,\t\t0.020297520661157024,\t\t0.053682341459340005,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.7\t\t],\n\t\t[508,\t\t161,\t\t0,\t\t0.023239669421487603,\t\t0.061463658055360006,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.15\t\t],\n\t\t[117,\t\t123,\t\t0,\t\t0.005876211911357341,\t\t0.21094161505628,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.941\t\t],\n\t\t[133,\t\t117,\t\t0,\t\t0.004469182825484764,\t\t0.0401081792747688,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.907\t\t],\n\t\t[71,\t\t74,\t\t0,\t\t0.03904524469065097,\t\t0.7884161162841721,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t169.144\t\t],\n\t\t[74,\t\t278,\t\t0,\t\t0.0077122576177285325,\t\t1.10740463560792,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t89.09200000000001\t\t],\n\t\t[298,\t\t515,\t\t0,\t\t0.021701157024793388,\t\t0.05739464148919599,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.823\t\t],\n\t\t[5,\t\t299,\t\t0,\t\t0.0016232686980609415,\t\t0.058271370400665996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.376\t\t],\n\t\t[32,\t\t292,\t\t0,\t\t0.009679362880886427,\t\t0.34746541983297996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.908\t\t],\n\t\t[5,\t\t29,\t\t0,\t\t0.00743395083102493,\t\t1.0674425076571843,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t85.87700000000001\t\t],\n\t\t[503,\t\t560,\t\t0,\t\t0.015140495867768593,\t\t0.160172719142436,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.8\t\t],\n\t\t[300,\t\t301,\t\t0,\t\t0.004892053324099723,\t\t0.7024509290644521,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.513000000000005\t\t],\n\t\t[51,\t\t300,\t\t0,\t\t0.002573493767313019,\t\t0.3695284920307039,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.729\t\t],\n\t\t[244,\t\t302,\t\t0,\t\t0.007714508310249307,\t\t1.107727813004004,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t89.118\t\t],\n\t\t[31,\t\t302,\t\t0,\t\t0.004369113573407203,\t\t0.6273619041941161,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.472\t\t],\n\t\t[51,\t\t282,\t\t0,\t\t0.006288434903047093,\t\t0.9029576432132521,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.64399999999999\t\t],\n\t\t[303,\t\t304,\t\t0,\t\t8.795013850415512e-05,\t\t0.000789298639172312,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.254\t\t],\n\t\t[305,\t\t304,\t\t0,\t\t0.003881117266849031,\t\t0.0783689646873844,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.813\t\t],\n\t\t[305,\t\t259,\t\t0,\t\t0.0025625,\t\t0.36
794989475177603,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t29.601999999999997\t\t],\n\t\t[306,\t\t307,\t\t0,\t\t0.03223268698060942,\t\t0.289268628831688,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t93.088\t\t],\n\t\t[305,\t\t308,\t\t0,\t\t0.0024272853185595567,\t\t0.0217833994511184,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.01\t\t],\n\t\t[305,\t\t309,\t\t0,\t\t0.011014773776523545,\t\t0.22241441259921202,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.716\t\t],\n\t\t[310,\t\t309,\t\t0,\t\t0.009565962603878117,\t\t0.343394627639832,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.253\t\t],\n\t\t[306,\t\t309,\t\t0,\t\t0.035333795013850415,\t\t0.31709917455019604,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t102.044\t\t],\n\t\t[311,\t\t280,\t\t0,\t\t0.003433691135734072,\t\t0.1232611016590444,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.833\t\t],\n\t\t[280,\t\t278,\t\t0,\t\t0.009749769159764544,\t\t0.7874838737974121,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t84.47200000000001\t\t],\n\t\t[311,\t\t32,\t\t0,\t\t0.01205909510619806,\t\t0.9740069506375919,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t104.48\t\t],\n\t\t[13,\t\t312,\t\t0,\t\t0.0043324965373961214,\t\t0.622104056565324,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.049\t\t],\n\t\t[313,\t\t314,\t\t0,\t\t0.006092624653739613,\t\t0.218710302449316,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.191\t\t],\n\t\t[312,\t\t313,\t\t0,\t\t0.00893957756232687,\t\t0.32090893884734,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.635\t\t],\n\t\t[547,\t\t566,\t\t0,\t\t0.027035702479338848,\t\t0.286013220297816,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.783\t\t],\n\t\t[245,\t\t315,\t\t0,\t\t0.014162569252077564,\t\t0.508401547875772,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.803\t\t],\n\t\t[312,\t\t316,\t\t0,\t\t8.803670360110802e-05,\t\t0.01264120812658816,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0170000000000001\t\t],\n\t\t[312,\t\t314,\t\t0,\t\t0.005339854570637119,\t\t0.191687700220296,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.843000000000004\t\t],\n\t\t[554,\t\t546,\t\t0,\t\t0.08174743801652892,\t\t0.21620344446439202,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t123.64299999999999\t\t],\n\t\t[262,\t\t216,\t\t0,\t\t0.042641966759002774,\t\t0.38268554099981195,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t123.15\t\t],\n\t\t[317,\t\t233,\t\t0,\t\t0.005647276084951523,\t\t0.114031901035644,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.464000000000002\t\t],\n\t\t[318,\t\t317,\t\t0,\t\t0.008311634349030471,\t\t0.16783161497270002,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t36.006\t\t],\n\t\t[231,\t\t52,\t\t0,\t\t0.035263677285318554,\t\t1.2658796434850879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t203.683\t\t],\n\t\t[319,\t\t567,\t\t0,\t\t0.006089586776859504,\t\t0.0644223069721,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.421\t\t],\n\t\t[557,\t\t321,\t\t0,\t\t0.010004628099173555,\t\t0.10583989458750401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.264\t\t],\n\t\t[277,\t\t65,\t\t0,\t\t0.009430170821779778,\t\t0.7616700793261759,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t81.703\t\t],
\n\t\t[322,\t\t288,\t\t0,\t\t0.006545013850415513,\t\t0.528637424797136,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.706\t\t],\n\t\t[322,\t\t323,\t\t0,\t\t0.0018503000923372577,\t\t0.14944779312484,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t16.031\t\t],\n\t\t[277,\t\t324,\t\t0,\t\t0.019719529085872576,\t\t0.39818407235049996,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t85.425\t\t],\n\t\t[324,\t\t325,\t\t0,\t\t0.01103508771932133,\t\t0.22282459929396403,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.803999999999995\t\t],\n\t\t[277,\t\t325,\t\t0,\t\t0.008665743305609418,\t\t0.174981914850048,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.54\t\t],\n\t\t[326,\t\t327,\t\t0,\t\t0.007654214876033058,\t\t0.0202436634226288,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.577\t\t],\n\t\t[328,\t\t326,\t\t0,\t\t0.10300958677685952,\t\t0.068109252150368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.90100000000001\t\t],\n\t\t[328,\t\t327,\t\t0,\t\t0.09827173553719008,\t\t0.064976616491468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t74.318\t\t],\n\t\t[326,\t\t329,\t\t0,\t\t0.028062148760330575,\t\t0.07421802283046801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.443999999999996\t\t],\n\t\t[568,\t\t329,\t\t0,\t\t0.05699900826446282,\t\t0.15074945731414802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.211\t\t],\n\t\t[568,\t\t326,\t\t0,\t\t0.03218644628099173,\t\t0.08512585494846397,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.681999999999995\t\t],\n\t\t[332,\t\t78,\t\t0,\t\t0.006471029547541551,\t\t0.522661750455416,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t56.065\t\t],\n\t\t[333,\t\t306,\t\t0,\t\t0.008580159279778392,\t\t0.308006702824228,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.559\t\t],\n\t\t[332,\t\t333,\t\t0,\t\t0.007504674515235457,\t\t0.26939943395502003,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.347\t\t],\n\t\t[332,\t\t334,\t\t0,\t\t0.017124653739612188,\t\t0.15368328149175597,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.456\t\t],\n\t\t[66,\t\t334,\t\t0,\t\t0.030625,\t\t0.27484062260471603,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t88.445\t\t],\n\t\t[330,\t\t335,\t\t0,\t\t0.00550536703601108,\t\t0.790516769355108,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t63.598\t\t],\n\t\t[336,\t\t66,\t\t0,\t\t0.015054362880886425,\t\t0.1351036887216764,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t43.477\t\t],\n\t\t[330,\t\t336,\t\t0,\t\t0.039036357340720224,\t\t0.350327404269788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t112.73700000000001\t\t],\n\t\t[68,\t\t70,\t\t0,\t\t0.016314058171745152,\t\t0.14640868261713597,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.115\t\t],\n\t\t[509,\t\t337,\t\t0,\t\t0.03494082644628099,\t\t0.09241056617056001,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t52.848\t\t],\n\t\t[324,\t\t288,\t\t0,\t\t0.012627423822714683,\t\t0.11332339674541761,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t36.468\t\t],\n\t\t[338,\t\t559,\t\t0,\t\t0.009228099173553718,\t\t0.097624922595552,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t27.915\t\t],\n\t\t[339,\t\t559,\t\t0,\t\t0.03560595041322315,\t\t0.023542417076125203,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t
26.927\t\t],\n\t\t[339,\t\t340,\t\t0,\t\t0.08711537190082644,\t\t0.23040041287850396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t131.762\t\t],\n\t\t[559,\t\t340,\t\t0,\t\t0.20983272727272728,\t\t0.138740000599684,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t158.686\t\t],\n\t\t[341,\t\t292,\t\t0,\t\t0.0009329409048961218,\t\t0.07535316024134399,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.083\t\t],\n\t\t[557,\t\t342,\t\t0,\t\t0.006019834710743802,\t\t0.0636843933534336,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t18.21\t\t],\n\t\t[558,\t\t343,\t\t0,\t\t0.010650247933884296,\t\t0.11266996708783199,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.217\t\t],\n\t\t[502,\t\t340,\t\t0,\t\t0.021737520661157025,\t\t0.22996326026071198,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t65.756\t\t],\n\t\t[72,\t\t32,\t\t0,\t\t0.00675502077562327,\t\t0.969954803293024,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t78.03399999999999\t\t],\n\t\t[344,\t\t345,\t\t0,\t\t0.0005762927054480609,\t\t0.04654686738645321,\t\t2567.0,\t\t2567.0,\t\t2567.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.993\t\t],\n\t\t[346,\t\t47,\t\t0,\t\t0.0011340027700831024,\t\t0.04070792194158799,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.55\t\t],\n\t\t[46,\t\t47,\t\t0,\t\t0.0008975069252077563,\t\t0.0322183003580208,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.184\t\t],\n\t\t[346,\t\t345,\t\t0,\t\t0.0007217797783933517,\t\t0.025910126194627202,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.169\t\t],\n\t\t[347,\t\t328,\t\t0,\t\t0.029905454545454544,\t\t0.07909314882361201,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.232\t\t],\n\t\t[347,\t\t348,\t\t0,\t\t0.04883438016528925,\t\t0.129155866607944,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t73.862\t\t],\n\t\t[571,\t\t348,\t\t0,\t\t0.041548429752066116,\t\t0.10988617921762801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t62.842\t\t],\n\t\t[347,\t\t572,\t\t0,\t\t0.016052231404958678,\t\t0.04245451362512801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.279\t\t],\n\t\t[571,\t\t570,\t\t0,\t\t0.17379041322314048,\t\t0.11490906279551602,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t131.429\t\t],\n\t\t[14,\t\t350,\t\t0,\t\t0.02166743801652892,\t\t0.05730546235524,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.772\t\t],\n\t\t[350,\t\t573,\t\t0,\t\t0.026277685950413226,\t\t0.06949852316919598,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.745\t\t],\n\t\t[15,\t\t351,\t\t0,\t\t0.02639265927977839,\t\t0.236857956201204,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.222\t\t],\n\t\t[352,\t\t15,\t\t0,\t\t0.0015260560941828254,\t\t0.219126704094076,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t17.629\t\t],\n\t\t[15,\t\t335,\t\t0,\t\t0.0035338758079432133,\t\t1.1417173740880242,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.235\t\t],\n\t\t[232,\t\t227,\t\t0,\t\t5.5747922437673134e-05,\t\t0.000500303468136644,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.161\t\t],\n\t\t[565,\t\t544,\t\t0,\t\t0.0394803305785124,\t\t0.10441652566461601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t59.714\t\t],\n\t\t[235,\t\t567,\t\t0,\t\t0.02391404958677686,\t\t0.25298896294275997,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t72.34\t\t],\n\t\t[
567,\t\t286,\t\t0,\t\t0.008068760330578512,\t\t0.34144067500694797,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.816\t\t],\n\t\t[353,\t\t519,\t\t0,\t\t0.007621818181818182,\t\t0.080631926038356,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.055999999999997\t\t],\n\t\t[354,\t\t353,\t\t0,\t\t0.0008436363636363636,\t\t0.00892490784392768,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.552\t\t],\n\t\t[355,\t\t354,\t\t0,\t\t0.0068502479338842966,\t\t0.0181173530898976,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.360999999999999\t\t],\n\t\t[354,\t\t356,\t\t0,\t\t0.01855404958677686,\t\t0.049071255647172,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.063000000000002\t\t],\n\t\t[357,\t\t358,\t\t0,\t\t0.0034823407202216067,\t\t0.5000300103406239,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.228\t\t],\n\t\t[574,\t\t359,\t\t0,\t\t0.013352066115702478,\t\t0.0353131884615884,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.195\t\t],\n\t\t[235,\t\t575,\t\t0,\t\t0.007459504132231404,\t\t0.0789147905557,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.565\t\t],\n\t\t[167,\t\t361,\t\t0,\t\t0.000616198347107438,\t\t0.0065188198358579995,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.864\t\t],\n\t\t[528,\t\t362,\t\t0,\t\t0.0011960330578512398,\t\t0.012652945368078402,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.6180000000000003\t\t],\n\t\t[363,\t\t344,\t\t0,\t\t0.0002662742382271468,\t\t0.009558592968871479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.538\t\t],\n\t\t[259,\t\t364,\t\t0,\t\t0.013069713758102496,\t\t0.26390852570525997,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t56.618\t\t],\n\t\t[54,\t\t56,\t\t0,\t\t0.007723337950138504,\t\t0.0693122289241068,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.305\t\t],\n\t\t[365,\t\t364,\t\t0,\t\t0.0049974607571537395,\t\t0.10091058802821559,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.649\t\t],\n\t\t[231,\t\t366,\t\t0,\t\t0.0013273891966759002,\t\t0.0476500209962672,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.667000000000001\t\t],\n\t\t[30,\t\t367,\t\t0,\t\t0.01126108033240997,\t\t0.1010613005635992,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.522\t\t],\n\t\t[61,\t\t367,\t\t0,\t\t0.020337603878116343,\t\t0.18251754162067196,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t58.735\t\t],\n\t\t[254,\t\t368,\t\t0,\t\t0.0004297520661157025,\t\t0.00454638722456732,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.3\t\t],\n\t\t[254,\t\t369,\t\t0,\t\t0.00015999999999999999,\t\t0.00169265493591832,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.484\t\t],\n\t\t[254,\t\t370,\t\t0,\t\t0.0003669421487603306,\t\t0.0038819152455960805,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.11\t\t],\n\t\t[99,\t\t358,\t\t0,\t\t0.0020184383656509696,\t\t0.28982797432374396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.316999999999997\t\t],\n\t\t[354,\t\t519,\t\t0,\t\t0.006762644628099174,\t\t0.07154264880985199,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.457\t\t],\n\t\t[571,\t\t371,\t\t0,\t\t0.023726942148760328,\t\t0.06275238397221199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.887\t\t],\n\t\t[207,\t\t372,\t\t0,\t\t0.002329256198347108,\t\t0.006160354689297601,\t\t495.0,\t\t495.0,\t\t49
5.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.523\t\t],\n\t\t[57,\t\t373,\t\t0,\t\t0.0017725619834710745,\t\t0.0046880246727212796,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.681\t\t],\n\t\t[209,\t\t374,\t\t0,\t\t0.0010122922437673131,\t\t0.0363388121515216,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.847\t\t],\n\t\t[375,\t\t376,\t\t0,\t\t0.0045364727608518006,\t\t0.0916021467933684,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.652\t\t],\n\t\t[376,\t\t377,\t\t0,\t\t0.0030886426592797783,\t\t0.062367022394423606,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.38\t\t],\n\t\t[16,\t\t49,\t\t0,\t\t0.002266101108033241,\t\t0.32538991773524,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t26.178\t\t],\n\t\t[318,\t\t377,\t\t0,\t\t0.004755078485685596,\t\t0.0960163149704152,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.599\t\t],\n\t\t[378,\t\t297,\t\t0,\t\t0.01753917355371901,\t\t0.046387138574374404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.528000000000002\t\t],\n\t\t[562,\t\t379,\t\t0,\t\t0.01802314049586777,\t\t0.047667121439141605,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.26\t\t],\n\t\t[576,\t\t563,\t\t0,\t\t0.001808264462809917,\t\t0.004782449638150801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.735\t\t],\n\t\t[576,\t\t381,\t\t0,\t\t0.0034320661157024794,\t\t0.009077036954898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.191\t\t],\n\t\t[577,\t\t576,\t\t0,\t\t0.06004495867768594,\t\t0.15880530575430396,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t90.818\t\t],\n\t\t[244,\t\t383,\t\t0,\t\t0.006845567867036011,\t\t0.1382282547912684,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.655\t\t],\n\t\t[244,\t\t306,\t\t0,\t\t0.02679108956599723,\t\t0.5409756541164079,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t116.059\t\t],\n\t\t[383,\t\t306,\t\t0,\t\t0.0300685595567867,\t\t0.269846910348376,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t86.838\t\t],\n\t\t[380,\t\t306,\t\t0,\t\t0.00025605955678670365,\t\t0.03676764369572,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t2.958\t\t],\n\t\t[252,\t\t225,\t\t0,\t\t0.062094545454545444,\t\t0.041056499553586,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.958999999999996\t\t],\n\t\t[220,\t\t76,\t\t0,\t\t0.002772074099722992,\t\t0.398042682239984,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t32.023\t\t],\n\t\t[542,\t\t384,\t\t0,\t\t0.007939834710743802,\t\t0.020999063146094,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.009\t\t],\n\t\t[385,\t\t384,\t\t0,\t\t0.053734876033057856,\t\t0.035529141854791196,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.637\t\t],\n\t\t[542,\t\t385,\t\t0,\t\t0.011306115702479337,\t\t0.119608453436296,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t34.201\t\t],\n\t\t[386,\t\t385,\t\t0,\t\t0.003668760330578512,\t\t0.0388121580140316,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.097999999999999\t\t],\n\t\t[387,\t\t578,\t\t0,\t\t0.015444628099173553,\t\t0.16339016240905604,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.72\t\t],\n\t\t[332,\t\t388,\t\t0,\t\t0.014036184210526315,\t\t0.5038646344377999,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t81.07300000000001\t\t],\n\t\t[382,\t\t332,\t\t0,\t\t0.017764369806094183,\t\t0.637697365901468,\t\t171
1.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t102.60700000000001\t\t],\n\t\t[382,\t\t388,\t\t0,\t\t0.00476159972299169,\t\t0.17092976750548,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.503\t\t],\n\t\t[579,\t\t578,\t\t0,\t\t0.01911074380165289,\t\t0.050543585664,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.905\t\t],\n\t\t[577,\t\t387,\t\t0,\t\t0.07597818181818182,\t\t0.20094506949431204,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t114.917\t\t],\n\t\t[144,\t\t390,\t\t0,\t\t0.0004277685950413223,\t\t0.0011313509747276,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.647\t\t],\n\t\t[37,\t\t49,\t\t0,\t\t0.008441481994459835,\t\t0.303028527944352,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t48.758\t\t],\n\t\t[391,\t\t233,\t\t0,\t\t0.014211218836565096,\t\t0.1275369872004348,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.042\t\t],\n\t\t[392,\t\t310,\t\t0,\t\t0.007035318559556785,\t\t0.06313767618386361,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.317999999999998\t\t],\n\t\t[260,\t\t393,\t\t0,\t\t0.006341412742382271,\t\t0.0569102963692744,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.314\t\t],\n\t\t[394,\t\t230,\t\t0,\t\t0.0007590027700831025,\t\t0.00681158510656168,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.1919999999999997\t\t],\n\t\t[395,\t\t282,\t\t0,\t\t0.008762984764542936,\t\t0.314569689934484,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.615\t\t],\n\t\t[395,\t\t244,\t\t0,\t\t0.0034046052631578946,\t\t0.12221699007344,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.665\t\t],\n\t\t[25,\t\t396,\t\t0,\t\t0.008809037396121884,\t\t0.316222866612064,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.881\t\t],\n\t\t[81,\t\t74,\t\t0,\t\t0.0075207756232686974,\t\t0.26997742429652244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t43.44\t\t],\n\t\t[278,\t\t80,\t\t0,\t\t0.016286011080332407,\t\t0.5846279085788,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t94.068\t\t],\n\t\t[81,\t\t278,\t\t0,\t\t0.021054016620498613,\t\t0.755787629231688,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t121.60799999999999\t\t],\n\t\t[569,\t\t570,\t\t0,\t\t0.03253950413223141,\t\t0.08605961294018,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t49.216\t\t],\n\t\t[397,\t\t552,\t\t0,\t\t0.006289586776859504,\t\t0.0166345314104904,\t\t1200.0,\t\t1200.0,\t\t1200.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.513\t\t],\n\t\t[542,\t\t398,\t\t0,\t\t0.0005580165289256199,\t\t0.0059033089500572,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.6880000000000002\t\t],\n\t\t[398,\t\t385,\t\t0,\t\t0.021893553719008262,\t\t0.05790348713648401,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.114000000000004\t\t],\n\t\t[399,\t\t499,\t\t0,\t\t0.03266380165289256,\t\t0.021597087927192803,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t24.701999999999998\t\t],\n\t\t[83,\t\t399,\t\t0,\t\t0.025700495867768593,\t\t0.016992996557050798,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.436\t\t],\n\t\t[498,\t\t400,\t\t0,\t\t0.012134214876033058,\t\t0.032092247974028,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.352999999999998\t\t],\n\t\t[518,\t\t239,\t\t0,\t\t0.04685289256198347,\t\t0.123915281026504,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t70.865\t\t],\n\t\t[575,\t\t543,\t\t0,\
t\t0.0030307438016528923,\t\t0.032062521596058796,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.168\t\t],\n\t\t[401,\t\t360,\t\t0,\t\t0.007957063711911357,\t\t0.071409774520472,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.98\t\t],\n\t\t[580,\t\t581,\t\t0,\t\t0.007134545454545454,\t\t0.018869255592422397,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.790999999999999\t\t],\n\t\t[401,\t\t402,\t\t0,\t\t0.0033434903047091418,\t\t0.030005778188384805,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.656\t\t],\n\t\t[403,\t\t231,\t\t0,\t\t0.009592105263157893,\t\t0.08608327126915,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.701999999999998\t\t],\n\t\t[189,\t\t360,\t\t0,\t\t0.028456024930747923,\t\t0.255375399471348,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t82.181\t\t],\n\t\t[234,\t\t404,\t\t0,\t\t0.008092561983471074,\t\t0.0214029921648796,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.24\t\t],\n\t\t[235,\t\t404,\t\t0,\t\t0.05107504132231405,\t\t0.13508190749437998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t77.251\t\t],\n\t\t[235,\t\t580,\t\t0,\t\t0.000580495867768595,\t\t0.00153527999352772,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.878\t\t],\n\t\t[216,\t\t259,\t\t0,\t\t0.0022115650969529088,\t\t0.079389770210892,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.774000000000001\t\t],\n\t\t[405,\t\t259,\t\t0,\t\t0.0052832409972299165,\t\t0.1896554115982928,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.516\t\t],\n\t\t[405,\t\t318,\t\t0,\t\t0.0066348684210526315,\t\t0.23817552558268398,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t38.323\t\t],\n\t\t[406,\t\t230,\t\t0,\t\t8.098164819944598e-05,\t\t0.046512685161986804,\t\t6845.0,\t\t6845.0,\t\t6845.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.871\t\t],\n\t\t[542,\t\t407,\t\t0,\t\t0.025569586776859506,\t\t0.067625761355152,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.674\t\t],\n\t\t[23,\t\t408,\t\t0,\t\t0.03224528925619835,\t\t0.08528148128033601,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t48.771\t\t],\n\t\t[577,\t\t348,\t\t0,\t\t0.012999008264462809,\t\t0.13751772188026398,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.321999999999996\t\t],\n\t\t[562,\t\t564,\t\t0,\t\t0.06921520661157024,\t\t0.18305853298686803,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t104.68799999999999\t\t],\n\t\t[582,\t\t507,\t\t0,\t\t0.006357685950413223,\t\t0.016814638289042002,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.616\t\t],\n\t\t[27,\t\t410,\t\t0,\t\t0.0030042975206611565,\t\t0.007945685980170399,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.544\t\t],\n\t\t[501,\t\t27,\t\t0,\t\t0.003811570247933884,\t\t0.040322957460962,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.53\t\t],\n\t\t[27,\t\t411,\t\t0,\t\t0.004648595041322314,\t\t0.012294480221518,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.031000000000001\t\t],\n\t\t[411,\t\t410,\t\t0,\t\t0.002054214876033058,\t\t0.0054329327333556,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.1069999999999998\t\t],\n\t\t[403,\t\t360,\t\t0,\t\t0.008191481994459833,\t\t0.07351353506655639,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.656999999999996\t\t],\n\t\t[412,\t\t360,\t\t0,\t\t0.016761772853185596,\t\t0.15042664773666,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\
t\t1,\t\t-360,\t\t48.408\t\t],\n\t\t[326,\t\t413,\t\t0,\t\t0.012077024793388432,\t\t0.12776397267356798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t36.533\t\t],\n\t\t[414,\t\t413,\t\t0,\t\t0.008093223140495867,\t\t0.08561896310149601,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t24.482\t\t],\n\t\t[6,\t\t297,\t\t0,\t\t0.019472396694214876,\t\t0.0128750188978664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.725999999999999\t\t],\n\t\t[554,\t\t580,\t\t0,\t\t0.07435371900826447,\t\t0.196648733567264,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t112.46\t\t],\n\t\t[262,\t\t401,\t\t0,\t\t0.03931232686980609,\t\t0.35280406181043206,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t113.53399999999999\t\t],\n\t\t[499,\t\t556,\t\t0,\t\t0.04185586776859504,\t\t0.11069928308639199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t63.306999999999995\t\t],\n\t\t[224,\t\t229,\t\t0,\t\t0.004135206611570248,\t\t0.0437467367631624,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.509\t\t],\n\t\t[583,\t\t507,\t\t0,\t\t0.024632727272727268,\t\t0.065147980317596,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.257\t\t],\n\t\t[415,\t\t307,\t\t0,\t\t0.015675554016620498,\t\t0.1406784987952448,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t45.271\t\t],\n\t\t[416,\t\t507,\t\t0,\t\t0.0010555371900826446,\t\t0.011166626467730801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.193\t\t],\n\t\t[284,\t\t561,\t\t0,\t\t0.015221487603305786,\t\t0.16102953827307598,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t46.045\t\t],\n\t\t[543,\t\t417,\t\t0,\t\t0.0006614876033057851,\t\t0.027991756419545603,\t\t1981.0,\t\t1981.0,\t\t1981.0,\t\t0,\t\t4,\t\t1,\t\t-360,\t\t4.002\t\t],\n\t\t[418,\t\t506,\t\t0,\t\t0.0009395041322314049,\t\t0.009939101917118,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.842\t\t],\n\t\t[220,\t\t157,\t\t0,\t\t0.004599549861495845,\t\t0.165112574384632,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.566999999999997\t\t],\n\t\t[295,\t\t419,\t\t0,\t\t0.0012023140495867769,\t\t0.012719392565946,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.637\t\t],\n\t\t[295,\t\t420,\t\t0,\t\t0.0008003305785123967,\t\t0.008466771900532,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.421\t\t],\n\t\t[541,\t\t62,\t\t0,\t\t0.05133355371900827,\t\t0.0339414035471236,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.821\t\t],\n\t\t[52,\t\t421,\t\t0,\t\t0.00013885041551246538,\t\t0.004984389831631239,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.802\t\t],\n\t\t[60,\t\t160,\t\t0,\t\t6.128808864265928e-05,\t\t0.000550023067454096,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.177\t\t],\n\t\t[535,\t\t161,\t\t0,\t\t3.735537190082645e-05,\t\t0.00039518596644331203,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.113\t\t],\n\t\t[267,\t\t282,\t\t0,\t\t0.0065652700831024926,\t\t0.235677115717012,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.921\t\t],\n\t\t[52,\t\t365,\t\t0,\t\t0.007655586334279779,\t\t0.15458444922992,\t\t1283.0,\t\t1283.0,\t\t1283.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.164\t\t],\n\t\t[28,\t\t27,\t\t0,\t\t0.015726942148760328,\t\t0.041594197273402404,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.787\t\t],\n\t\t[30,\t\t201,\t\t0,\t\t0.009128289473684211,\t\t0.327683234253536,\t\t1711.0,\t\t1711.0,\t\t1711.0,
\t\t0,\t\t2,\t\t1,\t\t-360,\t\t52.725\t\t],\n\t\t[422,\t\t81,\t\t0,\t\t0.0004226685133887349,\t\t0.13655487952674,\t\t5134.0,\t\t5134.0,\t\t5134.0,\t\t0,\t\t6,\t\t1,\t\t-360,\t\t7.324\t\t],\n\t\t[119,\t\t425,\t\t0,\t\t0.003579120498614958,\t\t0.1284816595874996,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t20.673000000000002\t\t],\n\t\t[423,\t\t425,\t\t0,\t\t0.0006518351800554017,\t\t0.0233992864289392,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.765\t\t],\n\t\t[424,\t\t425,\t\t0,\t\t0.005922957063711911,\t\t0.21261965153389198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t34.211\t\t],\n\t\t[426,\t\t428,\t\t0,\t\t0.013948429752066116,\t\t0.14756174042535197,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t42.193999999999996\t\t],\n\t\t[427,\t\t428,\t\t0,\t\t0.0002664462809917355,\t\t0.0028187600792304794,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.8059999999999999\t\t],\n\t\t[19,\t\t428,\t\t0,\t\t0.023607603305785128,\t\t0.24974703912892798,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t71.413\t\t],\n\t\t[45,\t\t429,\t\t0,\t\t0.02562314049586777,\t\t0.067767398802972,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t38.755\t\t],\n\t\t[44,\t\t429,\t\t0,\t\t5.289256198347107e-05,\t\t0.00013988883767892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08\t\t],\n\t\t[505,\t\t429,\t\t0,\t\t0.006012561983471073,\t\t0.015901863623161996,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.094\t\t],\n\t\t[231,\t\t431,\t\t0,\t\t0.011677285318559558,\t\t0.4191859418495199,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t67.44800000000001\t\t],\n\t\t[190,\t\t431,\t\t0,\t\t0.009600761772853185,\t\t0.34464383257266795,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t55.45399999999999\t\t],\n\t\t[430,\t\t431,\t\t0,\t\t0.0028100761772853187,\t\t0.1008748520662472,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.230999999999998\t\t],\n\t\t[286,\t\t433,\t\t0,\t\t0.01568694214876033,\t\t0.16595362535967603,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t47.453\t\t],\n\t\t[432,\t\t433,\t\t0,\t\t0.00010049586776859504,\t\t0.00106315516636076,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.304\t\t],\n\t\t[506,\t\t433,\t\t0,\t\t0.0065904132231404955,\t\t0.06972059669946801,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t19.936\t\t],\n\t\t[23,\t\t434,\t\t0,\t\t0.02613685950413223,\t\t0.069126069139116,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t39.532\t\t],\n\t\t[400,\t\t434,\t\t0,\t\t0.008155371900826446,\t\t0.021569110159669603,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t12.335\t\t],\n\t\t[500,\t\t434,\t\t0,\t\t0.006338512396694216,\t\t0.0167639285853336,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t9.587\t\t],\n\t\t[32,\t\t436,\t\t0,\t\t0.0044813019390581715,\t\t0.16086776359270402,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t25.884\t\t],\n\t\t[435,\t\t436,\t\t0,\t\t0.0006634349030470914,\t\t0.023815688073266,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.832\t\t],\n\t\t[78,\t\t436,\t\t0,\t\t0.00897680055401662,\t\t0.32224515307884394,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t51.85\t\t],\n\t\t[86,\t\t438,\t\t0,\t\t0.014693213296398892,\t\t0.52745036936438,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t84.868\t\t],\n\t\t[437,\t\t438,\t\t0,\t\t1.0387811634349031e-05,
\t\t0.0003728969948845,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.06\t\t],\n\t\t[221,\t\t438,\t\t0,\t\t0.002280124653739612,\t\t0.081850890377238,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.17\t\t],\n\t\t[207,\t\t439,\t\t0,\t\t0.055703801652892564,\t\t0.0368309823503996,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.126000000000005\t\t],\n\t\t[516,\t\t439,\t\t0,\t\t0.05448462809917355,\t\t0.03602487292327441,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t41.20399999999999\t\t],\n\t\t[513,\t\t439,\t\t0,\t\t0.046726611570247926,\t\t0.0308953241066316,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.336999999999996\t\t],\n\t\t[181,\t\t441,\t\t0,\t\t0.040805289256198356,\t\t0.10792074104825197,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.718\t\t],\n\t\t[440,\t\t441,\t\t0,\t\t0.0001322314049586777,\t\t0.000349722094197784,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.2\t\t],\n\t\t[504,\t\t441,\t\t0,\t\t0.05916099173553719,\t\t0.156467413554364,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t89.48100000000001\t\t],\n\t\t[135,\t\t442,\t\t0,\t\t0.004956890581717451,\t\t0.177940231009092,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.631\t\t],\n\t\t[109,\t\t442,\t\t0,\t\t0.0015380886426592797,\t\t0.055213615042649204,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.884\t\t],\n\t\t[112,\t\t442,\t\t0,\t\t0.0027304362880886425,\t\t0.09801597510545401,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t15.770999999999999\t\t],\n\t\t[113,\t\t443,\t\t0,\t\t0.0019885734072022164,\t\t0.07138491472072879,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.485999999999999\t\t],\n\t\t[132,\t\t443,\t\t0,\t\t0.006788434903047091,\t\t0.24368818615747198,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t39.21\t\t],\n\t\t[107,\t\t443,\t\t0,\t\t2.2333795013850418e-05,\t\t0.000801728539002036,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.129\t\t],\n\t\t[444,\t\t445,\t\t0,\t\t7.877423822714682e-05,\t\t0.00282780221121528,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.455\t\t],\n\t\t[112,\t\t445,\t\t0,\t\t0.002816135734072022,\t\t0.101092375313206,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.266\t\t],\n\t\t[109,\t\t445,\t\t0,\t\t0.0014354224376731304,\t\t0.0515281497432104,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.291\t\t],\n\t\t[119,\t\t447,\t\t0,\t\t0.005212690443213296,\t\t0.74849127803204,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t60.217\t\t],\n\t\t[100,\t\t447,\t\t0,\t\t0.0050695117728531865,\t\t0.7279322237145921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t58.563\t\t],\n\t\t[446,\t\t447,\t\t0,\t\t2.9518698060941832e-05,\t\t0.00423859584186224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t0.341\t\t],\n\t\t[124,\t\t448,\t\t0,\t\t6.509695290858726e-05,\t\t0.00233682116794768,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.376\t\t],\n\t\t[125,\t\t448,\t\t0,\t\t0.00615148891966759,\t\t0.22082338542026803,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t35.531\t\t],\n\t\t[131,\t\t448,\t\t0,\t\t3.912742382271468e-05,\t\t0.0014045786807313759,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.226\t\t],\n\t\t[449,\t\t450,\t\t0,\t\t0.0023614958448753462,\t\t0.08477191683710039,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t
1,\t\t1,\t\t-360,\t\t13.64\t\t],\n\t\t[173,\t\t450,\t\t0,\t\t0.002862361495844876,\t\t0.10275176694050518,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t16.533\t\t],\n\t\t[184,\t\t450,\t\t0,\t\t0.004022853185595568,\t\t0.14441057621844403,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t23.236\t\t],\n\t\t[144,\t\t451,\t\t0,\t\t0.007672727272727273,\t\t0.020292624515794402,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t11.605\t\t],\n\t\t[140,\t\t451,\t\t0,\t\t0.006991074380165291,\t\t0.018489807120219602,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.574000000000002\t\t],\n\t\t[514,\t\t451,\t\t0,\t\t0.01149289256198347,\t\t0.030396095817207994,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.383\t\t],\n\t\t[537,\t\t585,\t\t0,\t\t0.05072595041322314,\t\t0.134158641165824,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.723\t\t],\n\t\t[141,\t\t585,\t\t0,\t\t0.007994710743801653,\t\t0.0211441978151932,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.092\t\t],\n\t\t[584,\t\t585,\t\t0,\t\t9.256198347107438e-05,\t\t0.000244805465938352,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.14\t\t],\n\t\t[522,\t\t454,\t\t0,\t\t0.0035008264462809916,\t\t0.0092588924438956,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.295\t\t],\n\t\t[144,\t\t454,\t\t0,\t\t0.00452892561983471,\t\t0.011977981726290799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.85\t\t],\n\t\t[453,\t\t454,\t\t0,\t\t0.001114710743801653,\t\t0.0029481572540882,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.686\t\t],\n\t\t[199,\t\t456,\t\t0,\t\t0.013063140495867768,\t\t0.0086372614214612,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.879\t\t],\n\t\t[140,\t\t456,\t\t0,\t\t0.005061818181818182,\t\t0.013387361765852802,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t7.656000000000001\t\t],\n\t\t[455,\t\t456,\t\t0,\t\t0.0011365289256198346,\t\t0.00300586139962416,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t1.719\t\t],\n\t\t[537,\t\t456,\t\t0,\t\t0.039058512396694216,\t\t0.025825228046024003,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.538\t\t],\n\t\t[538,\t\t457,\t\t0,\t\t0.027927272727272728,\t\t0.0184653265736368,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.12\t\t],\n\t\t[153,\t\t457,\t\t0,\t\t0.030093223140495867,\t\t0.019897438549384,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t22.758000000000003\t\t],\n\t\t[176,\t\t457,\t\t0,\t\t0.004579173553719009,\t\t0.0030277190305137603,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.463\t\t],\n\t\t[524,\t\t459,\t\t0,\t\t0.004318677685950414,\t\t0.011421923596476799,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.532\t\t],\n\t\t[458,\t\t459,\t\t0,\t\t0.001993388429752066,\t\t0.0052720605700488,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.015\t\t],\n\t\t[134,\t\t459,\t\t0,\t\t0.011813553719008265,\t\t0.031244171895617998,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.868\t\t],\n\t\t[460,\t\t461,\t\t0,\t\t6.611570247933885e-05,\t\t0.000174861047098892,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.1\t\t],\n\t\t[150,\t\t461,\t\t0,\t\t0.008018512396694214,\t\t0.021207147792120403,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.128\t\t],\n\t\t[149,\t\t461,\t\t0,\t\t0.005586115702479339,\t\t0.0147740098693748,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\
t1,\t\t1,\t\t-360,\t\t8.449\t\t],\n\t\t[521,\t\t463,\t\t0,\t\t0.014348429752066114,\t\t0.009487086110365599,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t10.850999999999999\t\t],\n\t\t[462,\t\t463,\t\t0,\t\t0.007197355371900825,\t\t0.0047588433967958406,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t5.443\t\t],\n\t\t[538,\t\t463,\t\t0,\t\t0.012211570247933883,\t\t0.0080742088497664,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t9.235\t\t],\n\t\t[110,\t\t464,\t\t0,\t\t0.0025753116343490306,\t\t0.0924473799817492,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.875\t\t],\n\t\t[90,\t\t464,\t\t0,\t\t0.007328947368421053,\t\t0.26309125979076,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.332\t\t],\n\t\t[165,\t\t464,\t\t0,\t\t0.002152527700831025,\t\t0.0772704722900764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t12.433\t\t],\n\t\t[458,\t\t465,\t\t0,\t\t0.002003305785123967,\t\t0.0052982897270776,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t3.03\t\t],\n\t\t[134,\t\t465,\t\t0,\t\t0.011838677685950413,\t\t0.031310619093534,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.906\t\t],\n\t\t[524,\t\t465,\t\t0,\t\t0.004293553719008264,\t\t0.0113554763986092,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.494\t\t],\n\t\t[466,\t\t467,\t\t0,\t\t0.0023509349030470914,\t\t0.084392804892244,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.579\t\t],\n\t\t[110,\t\t467,\t\t0,\t\t0.0025337603878116343,\t\t0.09095579200221118,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t14.635\t\t],\n\t\t[165,\t\t467,\t\t0,\t\t0.0022891274238227145,\t\t0.08217406777274441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.222000000000001\t\t],\n\t\t[468,\t\t469,\t\t0,\t\t0.0005269421487603305,\t\t0.0013936425453786,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.797\t\t],\n\t\t[541,\t\t469,\t\t0,\t\t0.022390743801652895,\t\t0.05921844221026801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t33.866\t\t],\n\t\t[490,\t\t469,\t\t0,\t\t0.028243305785123966,\t\t0.07469714209944801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.718\t\t],\n\t\t[263,\t\t471,\t\t0,\t\t0.0371900826446281,\t\t0.0245898347482832,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.125\t\t],\n\t\t[470,\t\t471,\t\t0,\t\t0.001570909090909091,\t\t0.0010386746197682802,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.188\t\t],\n\t\t[534,\t\t471,\t\t0,\t\t0.024497190082644622,\t\t0.0161973787927468,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t18.526\t\t],\n\t\t[136,\t\t472,\t\t0,\t\t0.0007079293628808865,\t\t0.025412930201351602,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t4.0889999999999995\t\t],\n\t\t[110,\t\t472,\t\t0,\t\t0.00019511772853185596,\t\t0.0070042485539216805,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.127\t\t],\n\t\t[251,\t\t472,\t\t0,\t\t4.207063711911357e-05,\t\t0.00151023282928764,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.243\t\t],\n\t\t[226,\t\t474,\t\t0,\t\t0.017639669421487602,\t\t0.011663231841509601,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t13.34\t\t],\n\t\t[473,\t\t474,\t\t0,\t\t0.003467107438016529,\t\t0.00916971330986216,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t5.244\t\t],\n\t\t[257,\t\t474,\t\t0,\t\t0.020264462809917356,\t\t0.053594910935781594,\t\t495.0,\t\t
495.0,\t\t495.0,\t\t0,\t\t2,\t\t1,\t\t-360,\t\t30.65\t\t],\n\t\t[6,\t\t474,\t\t0,\t\t0.08066247933884299,\t\t0.05333349367016,\t\t248.0,\t\t248.0,\t\t248.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t61.001000000000005\t\t],\n\t\t[299,\t\t475,\t\t0,\t\t0.013238227146814403,\t\t0.47521993028123993,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t76.464\t\t],\n\t\t[3,\t\t475,\t\t0,\t\t0.0002794321329639889,\t\t0.010030929162389441,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.614\t\t],\n\t\t[210,\t\t475,\t\t0,\t\t0.0001481994459833795,\t\t0.00531999712702368,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.856\t\t],\n\t\t[297,\t\t476,\t\t0,\t\t0.0193500826446281,\t\t0.05117658265464801,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t29.267\t\t],\n\t\t[296,\t\t476,\t\t0,\t\t0.005596694214876033,\t\t0.014801987636898,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t8.465\t\t],\n\t\t[295,\t\t476,\t\t0,\t\t0.0009474380165289256,\t\t0.00250575880492432,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.433\t\t],\n\t\t[313,\t\t478,\t\t0,\t\t0.008696849030470914,\t\t0.31219557906752804,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t50.233000000000004\t\t],\n\t\t[477,\t\t478,\t\t0,\t\t1.5235457063711912e-05,\t\t0.0005469155924977479,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.08800000000000001\t\t],\n\t\t[245,\t\t478,\t\t0,\t\t0.005264542936288089,\t\t0.188984197007248,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t30.408\t\t],\n\t\t[479,\t\t481,\t\t0,\t\t0.028420495867768597,\t\t0.07516576970575199,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t42.986000000000004\t\t],\n\t\t[565,\t\t481,\t\t0,\t\t0.024842314049586776,\t\t0.065702289836964,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t37.574\t\t],\n\t\t[480,\t\t481,\t\t0,\t\t7.735537190082645e-05,\t\t0.000204587425105844,\t\t495.0,\t\t495.0,\t\t495.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.11699999999999999\t\t],\n\t\t[415,\t\t482,\t\t0,\t\t0.011021814404432133,\t\t0.0989140353680364,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t31.831\t\t],\n\t\t[56,\t\t482,\t\t0,\t\t0.002630886426592798,\t\t0.0236105947261788,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t7.598\t\t],\n\t\t[409,\t\t482,\t\t0,\t\t0.0007635041551246537,\t\t0.0068519822810072005,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t2.205\t\t],\n\t\t[483,\t\t484,\t\t0,\t\t9.037396121883656e-05,\t\t0.000811050963873968,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.261\t\t],\n\t\t[3,\t\t484,\t\t0,\t\t0.010022160664819944,\t\t0.08994275516621358,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t28.944000000000003\t\t],\n\t\t[301,\t\t484,\t\t0,\t\t0.00966516620498615,\t\t0.08673894848517479,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t27.913\t\t],\n\t\t[233,\t\t485,\t\t0,\t\t0.01410180055401662,\t\t0.1265550251138996,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.726\t\t],\n\t\t[392,\t\t485,\t\t0,\t\t0.00914819944598338,\t\t0.0820994883738036,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t26.42\t\t],\n\t\t[391,\t\t485,\t\t0,\t\t8.518005540166207e-05,\t\t0.000764438839512864,\t\t856.0,\t\t856.0,\t\t856.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.24600000000000002\t\t],\n\t\t[579,\t\t488,\t\t0,\t\t0.004636473829194215,\t\t0.11036180126571601,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t21.038\t\t],\n\t\t[486,\t\t488,\t\t0,
\t\t0.00016969696969690082,\t\t0.00403929018798184,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.77\t\t],\n\t\t[487,\t\t488,\t\t0,\t\t0.00014567493112954544,\t\t0.00346749456396992,\t\t1486.0,\t\t1486.0,\t\t1486.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.6609999999999999\t\t],\n\t\t[270,\t\t489,\t\t0,\t\t0.0001745152354570637,\t\t0.0062646695140596,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.008\t\t],\n\t\t[331,\t\t489,\t\t0,\t\t0.003002943213296399,\t\t0.10779830627119119,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t17.345\t\t],\n\t\t[396,\t\t489,\t\t0,\t\t0.01124792243767313,\t\t0.40377286606072005,\t\t1711.0,\t\t1711.0,\t\t1711.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t64.968\t\t],\n\t\t[519,\t\t253,\t\t0,\t\t0.013353485337561985,\t\t0.141267767926912,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t40.394293146100004\t\t],\n\t\t[382,\t\t349,\t\t0,\t\t0.009091647380263157,\t\t1.30547149138788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t105.02671053600001\t\t],\n\t\t[349,\t\t351,\t\t0,\t\t0.0005858117819605263,\t\t0.0841168325920224,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t6.76729770521\t\t],\n\t\t[459,\t\t465,\t\t0,\t\t1.578788789911157e-05,\t\t0.00016702153987596,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.047758360894800005\t\t],\n\t\t[549,\t\t550,\t\t0,\t\t3.680432518409091e-05,\t\t0.000389356391787088,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.111333083682\t\t],\n\t\t[550,\t\t551,\t\t0,\t\t5.755645674710744e-05,\t\t0.0006088951287918401,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.17410828165999997\t\t],\n\t\t[194,\t\t195,\t\t0,\t\t1.7560672583171745e-05,\t\t0.00252154053805592,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.202860889681\t\t],\n\t\t[247,\t\t248,\t\t0,\t\t2.1755213937811637e-05,\t\t0.0031238355819477198,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.25131623141\t\t],\n\t\t[2,\t\t294,\t\t0,\t\t2.3531392658518004e-05,\t\t0.003378877444715,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.271834647991\t\t],\n\t\t[549,\t\t551,\t\t0,\t\t9.265809538429751e-05,\t\t0.0009802386406577602,\t\t991.0,\t\t991.0,\t\t991.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.28029073853799996\t\t],\n\t\t[54,\t\t365,\t\t0,\t\t2.573045189134349e-05,\t\t0.00369464080598484,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.297238180249\t\t],\n\t\t[131,\t\t265,\t\t0,\t\t2.7616389041343487e-05,\t\t0.00396544290388756,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.319024526206\t\t],\n\t\t[91,\t\t92,\t\t0,\t\t2.8945628197853184e-05,\t\t0.0041563086239824396,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.33437989694200004\t\t],\n\t\t[247,\t\t249,\t\t0,\t\t3.098840072160664e-05,\t\t0.00444963074500788,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.357978005136\t\t],\n\t\t[186,\t\t191,\t\t0,\t\t3.1591661821191135e-05,\t\t0.00453625312865552,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.36494687735799997\t\t],\n\t\t[129,\t\t173,\t\t0,\t\t3.202671277479225e-05,\t\t0.00459872218332188,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.369972585975\t\t],\n\t\t[96,\t\t202,\t\t0,\t\t3.5971247867797784e-05,\t\t0.00516511877739804,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.415539855369\t\t],\n\t\t[53,\t\t320,\t\t0,\t\t3.784209581142659e-05,\t\t0.00543375421308236,\t\t3423.0,\t\t3423.0,\t
\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.437151890814\t\t],\n\t\t[24,\t\t396,\t\t0,\t\t4.144748602818559e-05,\t\t0.005951452925597279,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.47880135859800005\t\t],\n\t\t[133,\t\t156,\t\t0,\t\t4.431754564044322e-05,\t\t0.0063635653674415605,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.511956287238\t\t],\n\t\t[442,\t\t452,\t\t0,\t\t4.483572190450138e-05,\t\t0.006437970402313801,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.517942259441\t\t],\n\t\t[445,\t\t452,\t\t0,\t\t4.490753296371191e-05,\t\t0.0064482817668697215,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.518771820797\t\t],\n\t\t[247,\t\t250,\t\t0,\t\t4.594910768732687e-05,\t\t0.00659784169268824,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.530804092004\t\t],\n\t\t[187,\t\t195,\t\t0,\t\t4.755760376239612e-05,\t\t0.006828805970367921,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.549385438663\t\t],\n\t\t[216,\t\t236,\t\t0,\t\t5.03353075283241e-05,\t\t0.00722765701751724,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.581473472567\t\t],\n\t\t[244,\t\t389,\t\t0,\t\t5.1633313019736845e-05,\t\t0.007414037889302401,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.596468032004\t\t],\n\t\t[394,\t\t406,\t\t0,\t\t5.6346419007686985e-05,\t\t0.008090793734075721,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.650913832377\t\t],\n\t\t[442,\t\t445,\t\t0,\t\t6.388070648310249e-05,\t\t0.00917264360085512,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.737949921293\t\t],\n\t\t[442,\t\t444,\t\t0,\t\t6.584378362735456e-05,\t\t0.00945452224616264,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.760627388463\t\t],\n\t\t[198,\t\t472,\t\t0,\t\t8.37554210498615e-05,\t\t0.0120264578966664,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.967542623967\t\t],\n\t\t[464,\t\t467,\t\t0,\t\t8.460287496468144e-05,\t\t0.01214814397621276,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t0.977332411594\t\t],\n\t\t[198,\t\t251,\t\t0,\t\t8.83613182396122e-05,\t\t0.012687819608389479,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.0207499483\t\t],\n\t\t[112,\t\t143,\t\t0,\t\t9.049653833033241e-05,\t\t0.012994416294241841,\t\t3423.0,\t\t3423.0,\t\t3423.0,\t\t0,\t\t1,\t\t1,\t\t-360,\t\t1.04541601079\t\t],\n\t\t[2,\t\t490,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[5,\t\t491,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[10,\t\t492,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[12,\t\t493,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[13,\t\t494,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[15,\t\t495,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[18,\t\t496,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[20,\t\t497,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[22,\t\t498,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[24,\t\t499,\t\t0,\t\t0.005,\t\t0.0,\t\t200
0.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[26,\t\t500,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[30,\t\t501,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[32,\t\t502,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[37,\t\t503,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[42,\t\t504,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[46,\t\t505,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[52,\t\t506,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[56,\t\t507,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[61,\t\t508,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[68,\t\t509,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[69,\t\t510,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[74,\t\t511,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[78,\t\t512,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[86,\t\t513,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[87,\t\t514,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[94,\t\t515,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[95,\t\t516,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[96,\t\t517,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[99,\t\t518,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[100,\t\t519,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[104,\t\t520,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[105,\t\t521,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[106,\t\t522,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[107,\t\t523,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[117,\t\t524,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[120,\t\t525,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[123,\t\t526,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[124,\t\t527,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[125,\t\t528,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[128,\t\t529,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,
\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[129,\t\t530,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[138,\t\t531,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[143,\t\t532,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[156,\t\t533,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[157,\t\t534,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[159,\t\t535,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[160,\t\t536,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[165,\t\t537,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[184,\t\t538,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[191,\t\t539,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[195,\t\t540,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[201,\t\t541,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[220,\t\t542,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[231,\t\t543,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[232,\t\t544,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[233,\t\t545,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[236,\t\t546,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[245,\t\t547,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[246,\t\t548,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[248,\t\t549,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[249,\t\t550,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[250,\t\t551,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[259,\t\t552,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[261,\t\t553,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[262,\t\t554,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[265,\t\t555,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[270,\t\t556,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[277,\t\t557,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[279,\t\t558,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[280,\t\t559,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\
t-360,\t\t360\t\t],\n\t\t[290,\t\t560,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[301,\t\t561,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[305,\t\t562,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[306,\t\t563,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[310,\t\t564,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[313,\t\t565,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[315,\t\t566,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[320,\t\t567,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[330,\t\t568,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[332,\t\t569,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[334,\t\t570,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[336,\t\t571,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[349,\t\t572,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[351,\t\t573,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[358,\t\t574,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[360,\t\t575,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[380,\t\t576,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[382,\t\t577,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[383,\t\t578,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[389,\t\t579,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[401,\t\t580,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[402,\t\t581,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[409,\t\t582,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[415,\t\t583,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[444,\t\t584,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t],\n\t\t[452,\t\t585,\t\t0,\t\t0.005,\t\t0.0,\t\t2000.0,\t\t2000.0,\t\t2000.0,\t\t1.0,\t\t0,\t\t1,\t\t-360,\t\t360\t\t]\n\t])\n\tppc[\"gen_control\"] = array([\n\t\t[586,\t\t1,\t\t0.08658028904199107,\t\t4.329014452099554,\t\t0, 0, 0],\n\t\t[589,\t\t1,\t\t0.010042676909098597,\t\t0.5021338454549299,\t\t0, 0, 0],\n\t\t[590,\t\t1,\t\t0.012095775674984046,\t\t0.6047887837492023,\t\t0, 0, 0],\n\t\t[593,\t\t1,\t\t0.0017666198683200384,\t\t0.08833099341600192,\t\t0, 0, 0],\n\t\t[594,\t\t1,\t\t0.006047887837492023,\t\t0.30239439187460115,\t\t0, 0, 
0],\n\t\t[595,\t\t1,\t\t1.50560576164933,\t\t75.2802880824665,\t\t0, 0, 0],\n\t\t[598,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[599,\t\t1,\t\t0.0029602819415092537,\t\t0.1480140970754627,\t\t0, 0, 0],\n\t\t[601,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[602,\t\t1,\t\t0.007830423200121252,\t\t0.39152116000606263,\t\t0, 0, 0],\n\t\t[603,\t\t1,\t\t1.0997606567649967,\t\t54.98803283824984,\t\t0, 0, 0],\n\t\t[607,\t\t1,\t\t0.5729577951308232,\t\t28.64788975654116,\t\t0, 0, 0],\n\t\t[608,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[609,\t\t1,\t\t0.0057932399285449895,\t\t0.2896619964272495,\t\t0, 0, 0],\n\t\t[612,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[613,\t\t1,\t\t0.027056340325622208,\t\t1.3528170162811104,\t\t0, 0, 0],\n\t\t[614,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[616,\t\t1,\t\t0.0046154933496649645,\t\t0.23077466748324824,\t\t0, 0, 0],\n\t\t[617,\t\t1,\t\t0.04360845440717932,\t\t2.1804227203589663,\t\t0, 0, 0],\n\t\t[618,\t\t1,\t\t0.010631550198538607,\t\t0.5315775099269304,\t\t0, 0, 0],\n\t\t[619,\t\t1,\t\t0.037560566569687294,\t\t1.8780283284843649,\t\t0, 0, 0],\n\t\t[621,\t\t1,\t\t0.24350706293059987,\t\t12.175353146529993,\t\t0, 0, 0],\n\t\t[624,\t\t1,\t\t0.004297183463481174,\t\t0.21485917317405873,\t\t0, 0, 0],\n\t\t[628,\t\t1,\t\t0.14292113889652203,\t\t7.1460569448261015,\t\t0, 0, 0],\n\t\t[629,\t\t1,\t\t0.023968734429639437,\t\t1.198436721481972,\t\t0, 0, 0],\n\t\t[631,\t\t1,\t\t0.025401128917466494,\t\t1.2700564458733248,\t\t0, 0, 0],\n\t\t[632,\t\t1,\t\t0.01435577586688896,\t\t0.717788793344448,\t\t0, 0, 0],\n\t\t[637,\t\t1,\t\t0.017093240888069558,\t\t0.854662044403478,\t\t0, 0, 0],\n\t\t[638,\t\t1,\t\t0.02048324117592693,\t\t1.0241620587963465,\t\t0, 0, 0],\n\t\t[640,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[641,\t\t1,\t\t0.0040107045659157625,\t\t0.20053522829578813,\t\t0, 0, 0],\n\t\t[642,\t\t1,\t\t0.00919915571071155,\t\t0.4599577855355775,\t\t0, 0, 0],\n\t\t[643,\t\t1,\t\t0.27279157245950864,\t\t13.639578622975431,\t\t0, 0, 0],\n\t\t[647,\t\t1,\t\t0.00445633840657307,\t\t0.2228169203286535,\t\t0, 0, 0],\n\t\t[650,\t\t1,\t\t0.4216014442504307,\t\t21.080072212521536,\t\t0, 0, 0],\n\t\t[652,\t\t1,\t\t0.00746436683100989,\t\t0.37321834155049455,\t\t0, 0, 0],\n\t\t[655,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[663,\t\t1,\t\t0.00238732414637843,\t\t0.1193662073189215,\t\t0, 0, 0],\n\t\t[666,\t\t1,\t\t0.00919915571071155,\t\t0.4599577855355775,\t\t0, 0, 0],\n\t\t[670,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[672,\t\t1,\t\t0.010536057232683471,\t\t0.5268028616341736,\t\t0, 0, 0],\n\t\t[676,\t\t1,\t\t0.11777465788800255,\t\t5.888732894400127,\t\t0, 0, 0],\n\t\t[681,\t\t1,\t\t0.0063821132179850025,\t\t0.31910566089925013,\t\t0, 0, 0],\n\t\t[683,\t\t1,\t\t0.008753521870054244,\t\t0.4376760935027122,\t\t0, 0, 0],\n\t\t[687,\t\t1,\t\t0.42303383873825773,\t\t21.151691936912886,\t\t0, 0, 0],\n\t\t[689,\t\t1,\t\t0.09867606471697511,\t\t4.933803235848756,\t\t0, 0, 0],\n\t\t[691,\t\t1,\t\t0.008276057040778557,\t\t0.4138028520389279,\t\t0, 0, 0],\n\t\t[694,\t\t1,\t\t0.005220282133414166,\t\t0.2610141066707083,\t\t0, 0, 0],\n\t\t[695,\t\t1,\t\t0.004679155326901723,\t\t0.23395776634508614,\t\t0, 0, 0],\n\t\t[696,\t\t1,\t\t0.22950142793851305,\t\t11.475071396925653,\t\t0, 0, 
0],\n\t\t[697,\t\t1,\t\t0.0036923946797319715,\t\t0.1846197339865986,\t\t0, 0, 0],\n\t\t[698,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[702,\t\t1,\t\t0.023363945645890238,\t\t1.168197282294512,\t\t0, 0, 0],\n\t\t[705,\t\t1,\t\t0.005411268065124442,\t\t0.27056340325622213,\t\t0, 0, 0],\n\t\t[707,\t\t1,\t\t0.010822536130248884,\t\t0.5411268065124443,\t\t0, 0, 0],\n\t\t[713,\t\t1,\t\t0.004265352474862795,\t\t0.21326762374313976,\t\t0, 0, 0],\n\t\t[714,\t\t1,\t\t0.00477464829275686,\t\t0.238732414637843,\t\t0, 0, 0],\n\t\t[716,\t\t1,\t\t1.5915494309189534e-05,\t\t0.0007957747154594768,\t\t0, 0, 0],\n\t\t[717,\t\t1,\t\t0.0017507043740108488,\t\t0.08753521870054244,\t\t0, 0, 0],\n\t\t[719,\t\t1,\t\t0.623250757147862,\t\t31.162537857393104,\t\t0, 0, 0],\n\t\t[722,\t\t1,\t\t0.006589014644004467,\t\t0.3294507322002233,\t\t0, 0, 0],\n\t\t[723,\t\t1,\t\t0.006270704757820675,\t\t0.31353523789103377,\t\t0, 0, 0],\n\t\t[724,\t\t1,\t\t0.0019257748114119334,\t\t0.09628874057059668,\t\t0, 0, 0],\n\t\t[727,\t\t1,\t\t0.019576058000303126,\t\t0.9788029000151565,\t\t0, 0, 0],\n\t\t[728,\t\t1,\t\t0.16233804195373325,\t\t8.116902097686662,\t\t0, 0, 0],\n\t\t[730,\t\t1,\t\t0.10077690996578814,\t\t5.038845498289407,\t\t0, 0, 0],\n\t\t[732,\t\t1,\t\t0.004647324338283344,\t\t0.2323662169141672,\t\t0, 0, 0],\n\t\t[735,\t\t1,\t\t0.013496339174192726,\t\t0.6748169587096363,\t\t0, 0, 0],\n\t\t[738,\t\t1,\t\t0.04408591923645501,\t\t2.2042959618227504,\t\t0, 0, 0],\n\t\t[741,\t\t1,\t\t0.0340591578216656,\t\t1.7029578910832803,\t\t0, 0, 0],\n\t\t[742,\t\t1,\t\t0.0028647889756541157,\t\t0.14323944878270578,\t\t0, 0, 0],\n\t\t[743,\t\t1,\t\t0.44881693951914486,\t\t22.440846975957243,\t\t0, 0, 0],\n\t\t[746,\t\t1,\t\t0.03183098861837907,\t\t1.5915494309189535,\t\t0, 0, 0],\n\t\t[747,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[748,\t\t1,\t\t0.03501408748021698,\t\t1.7507043740108488,\t\t0, 0, 0],\n\t\t[749,\t\t1,\t\t0.0025464790894703256,\t\t0.12732395447351627,\t\t0, 0, 0],\n\t\t[750,\t\t1,\t\t0.028902537665488188,\t\t1.4451268832744095,\t\t0, 0, 0],\n\t\t[753,\t\t1,\t\t0.049624511256052974,\t\t2.4812255628026487,\t\t0, 0, 0],\n\t\t[758,\t\t1,\t\t0.0058887328944001276,\t\t0.2944366447200064,\t\t0, 0, 0],\n\t\t[760,\t\t1,\t\t0.2527380496299298,\t\t12.636902481496492,\t\t0, 0, 0],\n\t\t[761,\t\t1,\t\t0.004997465213085514,\t\t0.2498732606542757,\t\t0, 0, 0],\n\t\t[762,\t\t1,\t\t0.3517324242330887,\t\t17.586621211654435,\t\t0, 0, 0],\n\t\t[763,\t\t1,\t\t0.006461690689530951,\t\t0.32308453447654756,\t\t0, 0, 0],\n\t\t[765,\t\t1,\t\t0.018780283284843647,\t\t0.9390141642421824,\t\t0, 0, 0],\n\t\t[767,\t\t1,\t\t0.0035650707252584553,\t\t0.17825353626292276,\t\t0, 0, 0],\n\t\t[769,\t\t1,\t\t0.013782818071758136,\t\t0.6891409035879068,\t\t0, 0, 0],\n\t\t[771,\t\t1,\t\t0.21963382146681557,\t\t10.981691073340778,\t\t0, 0, 0],\n\t\t[772,\t\t1,\t\t0.002992112930127632,\t\t0.1496056465063816,\t\t0, 0, 0],\n\t\t[774,\t\t1,\t\t0.010663381187156987,\t\t0.5331690593578494,\t\t0, 0, 0],\n\t\t[777,\t\t1,\t\t0.012573240504259732,\t\t0.6286620252129866,\t\t0, 0, 0],\n\t\t[778,\t\t1,\t\t0.004679155326901723,\t\t0.23395776634508614,\t\t0, 0, 0],\n\t\t[781,\t\t1,\t\t0.4169859509007658,\t\t20.84929754503829,\t\t0, 0, 0],\n\t\t[784,\t\t1,\t\t0.4058451048843331,\t\t20.292255244216655,\t\t0, 0, 0],\n\t\t[785,\t\t1,\t\t0.00047746482927568597,\t\t0.0238732414637843,\t\t0, 0, 0],\n\t\t[787,\t\t1,\t\t0.24764509145098912,\t\t12.382254572549456,\t\t0, 0, 
0],\n\t\t[788,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 0],\n\t\t[789,\t\t1,\t\t0.0123185925953127,\t\t0.615929629765635,\t\t0, 0, 0],\n\t\t[791,\t\t1,\t\t0.0031830988618379067,\t\t0.15915494309189535,\t\t0, 0, 0],\n\t\t[792,\t\t1,\t\t0.009979014931861837,\t\t0.49895074659309185,\t\t0, 0, 0],\n\t\t[795,\t\t1,\t\t0.004329014452099553,\t\t0.2164507226049777,\t\t0, 0, 0],\n\t\t[800,\t\t1,\t\t0.0058091554228541795,\t\t0.290457771142709,\t\t0, 0, 0],\n\t\t[801,\t\t1,\t\t0.007957747154594767,\t\t0.3978873577297384,\t\t0, 0, 0],\n\t\t[802,\t\t1,\t\t0.07957747154594767,\t\t3.9788735772973833,\t\t0, 0, 0],\n\t\t[805,\t\t1,\t\t0.44881693951914486,\t\t22.440846975957243,\t\t0, 0, 0],\n\t\t[806,\t\t1,\t\t0.005697746962689853,\t\t0.2848873481344927,\t\t0, 0, 0],\n\t\t[808,\t\t1,\t\t0.034616200122487235,\t\t1.7308100061243619,\t\t0, 0, 0],\n\t\t[809,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[811,\t\t1,\t\t0.0040107045659157625,\t\t0.20053522829578813,\t\t0, 0, 0],\n\t\t[814,\t\t1,\t\t0.014164789935178685,\t\t0.7082394967589343,\t\t0, 0, 0],\n\t\t[816,\t\t1,\t\t0.012748310941660816,\t\t0.6374155470830408,\t\t0, 0, 0],\n\t\t[817,\t\t1,\t\t0.017188733853924696,\t\t0.8594366926962349,\t\t0, 0, 0],\n\t\t[821,\t\t1,\t\t0.013130282805081364,\t\t0.6565141402540683,\t\t0, 0, 0],\n\t\t[822,\t\t1,\t\t0.04265352474862795,\t\t2.1326762374313977,\t\t0, 0, 0],\n\t\t[826,\t\t1,\t\t0.018461973398659858,\t\t0.9230986699329929,\t\t0, 0, 0],\n\t\t[830,\t\t1,\t\t0.02832957987035737,\t\t1.4164789935178685,\t\t0, 0, 0],\n\t\t[834,\t\t1,\t\t0.007416620348082323,\t\t0.37083101740411617,\t\t0, 0, 0],\n\t\t[835,\t\t1,\t\t0.010138169874953733,\t\t0.5069084937476867,\t\t0, 0, 0],\n\t\t[836,\t\t1,\t\t0.008116902097686661,\t\t0.4058451048843331,\t\t0, 0, 0],\n\t\t[837,\t\t1,\t\t0.15024226627874918,\t\t7.512113313937459,\t\t0, 0, 0],\n\t\t[839,\t\t1,\t\t0.011666057328635928,\t\t0.5833028664317964,\t\t0, 0, 0],\n\t\t[841,\t\t1,\t\t0.0037083101740411615,\t\t0.18541550870205808,\t\t0, 0, 0],\n\t\t[843,\t\t1,\t\t0.10599719209920229,\t\t5.2998596049601145,\t\t0, 0, 0],\n\t\t[844,\t\t1,\t\t0.012732395447351627,\t\t0.6366197723675814,\t\t0, 0, 0],\n\t\t[845,\t\t1,\t\t0.10122254380644544,\t\t5.061127190322272,\t\t0, 0, 0],\n\t\t[849,\t\t1,\t\t0.24796340133717296,\t\t12.398170066858649,\t\t0, 0, 0],\n\t\t[850,\t\t1,\t\t0.005092958178940651,\t\t0.25464790894703254,\t\t0, 0, 0],\n\t\t[851,\t\t1,\t\t0.01265281797580568,\t\t0.632640898790284,\t\t0, 0, 0],\n\t\t[853,\t\t1,\t\t0.0036923946797319715,\t\t0.1846197339865986,\t\t0, 0, 0],\n\t\t[855,\t\t1,\t\t0.21899720169444797,\t\t10.949860084722399,\t\t0, 0, 0],\n\t\t[856,\t\t1,\t\t0.011459155902616463,\t\t0.5729577951308231,\t\t0, 0, 0],\n\t\t[857,\t\t1,\t\t0.4462704604296745,\t\t22.313523021483725,\t\t0, 0, 0],\n\t\t[858,\t\t1,\t\t0.01808000153523931,\t\t0.9040000767619655,\t\t0, 0, 0],\n\t\t[859,\t\t1,\t\t0.027056340325622208,\t\t1.3528170162811104,\t\t0, 0, 0],\n\t\t[860,\t\t1,\t\t0.0039788735772973835,\t\t0.1989436788648692,\t\t0, 0, 0],\n\t\t[864,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 0],\n\t\t[865,\t\t1,\t\t0.0035014087480216977,\t\t0.17507043740108488,\t\t0, 0, 0],\n\t\t[867,\t\t1,\t\t0.24478030247533505,\t\t12.239015123766753,\t\t0, 0, 0],\n\t\t[869,\t\t1,\t\t0.4329014452099553,\t\t21.645072260497766,\t\t0, 0, 0],\n\t\t[870,\t\t1,\t\t0.018589297353133374,\t\t0.9294648676566688,\t\t0, 0, 0],\n\t\t[872,\t\t1,\t\t0.00716197243913529,\t\t0.3580986219567645,\t\t0, 0, 
0],\n\t\t[873,\t\t1,\t\t0.038833806114422456,\t\t1.941690305721123,\t\t0, 0, 0],\n\t\t[874,\t\t1,\t\t0.006589014644004467,\t\t0.3294507322002233,\t\t0, 0, 0],\n\t\t[875,\t\t1,\t\t0.007766761222884492,\t\t0.38833806114422464,\t\t0, 0, 0],\n\t\t[877,\t\t1,\t\t0.007894085177358009,\t\t0.39470425886790045,\t\t0, 0, 0],\n\t\t[881,\t\t1,\t\t0.3187236890358296,\t\t15.93618445179148,\t\t0, 0, 0],\n\t\t[882,\t\t1,\t\t0.005538592019597957,\t\t0.2769296009798979,\t\t0, 0, 0],\n\t\t[883,\t\t1,\t\t0.005729577951308231,\t\t0.28647889756541156,\t\t0, 0, 0],\n\t\t[885,\t\t1,\t\t0.15597184423005742,\t\t7.798592211502871,\t\t0, 0, 0],\n\t\t[886,\t\t1,\t\t0.8186930272647096,\t\t40.93465136323548,\t\t0, 0, 0],\n\t\t[889,\t\t1,\t\t0.0030239439187460114,\t\t0.15119719593730058,\t\t0, 0, 0],\n\t\t[890,\t\t1,\t\t0.0076394372684109755,\t\t0.3819718634205488,\t\t0, 0, 0],\n\t\t[893,\t\t1,\t\t0.00954929658551372,\t\t0.477464829275686,\t\t0, 0, 0],\n\t\t[894,\t\t1,\t\t0.025146481008519465,\t\t1.2573240504259733,\t\t0, 0, 0],\n\t\t[895,\t\t1,\t\t0.0030239439187460114,\t\t0.15119719593730058,\t\t0, 0, 0],\n\t\t[896,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[898,\t\t1,\t\t0.013464508185574344,\t\t0.6732254092787172,\t\t0, 0, 0],\n\t\t[900,\t\t1,\t\t0.03584169318429482,\t\t1.7920846592147412,\t\t0, 0, 0],\n\t\t[902,\t\t1,\t\t0.006207042780583919,\t\t0.31035213902919595,\t\t0, 0, 0],\n\t\t[903,\t\t1,\t\t0.0031990143561470966,\t\t0.15995071780735484,\t\t0, 0, 0],\n\t\t[905,\t\t1,\t\t0.021851973686517232,\t\t1.0925986843258617,\t\t0, 0, 0],\n\t\t[906,\t\t1,\t\t0.010504226244065093,\t\t0.5252113122032547,\t\t0, 0, 0],\n\t\t[907,\t\t1,\t\t0.02142225534016911,\t\t1.0711127670084555,\t\t0, 0, 0],\n\t\t[909,\t\t1,\t\t0.005856901905781748,\t\t0.2928450952890874,\t\t0, 0, 0],\n\t\t[915,\t\t1,\t\t0.0038197186342054878,\t\t0.1909859317102744,\t\t0, 0, 0],\n\t\t[917,\t\t1,\t\t0.005411268065124442,\t\t0.27056340325622213,\t\t0, 0, 0],\n\t\t[918,\t\t1,\t\t0.012254930618075942,\t\t0.612746530903797,\t\t0, 0, 0],\n\t\t[920,\t\t1,\t\t0.0020371832715762603,\t\t0.10185916357881303,\t\t0, 0, 0],\n\t\t[921,\t\t1,\t\t0.019735212943395024,\t\t0.9867606471697512,\t\t0, 0, 0],\n\t\t[922,\t\t1,\t\t0.05220282133414166,\t\t2.6101410667070835,\t\t0, 0, 0],\n\t\t[923,\t\t1,\t\t0.023236621691416718,\t\t1.161831084570836,\t\t0, 0, 0],\n\t\t[925,\t\t1,\t\t0.008276057040778557,\t\t0.4138028520389279,\t\t0, 0, 0],\n\t\t[931,\t\t1,\t\t0.03455253814525047,\t\t1.7276269072625237,\t\t0, 0, 0],\n\t\t[935,\t\t1,\t\t0.007352958370845565,\t\t0.36764791854227824,\t\t0, 0, 0],\n\t\t[936,\t\t1,\t\t0.016615776058793875,\t\t0.8307888029396938,\t\t0, 0, 0],\n\t\t[937,\t\t1,\t\t0.00477464829275686,\t\t0.238732414637843,\t\t0, 0, 0],\n\t\t[939,\t\t1,\t\t1.5915494309189534e-05,\t\t0.0007957747154594768,\t\t0, 0, 0],\n\t\t[940,\t\t1,\t\t0.009421972631040205,\t\t0.47109863155201026,\t\t0, 0, 0],\n\t\t[944,\t\t1,\t\t0.004042535554534142,\t\t0.2021267777267071,\t\t0, 0, 0],\n\t\t[950,\t\t1,\t\t0.005092958178940651,\t\t0.25464790894703254,\t\t0, 0, 0],\n\t\t[952,\t\t1,\t\t0.005045211696013082,\t\t0.2522605848006541,\t\t0, 0, 0],\n\t\t[957,\t\t1,\t\t0.0019098593171027439,\t\t0.0954929658551372,\t\t0, 0, 0],\n\t\t[958,\t\t1,\t\t0.010615634704229418,\t\t0.530781735211471,\t\t0, 0, 0],\n\t\t[959,\t\t1,\t\t0.007241549910681238,\t\t0.3620774955340619,\t\t0, 0, 0],\n\t\t[960,\t\t1,\t\t0.004217605991935227,\t\t0.21088029959676136,\t\t0, 0, 0],\n\t\t[963,\t\t1,\t\t0.2785211504108168,\t\t13.926057520540843,\t\t0, 0, 
0],\n\t\t[965,\t\t1,\t\t0.11204507993669433,\t\t5.602253996834716,\t\t0, 0, 0],\n\t\t[966,\t\t1,\t\t0.021008452488130186,\t\t1.0504226244065094,\t\t0, 0, 0],\n\t\t[967,\t\t1,\t\t0.01193662073189215,\t\t0.5968310365946076,\t\t0, 0, 0],\n\t\t[968,\t\t1,\t\t0.017188733853924696,\t\t0.8594366926962349,\t\t0, 0, 0],\n\t\t[969,\t\t1,\t\t0.018111832523857688,\t\t0.9055916261928845,\t\t0, 0, 0],\n\t\t[971,\t\t1,\t\t0.0031830988618379067,\t\t0.15915494309189535,\t\t0, 0, 0],\n\t\t[973,\t\t1,\t\t0.4287634166895661,\t\t21.438170834478306,\t\t0, 0, 0],\n\t\t[976,\t\t1,\t\t0.008562535938343968,\t\t0.4281267969171984,\t\t0, 0, 0],\n\t\t[978,\t\t1,\t\t0.0007321127382227185,\t\t0.03660563691113593,\t\t0, 0, 0],\n\t\t[981,\t\t1,\t\t0.03787887645587108,\t\t1.8939438227935543,\t\t0, 0, 0],\n\t\t[982,\t\t1,\t\t0.0015756339366097638,\t\t0.07878169683048819,\t\t0, 0, 0],\n\t\t[983,\t\t1,\t\t0.01400563499208679,\t\t0.7002817496043395,\t\t0, 0, 0],\n\t\t[984,\t\t1,\t\t0.14801409707546268,\t\t7.400704853773133,\t\t0, 0, 0],\n\t\t[985,\t\t1,\t\t0.0035014087480216977,\t\t0.17507043740108488,\t\t0, 0, 0],\n\t\t[986,\t\t1,\t\t0.0017825353626292277,\t\t0.08912676813146138,\t\t0, 0, 0],\n\t\t[987,\t\t1,\t\t0.02618098813861678,\t\t1.3090494069308392,\t\t0, 0, 0],\n\t\t[988,\t\t1,\t\t0.0008116902097686662,\t\t0.04058451048843331,\t\t0, 0, 0],\n\t\t[993,\t\t1,\t\t0.06238873769202297,\t\t3.119436884601149,\t\t0, 0, 0],\n\t\t[994,\t\t1,\t\t0.010504226244065093,\t\t0.5252113122032547,\t\t0, 0, 0],\n\t\t[995,\t\t1,\t\t0.0006684507609859605,\t\t0.033422538049298026,\t\t0, 0, 0],\n\t\t[997,\t\t1,\t\t0.005984225860255264,\t\t0.2992112930127632,\t\t0, 0, 0],\n\t\t[999,\t\t1,\t\t0.004965634224467135,\t\t0.24828171122335674,\t\t0, 0, 0],\n\t\t[1000,\t\t1,\t\t0.015597184423005743,\t\t0.7798592211502873,\t\t0, 0, 0],\n\t\t[1002,\t\t1,\t\t0.0031512678732195276,\t\t0.15756339366097638,\t\t0, 0, 0],\n\t\t[1003,\t\t1,\t\t0.2864788975654116,\t\t14.32394487827058,\t\t0, 0, 0],\n\t\t[1007,\t\t1,\t\t0.007416620348082323,\t\t0.37083101740411617,\t\t0, 0, 0],\n\t\t[1008,\t\t1,\t\t0.015597184423005743,\t\t0.7798592211502873,\t\t0, 0, 0],\n\t\t[1010,\t\t1,\t\t0.238732414637843,\t\t11.93662073189215,\t\t0, 0, 0],\n\t\t[1011,\t\t1,\t\t0.005952394871636886,\t\t0.2976197435818443,\t\t0, 0, 0],\n\t\t[1012,\t\t1,\t\t0.9024085273310466,\t\t45.12042636655233,\t\t0, 0, 0],\n\t\t[1014,\t\t1,\t\t0.238732414637843,\t\t11.93662073189215,\t\t0, 0, 0],\n\t\t[1026,\t\t1,\t\t0.20868396138209316,\t\t10.434198069104658,\t\t0, 0, 0],\n\t\t[1027,\t\t3,\t\t0.002298550022578703,\t\t0.11492750112893517,\t\t2.22, 61.69, 0.004502],\n\t\t[1028,\t\t2,\t\t0.025464790894703257,\t\t1.273239544735163,\t\t0, 0, 0],\n\t\t[1029,\t\t2,\t\t0.0015996029245410612,\t\t0.07998014622705306,\t\t0, 0, 0],\n\t\t[1030,\t\t2,\t\t0.06480789282701978,\t\t3.2403946413509894,\t\t0, 0, 0],\n\t\t[1031,\t\t2,\t\t0.06463074564767912,\t\t3.2315372823839565,\t\t0, 0, 0],\n\t\t[1032,\t\t2,\t\t0.009772775025341927,\t\t0.4886387512670964,\t\t0, 0, 0],\n\t\t[1033,\t\t2,\t\t0.0031935716694765437,\t\t0.15967858347382718,\t\t0, 0, 0],\n\t\t[1034,\t\t2,\t\t0.005364335122251813,\t\t0.26821675611259066,\t\t0, 0, 0],\n\t\t[1035,\t\t3,\t\t0.00317587127473044,\t\t0.158793563736522,\t\t2.22, 61.69, 0.004502],\n\t\t[1036,\t\t2,\t\t0.0042795539826391196,\t\t0.21397769913195597,\t\t0, 0, 0],\n\t\t[1037,\t\t2,\t\t0.004583737816416693,\t\t0.22918689082083465,\t\t0, 0, 0],\n\t\t[1038,\t\t2,\t\t0.004358800228219271,\t\t0.21794001141096359,\t\t0, 0, 
0],\n\t\t[1039,\t\t2,\t\t0.008449479506347874,\t\t0.42247397531739384,\t\t0, 0, 0],\n\t\t[1040,\t\t3,\t\t2.5955064969193202e-06,\t\t0.00012977532484596601,\t\t2.22, 61.69, 0.004502],\n\t\t[1041,\t\t2,\t\t0.012998987840239671,\t\t0.6499493920119837,\t\t0, 0, 0],\n\t\t[1042,\t\t2,\t\t0.00335501991632689,\t\t0.1677509958163445,\t\t0, 0, 0],\n\t\t[1043,\t\t3,\t\t0.0003026685105316776,\t\t0.015133425526583881,\t\t2.22, 61.69, 0.004502],\n\t\t[1044,\t\t3,\t\t0.0011243820116265814,\t\t0.05621910058132907,\t\t2.22, 61.69, 0.004502],\n\t\t[1045,\t\t2,\t\t0.0019373243262327522,\t\t0.09686621631163762,\t\t0, 0, 0],\n\t\t[1046,\t\t2,\t\t0.0031015144255394987,\t\t0.15507572127697494,\t\t0, 0, 0],\n\t\t[1047,\t\t3,\t\t0.00034416981541931054,\t\t0.017208490770965527,\t\t2.22, 61.69, 0.004502],\n\t\t[1048,\t\t2,\t\t0.0020485945786587064,\t\t0.10242972893293534,\t\t0, 0, 0],\n\t\t[1049,\t\t2,\t\t0.01870104799381521,\t\t0.9350523996907605,\t\t0, 0, 0],\n\t\t[1050,\t\t2,\t\t0.0033601814151550304,\t\t0.1680090707577515,\t\t0, 0, 0],\n\t\t[1051,\t\t2,\t\t0.019380601737792977,\t\t0.969030086889649,\t\t0, 0, 0],\n\t\t[1052,\t\t3,\t\t0.0005247651571922151,\t\t0.026238257859610755,\t\t2.22, 61.69, 0.004502],\n\t\t[1053,\t\t3,\t\t0.00041550140953476974,\t\t0.02077507047673849,\t\t2.22, 61.69, 0.004502],\n\t\t[1054,\t\t2,\t\t0.0069428381079974354,\t\t0.3471419053998717,\t\t0, 0, 0],\n\t\t[1055,\t\t3,\t\t0.0001818229987415119,\t\t0.009091149937075596,\t\t2.22, 61.69, 0.004502],\n\t\t[1056,\t\t2,\t\t0.0384482661909012,\t\t1.9224133095450602,\t\t0, 0, 0],\n\t\t[1057,\t\t2,\t\t0.02718238967557453,\t\t1.3591194837787268,\t\t0, 0, 0],\n\t\t[1058,\t\t2,\t\t0.06721018861714274,\t\t3.3605094308571375,\t\t0, 0, 0],\n\t\t[1059,\t\t2,\t\t0.02641152929543176,\t\t1.320576464771588,\t\t0, 0, 0],\n\t\t[1060,\t\t3,\t\t0.0006590053340983933,\t\t0.03295026670491967,\t\t2.22, 61.69, 0.004502],\n\t\t[1061,\t\t2,\t\t0.010304492946979937,\t\t0.5152246473489969,\t\t0, 0, 0],\n\t\t[1062,\t\t3,\t\t0.00018325491392786168,\t\t0.009162745696393085,\t\t2.22, 61.69, 0.004502],\n\t\t[1063,\t\t3,\t\t0.0005520076745724519,\t\t0.0276003837286226,\t\t2.22, 61.69, 0.004502],\n\t\t[1064,\t\t2,\t\t0.013355424896304362,\t\t0.667771244815218,\t\t0, 0, 0],\n\t\t[1065,\t\t2,\t\t0.021608252882636087,\t\t1.0804126441318045,\t\t0, 0, 0],\n\t\t[1066,\t\t2,\t\t0.008556107291276397,\t\t0.4278053645638199,\t\t0, 0, 0],\n\t\t[1067,\t\t3,\t\t0.002078788013715776,\t\t0.1039394006857888,\t\t2.22, 61.69, 0.004502],\n\t\t[1068,\t\t3,\t\t0.0003188842576981683,\t\t0.015944212884908417,\t\t2.22, 61.69, 0.004502],\n\t\t[1069,\t\t3,\t\t0.00020313001706596343,\t\t0.010156500853298172,\t\t2.22, 61.69, 0.004502],\n\t\t[1070,\t\t3,\t\t5.020379247175116e-05,\t\t0.0025101896235875582,\t\t2.22, 61.69, 0.004502],\n\t\t[1071,\t\t3,\t\t0.0002755733400308117,\t\t0.013778667001540588,\t\t2.22, 61.69, 0.004502],\n\t\t[1072,\t\t2,\t\t0.0034911570519954678,\t\t0.1745578525997734,\t\t0, 0, 0],\n\t\t[1073,\t\t2,\t\t0.001974161472118056,\t\t0.09870807360590281,\t\t0, 0, 0],\n\t\t[1074,\t\t2,\t\t0.0046620003597127105,\t\t0.23310001798563554,\t\t0, 0, 0],\n\t\t[1075,\t\t3,\t\t0.0010048055180333312,\t\t0.05024027590166657,\t\t2.22, 61.69, 0.004502],\n\t\t[1076,\t\t3,\t\t0.00010624248611578546,\t\t0.005312124305789274,\t\t2.22, 61.69, 0.004502],\n\t\t[1077,\t\t3,\t\t0.0016628534246063698,\t\t0.08314267123031849,\t\t2.22, 61.69, 0.004502],\n\t\t[1078,\t\t3,\t\t0.0021908153060440304,\t\t0.10954076530220153,\t\t2.22, 61.69, 
0.004502],\n\t\t[1079,\t\t2,\t\t0.002190700708933187,\t\t0.10953503544665937,\t\t0, 0, 0],\n\t\t[1080,\t\t2,\t\t0.008412929217414397,\t\t0.4206464608707199,\t\t0, 0, 0],\n\t\t[1081,\t\t2,\t\t0.025823979083824652,\t\t1.2911989541912325,\t\t0, 0, 0],\n\t\t[1082,\t\t2,\t\t0.03247105626963941,\t\t1.623552813481971,\t\t0, 0, 0],\n\t\t[1083,\t\t2,\t\t0.04034141649573272,\t\t2.017070824786636,\t\t0, 0, 0],\n\t\t[1084,\t\t2,\t\t0.0383703068502718,\t\t1.9185153425135901,\t\t0, 0, 0],\n\t\t[1085,\t\t2,\t\t0.007239283505967098,\t\t0.3619641752983549,\t\t0, 0, 0],\n\t\t[1086,\t\t2,\t\t0.01436208920263519,\t\t0.7181044601317595,\t\t0, 0, 0],\n\t\t[1087,\t\t2,\t\t0.007427186304799236,\t\t0.3713593152399618,\t\t0, 0, 0],\n\t\t[1088,\t\t3,\t\t0.0023416461987310717,\t\t0.11708230993655358,\t\t2.22, 61.69, 0.004502],\n\t\t[1089,\t\t2,\t\t0.024474821190373128,\t\t1.2237410595186564,\t\t0, 0, 0],\n\t\t[1090,\t\t2,\t\t0.0022624979772680404,\t\t0.11312489886340203,\t\t0, 0, 0],\n\t\t[1091,\t\t3,\t\t0.0013601543234855855,\t\t0.06800771617427928,\t\t2.22, 61.69, 0.004502],\n\t\t[1092,\t\t2,\t\t0.0014626466159500494,\t\t0.07313233079750248,\t\t0, 0, 0],\n\t\t[1093,\t\t2,\t\t0.009906140914748767,\t\t0.49530704573743833,\t\t0, 0, 0],\n\t\t[1094,\t\t3,\t\t0.00023930778294026586,\t\t0.011965389147013294,\t\t2.22, 61.69, 0.004502],\n\t\t[1095,\t\t3,\t\t1.3047613994501091e-05,\t\t0.0006523806997250545,\t\t2.22, 61.69, 0.004502],\n\t\t[1096,\t\t2,\t\t0.005379826679377905,\t\t0.2689913339688953,\t\t0, 0, 0],\n\t\t[1097,\t\t3,\t\t0.0002929164939619051,\t\t0.014645824698095257,\t\t2.22, 61.69, 0.004502],\n\t\t[1098,\t\t2,\t\t0.0021303060183860277,\t\t0.10651530091930138,\t\t0, 0, 0],\n\t\t[1099,\t\t2,\t\t0.0073754261124176915,\t\t0.3687713056208846,\t\t0, 0, 0],\n\t\t[1100,\t\t3,\t\t1.3306005265883919e-06,\t\t6.653002632941959e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1101,\t\t2,\t\t0.005343192104787693,\t\t0.2671596052393847,\t\t0, 0, 0],\n\t\t[1102,\t\t2,\t\t0.02234407998394998,\t\t1.1172039991974991,\t\t0, 0, 0],\n\t\t[1103,\t\t2,\t\t0.01562148424141561,\t\t0.7810742120707805,\t\t0, 0, 0],\n\t\t[1104,\t\t3,\t\t1.3172819714966009e-05,\t\t0.0006586409857483004,\t\t2.22, 61.69, 0.004502],\n\t\t[1105,\t\t3,\t\t0.0001386935566767763,\t\t0.006934677833838815,\t\t2.22, 61.69, 0.004502],\n\t\t[1106,\t\t3,\t\t0.00014577275883068604,\t\t0.0072886379415343025,\t\t2.22, 61.69, 0.004502],\n\t\t[1107,\t\t2,\t\t0.004852418696402547,\t\t0.24262093482012728,\t\t0, 0, 0],\n\t\t[1108,\t\t2,\t\t0.02039874588539438,\t\t1.019937294269719,\t\t0, 0, 0],\n\t\t[1109,\t\t3,\t\t4.9542410867097304e-05,\t\t0.002477120543354865,\t\t2.22, 61.69, 0.004502],\n\t\t[1110,\t\t3,\t\t0.00010533237807450261,\t\t0.00526661890372513,\t\t2.22, 61.69, 0.004502],\n\t\t[1111,\t\t2,\t\t0.005706531882583417,\t\t0.2853265941291709,\t\t0, 0, 0],\n\t\t[1112,\t\t2,\t\t0.004426690383932842,\t\t0.2213345191966421,\t\t0, 0, 0],\n\t\t[1113,\t\t3,\t\t0.00022513170529279912,\t\t0.011256585264639957,\t\t2.22, 61.69, 0.004502],\n\t\t[1114,\t\t3,\t\t0.0008560555102861403,\t\t0.042802775514307015,\t\t2.22, 61.69, 0.004502],\n\t\t[1115,\t\t2,\t\t0.0032197222090973076,\t\t0.16098611045486538,\t\t0, 0, 0],\n\t\t[1116,\t\t3,\t\t0.002075453185310181,\t\t0.10377265926550905,\t\t2.22, 61.69, 0.004502],\n\t\t[1117,\t\t2,\t\t0.005780032679669937,\t\t0.2890016339834969,\t\t0, 0, 0],\n\t\t[1118,\t\t3,\t\t0.0005554515385863421,\t\t0.027772576929317106,\t\t2.22, 61.69, 0.004502],\n\t\t[1119,\t\t3,\t\t0.0027536366373517632,\t\t0.13768183186758817,\t\t2.22, 61.69, 
0.004502],\n\t\t[1120,\t\t3,\t\t0.0001538074296570127,\t\t0.007690371482850636,\t\t2.22, 61.69, 0.004502],\n\t\t[1121,\t\t3,\t\t3.4414977793908876e-05,\t\t0.0017207488896954439,\t\t2.22, 61.69, 0.004502],\n\t\t[1122,\t\t3,\t\t9.313004041299959e-05,\t\t0.00465650202064998,\t\t2.22, 61.69, 0.004502],\n\t\t[1123,\t\t3,\t\t9.32225252447514e-05,\t\t0.00466112626223757,\t\t2.22, 61.69, 0.004502],\n\t\t[1124,\t\t3,\t\t8.201464578534214e-05,\t\t0.004100732289267108,\t\t2.22, 61.69, 0.004502],\n\t\t[1125,\t\t3,\t\t0.0016436821796102436,\t\t0.08218410898051219,\t\t2.22, 61.69, 0.004502],\n\t\t[1126,\t\t3,\t\t0.0018560581327172175,\t\t0.09280290663586088,\t\t2.22, 61.69, 0.004502],\n\t\t[1127,\t\t2,\t\t0.006703391093283916,\t\t0.3351695546641958,\t\t0, 0, 0],\n\t\t[1128,\t\t3,\t\t0.0001948941120002845,\t\t0.009744705600014225,\t\t2.22, 61.69, 0.004502],\n\t\t[1129,\t\t3,\t\t0.0003016780123772693,\t\t0.015083900618863466,\t\t2.22, 61.69, 0.004502],\n\t\t[1130,\t\t3,\t\t6.530151955301432e-05,\t\t0.003265075977650716,\t\t2.22, 61.69, 0.004502],\n\t\t[1131,\t\t3,\t\t0.00018443373362804407,\t\t0.009221686681402204,\t\t2.22, 61.69, 0.004502],\n\t\t[1132,\t\t3,\t\t2.2886271300209156e-05,\t\t0.0011443135650104578,\t\t2.22, 61.69, 0.004502],\n\t\t[1133,\t\t3,\t\t4.5810964480308454e-05,\t\t0.002290548224015423,\t\t2.22, 61.69, 0.004502],\n\t\t[1134,\t\t3,\t\t3.236913111220881e-05,\t\t0.0016184565556104404,\t\t2.22, 61.69, 0.004502],\n\t\t[1135,\t\t3,\t\t0.0005167964323996007,\t\t0.025839821619980042,\t\t2.22, 61.69, 0.004502],\n\t\t[1136,\t\t3,\t\t2.5636662405410735e-05,\t\t0.0012818331202705368,\t\t2.22, 61.69, 0.004502],\n\t\t[1137,\t\t3,\t\t0.00023357652984116472,\t\t0.011678826492058236,\t\t2.22, 61.69, 0.004502],\n\t\t[1138,\t\t3,\t\t7.98498118498449e-05,\t\t0.003992490592492246,\t\t2.22, 61.69, 0.004502],\n\t\t[1139,\t\t3,\t\t0.0012619566606414858,\t\t0.0630978330320743,\t\t2.22, 61.69, 0.004502],\n\t\t[1140,\t\t3,\t\t0.0018073289497007397,\t\t0.09036644748503699,\t\t2.22, 61.69, 0.004502],\n\t\t[1141,\t\t2,\t\t0.0076053500901520025,\t\t0.38026750450760016,\t\t0, 0, 0],\n\t\t[1142,\t\t3,\t\t7.73959943559724e-05,\t\t0.00386979971779862,\t\t2.22, 61.69, 0.004502],\n\t\t[1143,\t\t3,\t\t0.0016067873237582107,\t\t0.08033936618791054,\t\t2.22, 61.69, 0.004502],\n\t\t[1144,\t\t2,\t\t0.00334399697192306,\t\t0.16719984859615303,\t\t0, 0, 0],\n\t\t[1145,\t\t2,\t\t0.004458888300690503,\t\t0.2229444150345252,\t\t0, 0, 0],\n\t\t[1146,\t\t3,\t\t5.4833151376821656e-05,\t\t0.002741657568841083,\t\t2.22, 61.69, 0.004502],\n\t\t[1147,\t\t3,\t\t0.002909588342312674,\t\t0.14547941711563372,\t\t2.22, 61.69, 0.004502],\n\t\t[1148,\t\t3,\t\t0.0011233492673683868,\t\t0.05616746336841934,\t\t2.22, 61.69, 0.004502],\n\t\t[1149,\t\t3,\t\t0.0005447417794635118,\t\t0.02723708897317559,\t\t2.22, 61.69, 0.004502],\n\t\t[1150,\t\t3,\t\t0.0002306193019977063,\t\t0.011530965099885314,\t\t2.22, 61.69, 0.004502],\n\t\t[1151,\t\t3,\t\t0.0008299047575760064,\t\t0.04149523787880033,\t\t2.22, 61.69, 0.004502],\n\t\t[1152,\t\t3,\t\t7.417749437366368e-06,\t\t0.0003708874718683184,\t\t2.22, 61.69, 0.004502],\n\t\t[1153,\t\t3,\t\t4.37920348658174e-06,\t\t0.000218960174329087,\t\t2.22, 61.69, 0.004502],\n\t\t[1154,\t\t3,\t\t1.0225677287248534e-05,\t\t0.0005112838643624266,\t\t2.22, 61.69, 0.004502],\n\t\t[1155,\t\t3,\t\t3.879887736397654e-05,\t\t0.001939943868198827,\t\t2.22, 61.69, 0.004502],\n\t\t[1156,\t\t3,\t\t0.0010200134924871187,\t\t0.05100067462435595,\t\t2.22, 61.69, 
0.004502],\n\t\t[1157,\t\t3,\t\t0.00027719360593007886,\t\t0.013859680296503944,\t\t2.22, 61.69, 0.004502],\n\t\t[1158,\t\t3,\t\t6.640198284893194e-05,\t\t0.003320099142446597,\t\t2.22, 61.69, 0.004502],\n\t\t[1159,\t\t3,\t\t0.0008593149079194712,\t\t0.04296574539597356,\t\t2.22, 61.69, 0.004502],\n\t\t[1160,\t\t2,\t\t0.015175599618213626,\t\t0.7587799809106813,\t\t0, 0, 0],\n\t\t[1161,\t\t3,\t\t0.001608317428775011,\t\t0.08041587143875056,\t\t2.22, 61.69, 0.004502],\n\t\t[1162,\t\t2,\t\t0.031984361657767045,\t\t1.5992180828883522,\t\t0, 0, 0],\n\t\t[1163,\t\t2,\t\t0.021010485834812704,\t\t1.0505242917406352,\t\t0, 0, 0],\n\t\t[1164,\t\t2,\t\t0.018183478445661972,\t\t0.9091739222830987,\t\t0, 0, 0],\n\t\t[1165,\t\t2,\t\t0.003640738012495192,\t\t0.18203690062475963,\t\t0, 0, 0],\n\t\t[1166,\t\t2,\t\t0.0037355845995397383,\t\t0.18677922997698693,\t\t0, 0, 0],\n\t\t[1167,\t\t3,\t\t0.00032173361521807824,\t\t0.016086680760903912,\t\t2.22, 61.69, 0.004502],\n\t\t[1168,\t\t3,\t\t8.56746647323757e-05,\t\t0.004283733236618785,\t\t2.22, 61.69, 0.004502],\n\t\t[1169,\t\t3,\t\t0.00017327803824915608,\t\t0.008663901912457804,\t\t2.22, 61.69, 0.004502],\n\t\t[1170,\t\t3,\t\t1.6933420442211857e-05,\t\t0.000846671022110593,\t\t2.22, 61.69, 0.004502],\n\t\t[1171,\t\t3,\t\t0.0005748603194505088,\t\t0.02874301597252544,\t\t2.22, 61.69, 0.004502],\n\t\t[1172,\t\t3,\t\t0.0002281672447033917,\t\t0.011408362235169585,\t\t2.22, 61.69, 0.004502],\n\t\t[1173,\t\t2,\t\t0.01618626952698487,\t\t0.8093134763492436,\t\t0, 0, 0],\n\t\t[1174,\t\t3,\t\t8.021928882473966e-05,\t\t0.004010964441236983,\t\t2.22, 61.69, 0.004502],\n\t\t[1175,\t\t3,\t\t5.445989361520192e-05,\t\t0.002722994680760096,\t\t2.22, 61.69, 0.004502],\n\t\t[1176,\t\t3,\t\t1.4783581244732665e-05,\t\t0.0007391790622366333,\t\t2.22, 61.69, 0.004502],\n\t\t[1177,\t\t3,\t\t0.0017745146198091144,\t\t0.08872573099045572,\t\t2.22, 61.69, 0.004502],\n\t\t[1178,\t\t3,\t\t0.00020168108435446162,\t\t0.010084054217723081,\t\t2.22, 61.69, 0.004502],\n\t\t[1179,\t\t3,\t\t8.316119408334767e-05,\t\t0.004158059704167384,\t\t2.22, 61.69, 0.004502],\n\t\t[1180,\t\t3,\t\t4.3834108298364086e-05,\t\t0.002191705414918204,\t\t2.22, 61.69, 0.004502],\n\t\t[1181,\t\t2,\t\t0.005289917788662048,\t\t0.2644958894331024,\t\t0, 0, 0],\n\t\t[1182,\t\t2,\t\t0.006322880792722177,\t\t0.3161440396361089,\t\t0, 0, 0],\n\t\t[1183,\t\t3,\t\t0.0024333246840658566,\t\t0.12166623420329284,\t\t2.22, 61.69, 0.004502],\n\t\t[1184,\t\t3,\t\t0.00026859021396164037,\t\t0.013429510698082018,\t\t2.22, 61.69, 0.004502],\n\t\t[1185,\t\t3,\t\t0.0007221796423758263,\t\t0.036108982118791315,\t\t2.22, 61.69, 0.004502],\n\t\t[1186,\t\t3,\t\t0.0024774929167619207,\t\t0.12387464583809603,\t\t2.22, 61.69, 0.004502],\n\t\t[1187,\t\t3,\t\t0.0006248151564821885,\t\t0.031240757824109424,\t\t2.22, 61.69, 0.004502],\n\t\t[1188,\t\t2,\t\t0.011369992521217407,\t\t0.5684996260608703,\t\t0, 0, 0],\n\t\t[1189,\t\t3,\t\t0.001289906586581014,\t\t0.06449532932905071,\t\t2.22, 61.69, 0.004502],\n\t\t[1190,\t\t2,\t\t0.01403960969000889,\t\t0.7019804845004446,\t\t0, 0, 0],\n\t\t[1191,\t\t2,\t\t0.004652379906159672,\t\t0.23261899530798363,\t\t0, 0, 0],\n\t\t[1192,\t\t3,\t\t0.0013658402687938922,\t\t0.06829201343969461,\t\t2.22, 61.69, 0.004502],\n\t\t[1193,\t\t3,\t\t0.00015278576957249078,\t\t0.007639288478624539,\t\t2.22, 61.69, 0.004502],\n\t\t[1194,\t\t3,\t\t0.0005720688022791215,\t\t0.028603440113956075,\t\t2.22, 61.69, 0.004502],\n\t\t[1195,\t\t3,\t\t1.2882573563174789e-05,\t\t0.0006441286781587394,\t\t2.22, 61.69, 
0.004502],\n\t\t[1196,\t\t2,\t\t0.009842783066129698,\t\t0.4921391533064849,\t\t0, 0, 0],\n\t\t[1197,\t\t2,\t\t0.00575541689021183,\t\t0.2877708445105915,\t\t0, 0, 0],\n\t\t[1198,\t\t3,\t\t0.002534966273924786,\t\t0.12674831369623932,\t\t2.22, 61.69, 0.004502],\n\t\t[1201,\t\t3,\t\t0.0016021597716395785,\t\t0.08010798858197893,\t\t2.22, 61.69, 0.004502],\n\t\t[1202,\t\t3,\t\t0.0031762475555186724,\t\t0.15881237777593363,\t\t2.22, 61.69, 0.004502],\n\t\t[1203,\t\t2,\t\t0.011626157559117188,\t\t0.5813078779558594,\t\t0, 0, 0],\n\t\t[1204,\t\t3,\t\t0.0030266063343556363,\t\t0.15133031671778183,\t\t2.22, 61.69, 0.004502],\n\t\t[1205,\t\t3,\t\t3.4940417699210975e-05,\t\t0.0017470208849605492,\t\t2.22, 61.69, 0.004502],\n\t\t[1206,\t\t3,\t\t0.00024235441128435216,\t\t0.012117720564217609,\t\t2.22, 61.69, 0.004502],\n\t\t[1207,\t\t3,\t\t0.00022762038155293296,\t\t0.011381019077646649,\t\t2.22, 61.69, 0.004502],\n\t\t[1208,\t\t3,\t\t0.0001427321512302434,\t\t0.007136607561512171,\t\t2.22, 61.69, 0.004502],\n\t\t[1209,\t\t3,\t\t4.75873361221428e-05,\t\t0.00237936680610714,\t\t2.22, 61.69, 0.004502],\n\t\t[1210,\t\t3,\t\t0.0005454262850371943,\t\t0.027271314251859715,\t\t2.22, 61.69, 0.004502],\n\t\t[1211,\t\t3,\t\t0.0011462484513341364,\t\t0.057312422566706815,\t\t2.22, 61.69, 0.004502],\n\t\t[1212,\t\t2,\t\t0.005804182676892941,\t\t0.290209133844647,\t\t0, 0, 0],\n\t\t[1213,\t\t2,\t\t0.0036505499187602444,\t\t0.18252749593801224,\t\t0, 0, 0],\n\t\t[1214,\t\t3,\t\t0.0002868549194435664,\t\t0.014342745972178321,\t\t2.22, 61.69, 0.004502],\n\t\t[1215,\t\t3,\t\t0.00014342822681200328,\t\t0.0071714113406001635,\t\t2.22, 61.69, 0.004502],\n\t\t[1216,\t\t2,\t\t0.00431338348440427,\t\t0.21566917422021353,\t\t0, 0, 0],\n\t\t[1217,\t\t3,\t\t0.0022836580531031417,\t\t0.11418290265515707,\t\t2.22, 61.69, 0.004502],\n\t\t[1218,\t\t3,\t\t6.241945072080783e-05,\t\t0.003120972536040392,\t\t2.22, 61.69, 0.004502],\n\t\t[1219,\t\t3,\t\t0.0007855588922898729,\t\t0.03927794461449365,\t\t2.22, 61.69, 0.004502],\n\t\t[1220,\t\t3,\t\t0.001947919590347708,\t\t0.0973959795173854,\t\t2.22, 61.69, 0.004502],\n\t\t[1221,\t\t2,\t\t0.0377662225422596,\t\t1.88831112711298,\t\t0, 0, 0],\n\t\t[1222,\t\t2,\t\t0.013436354905899806,\t\t0.6718177452949904,\t\t0, 0, 0],\n\t\t[1223,\t\t3,\t\t0.00024230393037435297,\t\t0.01211519651871765,\t\t2.22, 61.69, 0.004502],\n\t\t[1224,\t\t2,\t\t0.010219261097938644,\t\t0.5109630548969322,\t\t0, 0, 0],\n\t\t[1225,\t\t3,\t\t0.0022238071565315737,\t\t0.1111903578265787,\t\t2.22, 61.69, 0.004502],\n\t\t[1226,\t\t3,\t\t0.0002535566380389208,\t\t0.012677831901946041,\t\t2.22, 61.69, 0.004502],\n\t\t[1227,\t\t3,\t\t0.0011129900410750567,\t\t0.05564950205375283,\t\t2.22, 61.69, 0.004502],\n\t\t[1228,\t\t3,\t\t0.00019234621639044032,\t\t0.009617310819522017,\t\t2.22, 61.69, 0.004502],\n\t\t[1229,\t\t2,\t\t0.00326230849376,\t\t0.16311542468800003,\t\t0, 0, 0],\n\t\t[1230,\t\t3,\t\t5.774224065377648e-05,\t\t0.0028871120326888237,\t\t2.22, 61.69, 0.004502],\n\t\t[1231,\t\t3,\t\t0.0021361636602669084,\t\t0.10680818301334541,\t\t2.22, 61.69, 0.004502],\n\t\t[1232,\t\t2,\t\t0.004779428513216963,\t\t0.23897142566084817,\t\t0, 0, 0],\n\t\t[1235,\t\t3,\t\t0.00028910830796175294,\t\t0.014455415398087644,\t\t2.22, 61.69, 0.004502],\n\t\t[1236,\t\t2,\t\t0.002535004450133525,\t\t0.12675022250667625,\t\t0, 0, 0],\n\t\t[1237,\t\t3,\t\t0.0009298092078685558,\t\t0.04649046039342779,\t\t2.22, 61.69, 0.004502],\n\t\t[1238,\t\t2,\t\t0.012012445276594919,\t\t0.600622263829746,\t\t0, 0, 
0],\n\t\t[1239,\t\t3,\t\t5.75756369436291e-05,\t\t0.0028787818471814556,\t\t2.22, 61.69, 0.004502],\n\t\t[1240,\t\t2,\t\t0.021613910382114798,\t\t1.08069551910574,\t\t0, 0, 0],\n\t\t[1241,\t\t2,\t\t0.024532881090784327,\t\t1.2266440545392163,\t\t0, 0, 0],\n\t\t[1242,\t\t3,\t\t0.0017235867616422773,\t\t0.08617933808211387,\t\t2.22, 61.69, 0.004502],\n\t\t[1243,\t\t2,\t\t0.005289026999236673,\t\t0.26445134996183367,\t\t0, 0, 0],\n\t\t[1244,\t\t2,\t\t0.00846072422785893,\t\t0.4230362113929465,\t\t0, 0, 0],\n\t\t[1245,\t\t3,\t\t0.0005144458090049472,\t\t0.025722290450247362,\t\t2.22, 61.69, 0.004502],\n\t\t[1246,\t\t2,\t\t0.00337806806675036,\t\t0.16890340333751802,\t\t0, 0, 0],\n\t\t[1247,\t\t3,\t\t0.0013899571448864774,\t\t0.06949785724432388,\t\t2.22, 61.69, 0.004502],\n\t\t[1248,\t\t2,\t\t0.005854245631350222,\t\t0.2927122815675111,\t\t0, 0, 0],\n\t\t[1249,\t\t2,\t\t0.004846915908139961,\t\t0.24234579540699805,\t\t0, 0, 0],\n\t\t[1250,\t\t3,\t\t0.0019627317861894665,\t\t0.09813658930947333,\t\t2.22, 61.69, 0.004502],\n\t\t[1251,\t\t3,\t\t0.0014899668826355728,\t\t0.07449834413177864,\t\t2.22, 61.69, 0.004502],\n\t\t[1252,\t\t3,\t\t0.0009477821555247328,\t\t0.047389107776236644,\t\t2.22, 61.69, 0.004502],\n\t\t[1253,\t\t2,\t\t0.004106369053307717,\t\t0.20531845266538587,\t\t0, 0, 0],\n\t\t[1254,\t\t2,\t\t0.005081603543623868,\t\t0.2540801771811934,\t\t0, 0, 0],\n\t\t[1255,\t\t3,\t\t0.0002430881191708174,\t\t0.01215440595854087,\t\t2.22, 61.69, 0.004502],\n\t\t[1256,\t\t3,\t\t0.0009607764830526361,\t\t0.048038824152631804,\t\t2.22, 61.69, 0.004502],\n\t\t[1257,\t\t2,\t\t0.005662916214121937,\t\t0.28314581070609685,\t\t0, 0, 0],\n\t\t[1258,\t\t2,\t\t0.010814994241697335,\t\t0.5407497120848668,\t\t0, 0, 0],\n\t\t[1259,\t\t2,\t\t0.00695753592752513,\t\t0.34787679637625657,\t\t0, 0, 0],\n\t\t[1260,\t\t3,\t\t0.0012839803779623614,\t\t0.06419901889811806,\t\t2.22, 61.69, 0.004502],\n\t\t[1261,\t\t2,\t\t0.012840592447306919,\t\t0.6420296223653459,\t\t0, 0, 0],\n\t\t[1262,\t\t3,\t\t3.3365758929065435e-05,\t\t0.0016682879464532717,\t\t2.22, 61.69, 0.004502],\n\t\t[1263,\t\t3,\t\t2.243579925674327e-05,\t\t0.0011217899628371635,\t\t2.22, 61.69, 0.004502],\n\t\t[1264,\t\t2,\t\t0.005222533303161435,\t\t0.2611266651580718,\t\t0, 0, 0],\n\t\t[1265,\t\t3,\t\t0.0004236530619172327,\t\t0.021182653095861634,\t\t2.22, 61.69, 0.004502],\n\t\t[1266,\t\t2,\t\t0.007621029313600565,\t\t0.38105146568002835,\t\t0, 0, 0],\n\t\t[1267,\t\t3,\t\t0.002512674942558201,\t\t0.12563374712791006,\t\t2.22, 61.69, 0.004502],\n\t\t[1268,\t\t3,\t\t0.0002183287451274897,\t\t0.010916437256374485,\t\t2.22, 61.69, 0.004502],\n\t\t[1269,\t\t3,\t\t0.0003250471975980552,\t\t0.01625235987990276,\t\t2.22, 61.69, 0.004502],\n\t\t[1270,\t\t3,\t\t0.0024796665722395645,\t\t0.12398332861197821,\t\t2.22, 61.69, 0.004502],\n\t\t[1271,\t\t3,\t\t0.0030157819134425234,\t\t0.15078909567212617,\t\t2.22, 61.69, 0.004502],\n\t\t[1272,\t\t3,\t\t7.840992648188318e-05,\t\t0.003920496324094159,\t\t2.22, 61.69, 0.004502],\n\t\t[1273,\t\t3,\t\t0.00013809561181086458,\t\t0.006904780590543229,\t\t2.22, 61.69, 0.004502],\n\t\t[1274,\t\t2,\t\t0.0033801727100761705,\t\t0.1690086355038085,\t\t0, 0, 0],\n\t\t[1275,\t\t2,\t\t0.006307329492962109,\t\t0.3153664746481055,\t\t0, 0, 0],\n\t\t[1276,\t\t3,\t\t0.001633288835647369,\t\t0.08166444178236844,\t\t2.22, 61.69, 0.004502],\n\t\t[1277,\t\t2,\t\t0.004176942042758357,\t\t0.20884710213791788,\t\t0, 0, 0],\n\t\t[1278,\t\t2,\t\t0.010850406134369231,\t\t0.5425203067184615,\t\t0, 0, 
0],\n\t\t[1279,\t\t3,\t\t1.1547461499241629e-07,\t\t5.773730749620814e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1280,\t\t3,\t\t2.2052402508424647e-05,\t\t0.0011026201254212323,\t\t2.22, 61.69, 0.004502],\n\t\t[1281,\t\t3,\t\t0.0001599481510691144,\t\t0.007997407553455719,\t\t2.22, 61.69, 0.004502],\n\t\t[1282,\t\t3,\t\t0.00015112854883249187,\t\t0.007556427441624595,\t\t2.22, 61.69, 0.004502],\n\t\t[1283,\t\t2,\t\t0.04214075813046536,\t\t2.1070379065232685,\t\t0, 0, 0],\n\t\t[1284,\t\t3,\t\t0.0018096758437742202,\t\t0.09048379218871101,\t\t2.22, 61.69, 0.004502],\n\t\t[1285,\t\t3,\t\t0.0001531107626377273,\t\t0.0076555381318863655,\t\t2.22, 61.69, 0.004502],\n\t\t[1286,\t\t3,\t\t0.0011377796471657795,\t\t0.05688898235828898,\t\t2.22, 61.69, 0.004502],\n\t\t[1287,\t\t2,\t\t0.005933272587501368,\t\t0.29666362937506835,\t\t0, 0, 0],\n\t\t[1288,\t\t2,\t\t0.00944760882155904,\t\t0.472380441077952,\t\t0, 0, 0],\n\t\t[1289,\t\t2,\t\t0.011723304434111076,\t\t0.5861652217055537,\t\t0, 0, 0],\n\t\t[1290,\t\t3,\t\t0.0003120693634598793,\t\t0.015603468172993969,\t\t2.22, 61.69, 0.004502],\n\t\t[1291,\t\t2,\t\t0.0062575490505418305,\t\t0.31287745252709154,\t\t0, 0, 0],\n\t\t[1292,\t\t3,\t\t0.002653563231501149,\t\t0.13267816157505744,\t\t2.22, 61.69, 0.004502],\n\t\t[1293,\t\t3,\t\t0.00015292290721046804,\t\t0.007646145360523402,\t\t2.22, 61.69, 0.004502],\n\t\t[1294,\t\t3,\t\t0.0003436110439431119,\t\t0.017180552197155596,\t\t2.22, 61.69, 0.004502],\n\t\t[1295,\t\t3,\t\t0.00037392918854889465,\t\t0.01869645942744473,\t\t2.22, 61.69, 0.004502],\n\t\t[1296,\t\t3,\t\t0.0017284338192132009,\t\t0.08642169096066006,\t\t2.22, 61.69, 0.004502],\n\t\t[1297,\t\t2,\t\t0.011317746197608284,\t\t0.5658873098804141,\t\t0, 0, 0],\n\t\t[1298,\t\t3,\t\t0.00020595303360804683,\t\t0.010297651680402344,\t\t2.22, 61.69, 0.004502],\n\t\t[1299,\t\t3,\t\t8.9869986756113e-05,\t\t0.00449349933780565,\t\t2.22, 61.69, 0.004502],\n\t\t[1300,\t\t3,\t\t0.001511593201166196,\t\t0.07557966005830981,\t\t2.22, 61.69, 0.004502],\n\t\t[1301,\t\t2,\t\t0.0038746782543149596,\t\t0.193733912715748,\t\t0, 0, 0],\n\t\t[1302,\t\t3,\t\t0.0003104985267932093,\t\t0.015524926339660468,\t\t2.22, 61.69, 0.004502],\n\t\t[1303,\t\t3,\t\t0.00027600750632746427,\t\t0.013800375316373212,\t\t2.22, 61.69, 0.004502],\n\t\t[1304,\t\t3,\t\t0.000610793340517708,\t\t0.030539667025885397,\t\t2.22, 61.69, 0.004502],\n\t\t[1305,\t\t3,\t\t2.9075695387122924e-07,\t\t1.4537847693561463e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1306,\t\t3,\t\t0.00011631130798083146,\t\t0.005815565399041573,\t\t2.22, 61.69, 0.004502],\n\t\t[1307,\t\t3,\t\t1.9031130574577255e-05,\t\t0.0009515565287288628,\t\t2.22, 61.69, 0.004502],\n\t\t[1308,\t\t3,\t\t0.00020870441847665842,\t\t0.010435220923832922,\t\t2.22, 61.69, 0.004502],\n\t\t[1309,\t\t3,\t\t0.0002132096944766602,\t\t0.01066048472383301,\t\t2.22, 61.69, 0.004502],\n\t\t[1310,\t\t3,\t\t0.00010478060392325507,\t\t0.005239030196162754,\t\t2.22, 61.69, 0.004502],\n\t\t[1311,\t\t3,\t\t0.0007546493032032542,\t\t0.037732465160162716,\t\t2.22, 61.69, 0.004502],\n\t\t[1312,\t\t2,\t\t0.0070428013304282035,\t\t0.3521400665214102,\t\t0, 0, 0],\n\t\t[1313,\t\t3,\t\t0.0019631283227609974,\t\t0.09815641613804986,\t\t2.22, 61.69, 0.004502],\n\t\t[1314,\t\t3,\t\t0.0007641975650906521,\t\t0.038209878254532606,\t\t2.22, 61.69, 0.004502],\n\t\t[1315,\t\t3,\t\t0.0005015944131679134,\t\t0.02507972065839567,\t\t2.22, 61.69, 0.004502],\n\t\t[1316,\t\t3,\t\t0.000145780634856578,\t\t0.007289031742828901,\t\t2.22, 61.69, 
0.004502],\n\t\t[1317,\t\t3,\t\t0.0015252502049763412,\t\t0.07626251024881707,\t\t2.22, 61.69, 0.004502],\n\t\t[1318,\t\t3,\t\t0.00012454395408676328,\t\t0.0062271977043381645,\t\t2.22, 61.69, 0.004502],\n\t\t[1319,\t\t3,\t\t0.001127343871228203,\t\t0.05636719356141015,\t\t2.22, 61.69, 0.004502],\n\t\t[1320,\t\t3,\t\t0.0013215329138219017,\t\t0.06607664569109509,\t\t2.22, 61.69, 0.004502],\n\t\t[1321,\t\t3,\t\t1.025741798764967e-05,\t\t0.0005128708993824835,\t\t2.22, 61.69, 0.004502],\n\t\t[1322,\t\t3,\t\t5.919056262068799e-05,\t\t0.0029595281310344,\t\t2.22, 61.69, 0.004502],\n\t\t[1323,\t\t2,\t\t0.012675857799799822,\t\t0.6337928899899912,\t\t0, 0, 0],\n\t\t[1324,\t\t3,\t\t0.0008316328586631403,\t\t0.04158164293315702,\t\t2.22, 61.69, 0.004502],\n\t\t[1325,\t\t2,\t\t0.0057612535388438385,\t\t0.2880626769421919,\t\t0, 0, 0],\n\t\t[1326,\t\t2,\t\t0.0036242041289439157,\t\t0.1812102064471958,\t\t0, 0, 0],\n\t\t[1327,\t\t2,\t\t0.0032338308031027566,\t\t0.16169154015513784,\t\t0, 0, 0],\n\t\t[1328,\t\t3,\t\t0.0010226241895011407,\t\t0.05113120947505704,\t\t2.22, 61.69, 0.004502],\n\t\t[1329,\t\t2,\t\t0.013921309839652627,\t\t0.6960654919826315,\t\t0, 0, 0],\n\t\t[1330,\t\t3,\t\t0.0019182008434651947,\t\t0.09591004217325974,\t\t2.22, 61.69, 0.004502],\n\t\t[1331,\t\t3,\t\t1.841349064624893e-05,\t\t0.0009206745323124464,\t\t2.22, 61.69, 0.004502],\n\t\t[1332,\t\t3,\t\t0.0016738699394560756,\t\t0.08369349697280379,\t\t2.22, 61.69, 0.004502],\n\t\t[1333,\t\t3,\t\t0.0029061854047842247,\t\t0.14530927023921122,\t\t2.22, 61.69, 0.004502],\n\t\t[1334,\t\t3,\t\t5.761014482450118e-05,\t\t0.0028805072412250595,\t\t2.22, 61.69, 0.004502],\n\t\t[1335,\t\t3,\t\t0.00021052629514022267,\t\t0.010526314757011134,\t\t2.22, 61.69, 0.004502],\n\t\t[1336,\t\t3,\t\t0.0018954102795459078,\t\t0.0947705139772954,\t\t2.22, 61.69, 0.004502],\n\t\t[1337,\t\t2,\t\t0.003303921795797683,\t\t0.16519608978988415,\t\t0, 0, 0],\n\t\t[1338,\t\t3,\t\t5.300015004820578e-05,\t\t0.0026500075024102894,\t\t2.22, 61.69, 0.004502],\n\t\t[1339,\t\t3,\t\t0.0006421253879349708,\t\t0.032106269396748544,\t\t2.22, 61.69, 0.004502],\n\t\t[1340,\t\t2,\t\t0.0019890355643717287,\t\t0.09945177821858646,\t\t0, 0, 0],\n\t\t[1341,\t\t2,\t\t0.005924529413907861,\t\t0.2962264706953931,\t\t0, 0, 0],\n\t\t[1342,\t\t3,\t\t2.7387437160360416e-05,\t\t0.0013693718580180209,\t\t2.22, 61.69, 0.004502],\n\t\t[1343,\t\t3,\t\t3.943679326899658e-05,\t\t0.001971839663449829,\t\t2.22, 61.69, 0.004502],\n\t\t[1344,\t\t3,\t\t1.4391232894862565e-05,\t\t0.0007195616447431282,\t\t2.22, 61.69, 0.004502],\n\t\t[1345,\t\t3,\t\t0.00025281368060892654,\t\t0.012640684030446329,\t\t2.22, 61.69, 0.004502],\n\t\t[1346,\t\t2,\t\t0.013669449762218379,\t\t0.6834724881109189,\t\t0, 0, 0],\n\t\t[1347,\t\t2,\t\t0.01477118570778878,\t\t0.7385592853894392,\t\t0, 0, 0],\n\t\t[1348,\t\t3,\t\t0.000584562357708931,\t\t0.02922811788544655,\t\t2.22, 61.69, 0.004502],\n\t\t[1349,\t\t3,\t\t0.0012037349571321803,\t\t0.06018674785660902,\t\t2.22, 61.69, 0.004502],\n\t\t[1350,\t\t3,\t\t6.046050411995944e-06,\t\t0.0003023025205997972,\t\t2.22, 61.69, 0.004502],\n\t\t[1351,\t\t3,\t\t4.796502941013963e-07,\t\t2.3982514705069816e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1352,\t\t3,\t\t2.760384018212869e-05,\t\t0.0013801920091064345,\t\t2.22, 61.69, 0.004502],\n\t\t[1354,\t\t3,\t\t4.276029671133181e-06,\t\t0.00021380148355665902,\t\t2.22, 61.69, 0.004502],\n\t\t[1355,\t\t3,\t\t0.0001074820707981226,\t\t0.005374103539906131,\t\t2.22, 61.69, 
0.004502],\n\t\t[1356,\t\t2,\t\t0.004678278776831856,\t\t0.23391393884159278,\t\t0, 0, 0],\n\t\t[1357,\t\t2,\t\t0.003594349677217709,\t\t0.17971748386088549,\t\t0, 0, 0],\n\t\t[1358,\t\t3,\t\t1.57431431082847e-05,\t\t0.0007871571554142351,\t\t2.22, 61.69, 0.004502],\n\t\t[1359,\t\t2,\t\t0.004496673943395517,\t\t0.22483369716977586,\t\t0, 0, 0],\n\t\t[1360,\t\t3,\t\t0.0010909105792324338,\t\t0.054545528961621695,\t\t2.22, 61.69, 0.004502],\n\t\t[1361,\t\t2,\t\t0.0040238936307783425,\t\t0.20119468153891715,\t\t0, 0, 0],\n\t\t[1362,\t\t2,\t\t0.005036121783141224,\t\t0.2518060891570612,\t\t0, 0, 0],\n\t\t[1363,\t\t3,\t\t2.301886324440155e-06,\t\t0.00011509431622200775,\t\t2.22, 61.69, 0.004502],\n\t\t[1364,\t\t3,\t\t3.887723536233725e-06,\t\t0.00019438617681168623,\t\t2.22, 61.69, 0.004502],\n\t\t[1365,\t\t3,\t\t2.8999446623259055e-08,\t\t1.449972331162953e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1366,\t\t3,\t\t7.830373844390861e-05,\t\t0.003915186922195431,\t\t2.22, 61.69, 0.004502],\n\t\t[1367,\t\t3,\t\t0.0027924620350495274,\t\t0.13962310175247636,\t\t2.22, 61.69, 0.004502],\n\t\t[1368,\t\t3,\t\t0.00017611255606875446,\t\t0.008805627803437724,\t\t2.22, 61.69, 0.004502],\n\t\t[1369,\t\t3,\t\t0.0005073133310147165,\t\t0.025365666550735824,\t\t2.22, 61.69, 0.004502],\n\t\t[1370,\t\t3,\t\t2.185563890765493e-05,\t\t0.0010927819453827466,\t\t2.22, 61.69, 0.004502],\n\t\t[1371,\t\t2,\t\t0.0024031239337826537,\t\t0.12015619668913267,\t\t0, 0, 0],\n\t\t[1372,\t\t2,\t\t0.012284634505654547,\t\t0.6142317252827274,\t\t0, 0, 0],\n\t\t[1373,\t\t3,\t\t0.0022409179594482334,\t\t0.11204589797241167,\t\t2.22, 61.69, 0.004502],\n\t\t[1376,\t\t2,\t\t0.011218109707548912,\t\t0.5609054853774457,\t\t0, 0, 0],\n\t\t[1377,\t\t2,\t\t0.01492085689824784,\t\t0.7460428449123921,\t\t0, 0, 0],\n\t\t[1378,\t\t2,\t\t0.01566275025445262,\t\t0.783137512722631,\t\t0, 0, 0],\n\t\t[1379,\t\t3,\t\t5.1310566028095876e-05,\t\t0.002565528301404794,\t\t2.22, 61.69, 0.004502],\n\t\t[1380,\t\t3,\t\t7.724465320438908e-05,\t\t0.003862232660219454,\t\t2.22, 61.69, 0.004502],\n\t\t[1381,\t\t3,\t\t6.446222679588771e-05,\t\t0.003223111339794386,\t\t2.22, 61.69, 0.004502],\n\t\t[1382,\t\t2,\t\t0.008838822964419164,\t\t0.4419411482209583,\t\t0, 0, 0],\n\t\t[1383,\t\t2,\t\t0.006991449967869686,\t\t0.34957249839348425,\t\t0, 0, 0],\n\t\t[1384,\t\t3,\t\t0.0002972463393517766,\t\t0.014862316967588829,\t\t2.22, 61.69, 0.004502],\n\t\t[1385,\t\t3,\t\t7.92302201959824e-06,\t\t0.0003961511009799121,\t\t2.22, 61.69, 0.004502],\n\t\t[1386,\t\t3,\t\t4.2899112828393286e-05,\t\t0.002144955641419664,\t\t2.22, 61.69, 0.004502],\n\t\t[1387,\t\t3,\t\t0.00022240699424911273,\t\t0.011120349712455638,\t\t2.22, 61.69, 0.004502],\n\t\t[1388,\t\t3,\t\t5.909025672850305e-05,\t\t0.0029545128364251525,\t\t2.22, 61.69, 0.004502],\n\t\t[1389,\t\t3,\t\t1.3594135764164036e-05,\t\t0.0006797067882082019,\t\t2.22, 61.69, 0.004502],\n\t\t[1390,\t\t3,\t\t0.00023763846235409512,\t\t0.011881923117704758,\t\t2.22, 61.69, 0.004502],\n\t\t[1391,\t\t3,\t\t3.321367742134543e-05,\t\t0.0016606838710672715,\t\t2.22, 61.69, 0.004502],\n\t\t[1392,\t\t3,\t\t0.0012290826914265437,\t\t0.06145413457132718,\t\t2.22, 61.69, 0.004502],\n\t\t[1393,\t\t3,\t\t8.763130962106806e-05,\t\t0.004381565481053403,\t\t2.22, 61.69, 0.004502],\n\t\t[1394,\t\t3,\t\t6.862035771367977e-05,\t\t0.003431017885683988,\t\t2.22, 61.69, 0.004502],\n\t\t[1395,\t\t3,\t\t4.696755105006889e-06,\t\t0.00023483775525034447,\t\t2.22, 61.69, 0.004502],\n\t\t[1396,\t\t3,\t\t1.6623117797696163e-06,\t\t8.311558898848081e-05,\t\t2.22, 
61.69, 0.004502],\n\t\t[1397,\t\t3,\t\t0.0015969317375463513,\t\t0.07984658687731756,\t\t2.22, 61.69, 0.004502],\n\t\t[1398,\t\t3,\t\t0.00017695743260373348,\t\t0.008847871630186674,\t\t2.22, 61.69, 0.004502],\n\t\t[1399,\t\t3,\t\t0.0011375222056992432,\t\t0.05687611028496216,\t\t2.22, 61.69, 0.004502],\n\t\t[1400,\t\t3,\t\t8.258214886247176e-05,\t\t0.004129107443123589,\t\t2.22, 61.69, 0.004502],\n\t\t[1401,\t\t2,\t\t0.005687529053514607,\t\t0.28437645267573036,\t\t0, 0, 0],\n\t\t[1402,\t\t3,\t\t0.001676149990745289,\t\t0.08380749953726446,\t\t2.22, 61.69, 0.004502],\n\t\t[1403,\t\t2,\t\t0.007617262031172502,\t\t0.38086310155862513,\t\t0, 0, 0],\n\t\t[1404,\t\t2,\t\t0.0067734988181819555,\t\t0.33867494090909783,\t\t0, 0, 0],\n\t\t[1405,\t\t3,\t\t0.0018812625008740895,\t\t0.09406312504370447,\t\t2.22, 61.69, 0.004502],\n\t\t[1406,\t\t3,\t\t0.0006852566793279422,\t\t0.03426283396639711,\t\t2.22, 61.69, 0.004502],\n\t\t[1407,\t\t3,\t\t1.3471796788943673e-05,\t\t0.0006735898394471837,\t\t2.22, 61.69, 0.004502],\n\t\t[1408,\t\t3,\t\t0.002615151153581973,\t\t0.13075755767909866,\t\t2.22, 61.69, 0.004502],\n\t\t[1409,\t\t3,\t\t0.0007652033584917757,\t\t0.038260167924588785,\t\t2.22, 61.69, 0.004502],\n\t\t[1410,\t\t3,\t\t0.002385192626051519,\t\t0.11925963130257596,\t\t2.22, 61.69, 0.004502],\n\t\t[1411,\t\t3,\t\t0.0025079869254713357,\t\t0.1253993462735668,\t\t2.22, 61.69, 0.004502],\n\t\t[1412,\t\t3,\t\t0.00034193149839380297,\t\t0.01709657491969015,\t\t2.22, 61.69, 0.004502],\n\t\t[1413,\t\t3,\t\t0.0003039144901162519,\t\t0.015195724505812597,\t\t2.22, 61.69, 0.004502],\n\t\t[1414,\t\t3,\t\t0.001654733253695335,\t\t0.08273666268476676,\t\t2.22, 61.69, 0.004502],\n\t\t[1415,\t\t3,\t\t0.0004362516227410405,\t\t0.021812581137052027,\t\t2.22, 61.69, 0.004502],\n\t\t[1416,\t\t3,\t\t0.0004029092265882156,\t\t0.020145461329410783,\t\t2.22, 61.69, 0.004502],\n\t\t[1417,\t\t3,\t\t6.808952303623334e-08,\t\t3.404476151811667e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1418,\t\t2,\t\t0.005619099755523237,\t\t0.28095498777616185,\t\t0, 0, 0],\n\t\t[1419,\t\t3,\t\t0.00211745485704481,\t\t0.10587274285224049,\t\t2.22, 61.69, 0.004502],\n\t\t[1420,\t\t3,\t\t8.91112970779674e-05,\t\t0.00445556485389837,\t\t2.22, 61.69, 0.004502],\n\t\t[1421,\t\t3,\t\t0.00044387476697737416,\t\t0.02219373834886871,\t\t2.22, 61.69, 0.004502],\n\t\t[1422,\t\t3,\t\t0.00030115264331514286,\t\t0.015057632165757144,\t\t2.22, 61.69, 0.004502],\n\t\t[1423,\t\t3,\t\t0.00012293234040278847,\t\t0.006146617020139425,\t\t2.22, 61.69, 0.004502],\n\t\t[1424,\t\t2,\t\t0.00641540397482647,\t\t0.3207701987413235,\t\t0, 0, 0],\n\t\t[1425,\t\t3,\t\t0.001350721738292593,\t\t0.06753608691462964,\t\t2.22, 61.69, 0.004502],\n\t\t[1426,\t\t2,\t\t0.004377563184547638,\t\t0.2188781592273819,\t\t0, 0, 0],\n\t\t[1427,\t\t2,\t\t0.03060222784928668,\t\t1.5301113924643341,\t\t0, 0, 0],\n\t\t[1428,\t\t2,\t\t0.021319488529000553,\t\t1.0659744264500277,\t\t0, 0, 0],\n\t\t[1429,\t\t3,\t\t0.000658318690093667,\t\t0.03291593450468335,\t\t2.22, 61.69, 0.004502],\n\t\t[1430,\t\t3,\t\t9.820641622425884e-07,\t\t4.9103208112129425e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1431,\t\t2,\t\t0.014493414492796078,\t\t0.724670724639804,\t\t0, 0, 0],\n\t\t[1432,\t\t3,\t\t0.0003716433863367817,\t\t0.01858216931683909,\t\t2.22, 61.69, 0.004502],\n\t\t[1433,\t\t2,\t\t0.036688879163843384,\t\t1.8344439581921694,\t\t0, 0, 0],\n\t\t[1434,\t\t2,\t\t0.0026062503484175956,\t\t0.13031251742087976,\t\t0, 0, 0],\n\t\t[1435,\t\t2,\t\t0.002539145570389532,\t\t0.1269572785194766,\t\t0, 0, 
0],\n\t\t[1436,\t\t2,\t\t0.002591208267120717,\t\t0.12956041335603585,\t\t0, 0, 0],\n\t\t[1437,\t\t2,\t\t0.015172047044780135,\t\t0.7586023522390068,\t\t0, 0, 0],\n\t\t[1438,\t\t2,\t\t0.025007389641183632,\t\t1.2503694820591817,\t\t0, 0, 0],\n\t\t[1439,\t\t2,\t\t0.0063091033600462575,\t\t0.3154551680023129,\t\t0, 0, 0],\n\t\t[1440,\t\t3,\t\t5.306917668409132e-05,\t\t0.0026534588342045657,\t\t2.22, 61.69, 0.004502],\n\t\t[1441,\t\t3,\t\t1.0923020560921105e-05,\t\t0.0005461510280460552,\t\t2.22, 61.69, 0.004502],\n\t\t[1442,\t\t3,\t\t4.555157486056611e-05,\t\t0.0022775787430283057,\t\t2.22, 61.69, 0.004502],\n\t\t[1443,\t\t2,\t\t0.0026111964035441713,\t\t0.13055982017720855,\t\t0, 0, 0],\n\t\t[1444,\t\t3,\t\t0.0005717925297728792,\t\t0.028589626488643962,\t\t2.22, 61.69, 0.004502],\n\t\t[1445,\t\t3,\t\t0.0015938921576921367,\t\t0.07969460788460683,\t\t2.22, 61.69, 0.004502],\n\t\t[1446,\t\t2,\t\t0.04829066125331256,\t\t2.414533062665628,\t\t0, 0, 0],\n\t\t[1447,\t\t2,\t\t0.005696308888305882,\t\t0.2848154444152941,\t\t0, 0, 0],\n\t\t[1448,\t\t3,\t\t0.00047896583949883246,\t\t0.023948291974941624,\t\t2.22, 61.69, 0.004502],\n\t\t[1449,\t\t2,\t\t0.006075750962706547,\t\t0.3037875481353274,\t\t0, 0, 0],\n\t\t[1450,\t\t2,\t\t0.0037724056227270084,\t\t0.18862028113635043,\t\t0, 0, 0],\n\t\t[1451,\t\t2,\t\t0.0043416728967246255,\t\t0.21708364483623127,\t\t0, 0, 0],\n\t\t[1452,\t\t3,\t\t0.0015322750739690742,\t\t0.0766137536984537,\t\t2.22, 61.69, 0.004502],\n\t\t[1453,\t\t2,\t\t0.004134065549943135,\t\t0.20670327749715672,\t\t0, 0, 0],\n\t\t[1454,\t\t2,\t\t0.009875666531734596,\t\t0.49378332658672985,\t\t0, 0, 0],\n\t\t[1455,\t\t3,\t\t4.166284213856912e-05,\t\t0.0020831421069284557,\t\t2.22, 61.69, 0.004502],\n\t\t[1456,\t\t2,\t\t0.0031865889687578697,\t\t0.15932944843789354,\t\t0, 0, 0],\n\t\t[1457,\t\t3,\t\t0.00012749408723576006,\t\t0.006374704361788003,\t\t2.22, 61.69, 0.004502],\n\t\t[1458,\t\t3,\t\t1.5673534819523866e-05,\t\t0.0007836767409761935,\t\t2.22, 61.69, 0.004502],\n\t\t[1459,\t\t3,\t\t0.00033798517072819835,\t\t0.01689925853640992,\t\t2.22, 61.69, 0.004502],\n\t\t[1460,\t\t2,\t\t0.006461593448980158,\t\t0.3230796724490079,\t\t0, 0, 0],\n\t\t[1461,\t\t3,\t\t0.001142843079861875,\t\t0.05714215399309376,\t\t2.22, 61.69, 0.004502],\n\t\t[1462,\t\t3,\t\t0.00015295973435731913,\t\t0.007647986717865956,\t\t2.22, 61.69, 0.004502],\n\t\t[1463,\t\t3,\t\t4.5276834778775515e-05,\t\t0.002263841738938776,\t\t2.22, 61.69, 0.004502],\n\t\t[1464,\t\t2,\t\t0.013934601684842136,\t\t0.6967300842421068,\t\t0, 0, 0],\n\t\t[1465,\t\t3,\t\t0.0003374045759652472,\t\t0.01687022879826236,\t\t2.22, 61.69, 0.004502],\n\t\t[1466,\t\t3,\t\t0.0003619193984034768,\t\t0.01809596992017384,\t\t2.22, 61.69, 0.004502],\n\t\t[1467,\t\t3,\t\t0.00013344536897072216,\t\t0.006672268448536108,\t\t2.22, 61.69, 0.004502],\n\t\t[1468,\t\t3,\t\t0.0015144656821575462,\t\t0.0757232841078773,\t\t2.22, 61.69, 0.004502],\n\t\t[1469,\t\t2,\t\t0.004138503876498319,\t\t0.20692519382491598,\t\t0, 0, 0],\n\t\t[1470,\t\t2,\t\t0.0020014495173752657,\t\t0.10007247586876329,\t\t0, 0, 0],\n\t\t[1471,\t\t2,\t\t0.004038395628360613,\t\t0.20191978141803063,\t\t0, 0, 0],\n\t\t[1472,\t\t3,\t\t0.0007626820845032627,\t\t0.03813410422516314,\t\t2.22, 61.69, 0.004502],\n\t\t[1473,\t\t3,\t\t0.0005323801851315335,\t\t0.026619009256576683,\t\t2.22, 61.69, 0.004502],\n\t\t[1474,\t\t3,\t\t8.905977123682595e-05,\t\t0.004452988561841298,\t\t2.22, 61.69, 0.004502],\n\t\t[1475,\t\t3,\t\t2.4884191103347185e-05,\t\t0.0012442095551673594,\t\t2.22, 61.69, 
0.004502],\n\t\t[1476,\t\t2,\t\t0.01216740582073879,\t\t0.6083702910369395,\t\t0, 0, 0],\n\t\t[1477,\t\t3,\t\t0.0007717725169969112,\t\t0.03858862584984556,\t\t2.22, 61.69, 0.004502],\n\t\t[1478,\t\t3,\t\t1.03629245449834e-06,\t\t5.181462272491701e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1479,\t\t3,\t\t0.00035603636123413484,\t\t0.01780181806170674,\t\t2.22, 61.69, 0.004502],\n\t\t[1480,\t\t3,\t\t0.0011893307912248102,\t\t0.05946653956124052,\t\t2.22, 61.69, 0.004502],\n\t\t[1481,\t\t3,\t\t3.3833873695351113e-06,\t\t0.00016916936847675558,\t\t2.22, 61.69, 0.004502],\n\t\t[1482,\t\t3,\t\t0.0011147740798471094,\t\t0.055738703992355476,\t\t2.22, 61.69, 0.004502],\n\t\t[1483,\t\t3,\t\t0.0002291607516312977,\t\t0.011458037581564884,\t\t2.22, 61.69, 0.004502],\n\t\t[1484,\t\t3,\t\t1.9041073525508303e-06,\t\t9.520536762754152e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1485,\t\t3,\t\t3.5876538426778735e-05,\t\t0.0017938269213389369,\t\t2.22, 61.69, 0.004502],\n\t\t[1486,\t\t3,\t\t0.00018457774197472868,\t\t0.009228887098736434,\t\t2.22, 61.69, 0.004502],\n\t\t[1487,\t\t3,\t\t7.276038526853737e-05,\t\t0.0036380192634268686,\t\t2.22, 61.69, 0.004502],\n\t\t[1488,\t\t3,\t\t0.0003000059684869966,\t\t0.01500029842434983,\t\t2.22, 61.69, 0.004502],\n\t\t[1489,\t\t3,\t\t7.571817467557017e-06,\t\t0.00037859087337785094,\t\t2.22, 61.69, 0.004502],\n\t\t[1490,\t\t2,\t\t0.020504288751418347,\t\t1.0252144375709173,\t\t0, 0, 0],\n\t\t[1491,\t\t2,\t\t0.005387257187745477,\t\t0.26936285938727383,\t\t0, 0, 0],\n\t\t[1492,\t\t2,\t\t0.014637639488319377,\t\t0.7318819744159688,\t\t0, 0, 0],\n\t\t[1493,\t\t2,\t\t0.005319414988695112,\t\t0.26597074943475557,\t\t0, 0, 0],\n\t\t[1494,\t\t2,\t\t0.0257504251653254,\t\t1.28752125826627,\t\t0, 0, 0],\n\t\t[1495,\t\t2,\t\t0.004260305180484296,\t\t0.2130152590242148,\t\t0, 0, 0],\n\t\t[1496,\t\t3,\t\t1.5185873075624022e-08,\t\t7.592936537812012e-07,\t\t2.22, 61.69, 0.004502],\n\t\t[1497,\t\t2,\t\t0.005670372667342641,\t\t0.28351863336713207,\t\t0, 0, 0],\n\t\t[1498,\t\t2,\t\t0.006735488235440387,\t\t0.3367744117720194,\t\t0, 0, 0],\n\t\t[1499,\t\t3,\t\t0.00014557430965896176,\t\t0.0072787154829480885,\t\t2.22, 61.69, 0.004502],\n\t\t[1500,\t\t3,\t\t9.85597782087346e-06,\t\t0.000492798891043673,\t\t2.22, 61.69, 0.004502],\n\t\t[1501,\t\t3,\t\t0.0005198212383651805,\t\t0.02599106191825903,\t\t2.22, 61.69, 0.004502],\n\t\t[1502,\t\t3,\t\t4.105448673151168e-05,\t\t0.002052724336575584,\t\t2.22, 61.69, 0.004502],\n\t\t[1503,\t\t3,\t\t0.0029266803181735935,\t\t0.14633401590867967,\t\t2.22, 61.69, 0.004502],\n\t\t[1504,\t\t2,\t\t0.012020835078490423,\t\t0.6010417539245212,\t\t0, 0, 0],\n\t\t[1505,\t\t3,\t\t0.0014407364034016888,\t\t0.07203682017008443,\t\t2.22, 61.69, 0.004502],\n\t\t[1506,\t\t2,\t\t0.0035909631390018642,\t\t0.17954815695009319,\t\t0, 0, 0],\n\t\t[1507,\t\t3,\t\t0.000982816273068341,\t\t0.04914081365341705,\t\t2.22, 61.69, 0.004502],\n\t\t[1508,\t\t3,\t\t4.154538017488063e-06,\t\t0.00020772690087440316,\t\t2.22, 61.69, 0.004502],\n\t\t[1509,\t\t3,\t\t1.37186634032331e-07,\t\t6.85933170161655e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1510,\t\t2,\t\t0.00681234986437375,\t\t0.34061749321868756,\t\t0, 0, 0],\n\t\t[1511,\t\t2,\t\t0.00988173435818505,\t\t0.4940867179092525,\t\t0, 0, 0],\n\t\t[1512,\t\t2,\t\t0.004082645917281524,\t\t0.20413229586407625,\t\t0, 0, 0],\n\t\t[1513,\t\t3,\t\t0.001467522271804366,\t\t0.07337611359021831,\t\t2.22, 61.69, 0.004502],\n\t\t[1514,\t\t3,\t\t1.3202056818036577e-06,\t\t6.601028409018288e-05,\t\t2.22, 61.69, 
0.004502],\n\t\t[1515,\t\t3,\t\t1.7255068668904044e-07,\t\t8.627534334452021e-06,\t\t2.22, 61.69, 0.004502],\n\t\t[1516,\t\t3,\t\t1.8340973111507537e-06,\t\t9.170486555753769e-05,\t\t2.22, 61.69, 0.004502],\n\t\t[1517,\t\t3,\t\t8.192048507877762e-05,\t\t0.0040960242539388805,\t\t2.22, 61.69, 0.004502],\n\t\t[1518,\t\t3,\t\t4.268803271333055e-05,\t\t0.0021344016356665274,\t\t2.22, 61.69, 0.004502],\n\t\t[1519,\t\t3,\t\t2.9627970642356104e-06,\t\t0.00014813985321178054,\t\t2.22, 61.69, 0.004502]\n\t])\n\tppc[\"branch_switch\"] = array([\n\t\t[586,\t\t1,\t\t0\t\t],\n\t\t[589,\t\t108,\t\t0\t\t],\n\t\t[590,\t\t108,\t\t0\t\t],\n\t\t[593,\t\t112,\t\t0\t\t],\n\t\t[594,\t\t114,\t\t0\t\t],\n\t\t[595,\t\t115,\t\t0\t\t],\n\t\t[598,\t\t118,\t\t0\t\t],\n\t\t[599,\t\t119,\t\t0\t\t],\n\t\t[601,\t\t119,\t\t0\t\t],\n\t\t[602,\t\t121,\t\t0\t\t],\n\t\t[603,\t\t526,\t\t0\t\t],\n\t\t[607,\t\t127,\t\t0\t\t],\n\t\t[608,\t\t127,\t\t0\t\t],\n\t\t[609,\t\t529,\t\t0\t\t],\n\t\t[612,\t\t493,\t\t0\t\t],\n\t\t[613,\t\t130,\t\t0\t\t],\n\t\t[614,\t\t130,\t\t0\t\t],\n\t\t[616,\t\t132,\t\t0\t\t],\n\t\t[617,\t\t133,\t\t0\t\t],\n\t\t[618,\t\t133,\t\t0\t\t],\n\t\t[619,\t\t134,\t\t0\t\t],\n\t\t[621,\t\t136,\t\t0\t\t],\n\t\t[624,\t\t14,\t\t0\t\t],\n\t\t[628,\t\t142,\t\t0\t\t],\n\t\t[629,\t\t145,\t\t0\t\t],\n\t\t[631,\t\t145,\t\t0\t\t],\n\t\t[632,\t\t145,\t\t0\t\t],\n\t\t[637,\t\t148,\t\t0\t\t],\n\t\t[638,\t\t149,\t\t0\t\t],\n\t\t[640,\t\t153,\t\t0\t\t],\n\t\t[641,\t\t155,\t\t0\t\t],\n\t\t[642,\t\t533,\t\t0\t\t],\n\t\t[643,\t\t534,\t\t0\t\t],\n\t\t[647,\t\t536,\t\t0\t\t],\n\t\t[650,\t\t166,\t\t0\t\t],\n\t\t[652,\t\t167,\t\t0\t\t],\n\t\t[655,\t\t170,\t\t0\t\t],\n\t\t[663,\t\t178,\t\t0\t\t],\n\t\t[666,\t\t180,\t\t0\t\t],\n\t\t[670,\t\t183,\t\t0\t\t],\n\t\t[672,\t\t185,\t\t0\t\t],\n\t\t[676,\t\t19,\t\t0\t\t],\n\t\t[681,\t\t197,\t\t0\t\t],\n\t\t[683,\t\t200,\t\t0\t\t],\n\t\t[687,\t\t202,\t\t0\t\t],\n\t\t[689,\t\t204,\t\t0\t\t],\n\t\t[691,\t\t209,\t\t0\t\t],\n\t\t[694,\t\t21,\t\t0\t\t],\n\t\t[695,\t\t210,\t\t0\t\t],\n\t\t[696,\t\t211,\t\t0\t\t],\n\t\t[697,\t\t211,\t\t0\t\t],\n\t\t[698,\t\t212,\t\t0\t\t],\n\t\t[702,\t\t215,\t\t0\t\t],\n\t\t[705,\t\t217,\t\t0\t\t],\n\t\t[707,\t\t219,\t\t0\t\t],\n\t\t[713,\t\t225,\t\t0\t\t],\n\t\t[714,\t\t225,\t\t0\t\t],\n\t\t[716,\t\t226,\t\t0\t\t],\n\t\t[717,\t\t227,\t\t0\t\t],\n\t\t[719,\t\t229,\t\t0\t\t],\n\t\t[722,\t\t545,\t\t0\t\t],\n\t\t[723,\t\t235,\t\t0\t\t],\n\t\t[724,\t\t238,\t\t0\t\t],\n\t\t[727,\t\t243,\t\t0\t\t],\n\t\t[728,\t\t244,\t\t0\t\t],\n\t\t[730,\t\t547,\t\t0\t\t],\n\t\t[732,\t\t247,\t\t0\t\t],\n\t\t[735,\t\t253,\t\t0\t\t],\n\t\t[738,\t\t258,\t\t0\t\t],\n\t\t[741,\t\t264,\t\t0\t\t],\n\t\t[742,\t\t264,\t\t0\t\t],\n\t\t[743,\t\t500,\t\t0\t\t],\n\t\t[746,\t\t273,\t\t0\t\t],\n\t\t[747,\t\t273,\t\t0\t\t],\n\t\t[748,\t\t274,\t\t0\t\t],\n\t\t[749,\t\t274,\t\t0\t\t],\n\t\t[750,\t\t557,\t\t0\t\t],\n\t\t[753,\t\t28,\t\t0\t\t],\n\t\t[758,\t\t286,\t\t0\t\t],\n\t\t[760,\t\t287,\t\t0\t\t],\n\t\t[761,\t\t288,\t\t0\t\t],\n\t\t[762,\t\t289,\t\t0\t\t],\n\t\t[763,\t\t560,\t\t0\t\t],\n\t\t[765,\t\t560,\t\t0\t\t],\n\t\t[767,\t\t292,\t\t0\t\t],\n\t\t[769,\t\t293,\t\t0\t\t],\n\t\t[771,\t\t297,\t\t0\t\t],\n\t\t[772,\t\t3,\t\t0\t\t],\n\t\t[774,\t\t300,\t\t0\t\t],\n\t\t[777,\t\t300,\t\t0\t\t],\n\t\t[778,\t\t300,\t\t0\t\t],\n\t\t[781,\t\t303,\t\t0\t\t],\n\t\t[784,\t\t563,\t\t0\t\t],\n\t\t[785,\t\t501,\t\t0\t\t],\n\t\t[787,\t\t308,\t\t0\t\t],\n\t\t[788,\t\t311,\t\t0\t\t],\n\t\t[789,\t\t565,\t\t0\t\t],\n\t\t[791,\t\t314,\t\t0\t\t],\n\t\t[792,\t\t316,\t\t0\t\t],\n\t\t[795,\t\t319,\t\t0\t\t],\n\t\t[800,\t\t326,\t\t
0\t\t],\n\t\t[801,\t\t327,\t\t0\t\t],\n\t\t[802,\t\t327,\t\t0\t\t],\n\t\t[805,\t\t328,\t\t0\t\t],\n\t\t[806,\t\t328,\t\t0\t\t],\n\t\t[808,\t\t329,\t\t0\t\t],\n\t\t[809,\t\t329,\t\t0\t\t],\n\t\t[811,\t\t568,\t\t0\t\t],\n\t\t[814,\t\t570,\t\t0\t\t],\n\t\t[816,\t\t335,\t\t0\t\t],\n\t\t[817,\t\t571,\t\t0\t\t],\n\t\t[821,\t\t338,\t\t0\t\t],\n\t\t[822,\t\t339,\t\t0\t\t],\n\t\t[826,\t\t339,\t\t0\t\t],\n\t\t[830,\t\t345,\t\t0\t\t],\n\t\t[834,\t\t572,\t\t0\t\t],\n\t\t[835,\t\t572,\t\t0\t\t],\n\t\t[836,\t\t572,\t\t0\t\t],\n\t\t[837,\t\t350,\t\t0\t\t],\n\t\t[839,\t\t350,\t\t0\t\t],\n\t\t[841,\t\t573,\t\t0\t\t],\n\t\t[843,\t\t352,\t\t0\t\t],\n\t\t[844,\t\t352,\t\t0\t\t],\n\t\t[845,\t\t356,\t\t0\t\t],\n\t\t[849,\t\t574,\t\t0\t\t],\n\t\t[850,\t\t574,\t\t0\t\t],\n\t\t[851,\t\t575,\t\t0\t\t],\n\t\t[853,\t\t362,\t\t0\t\t],\n\t\t[855,\t\t363,\t\t0\t\t],\n\t\t[856,\t\t363,\t\t0\t\t],\n\t\t[857,\t\t365,\t\t0\t\t],\n\t\t[858,\t\t368,\t\t0\t\t],\n\t\t[859,\t\t368,\t\t0\t\t],\n\t\t[860,\t\t371,\t\t0\t\t],\n\t\t[864,\t\t374,\t\t0\t\t],\n\t\t[865,\t\t375,\t\t0\t\t],\n\t\t[867,\t\t376,\t\t0\t\t],\n\t\t[869,\t\t503,\t\t0\t\t],\n\t\t[870,\t\t503,\t\t0\t\t],\n\t\t[872,\t\t378,\t\t0\t\t],\n\t\t[873,\t\t576,\t\t0\t\t],\n\t\t[874,\t\t576,\t\t0\t\t],\n\t\t[875,\t\t381,\t\t0\t\t],\n\t\t[877,\t\t578,\t\t0\t\t],\n\t\t[881,\t\t388,\t\t0\t\t],\n\t\t[882,\t\t388,\t\t0\t\t],\n\t\t[883,\t\t388,\t\t0\t\t],\n\t\t[885,\t\t393,\t\t0\t\t],\n\t\t[886,\t\t394,\t\t0\t\t],\n\t\t[889,\t\t397,\t\t0\t\t],\n\t\t[890,\t\t40,\t\t0\t\t],\n\t\t[893,\t\t400,\t\t0\t\t],\n\t\t[894,\t\t400,\t\t0\t\t],\n\t\t[895,\t\t580,\t\t0\t\t],\n\t\t[896,\t\t581,\t\t0\t\t],\n\t\t[898,\t\t403,\t\t0\t\t],\n\t\t[900,\t\t405,\t\t0\t\t],\n\t\t[902,\t\t405,\t\t0\t\t],\n\t\t[903,\t\t406,\t\t0\t\t],\n\t\t[905,\t\t413,\t\t0\t\t],\n\t\t[906,\t\t414,\t\t0\t\t],\n\t\t[907,\t\t583,\t\t0\t\t],\n\t\t[909,\t\t417,\t\t0\t\t],\n\t\t[915,\t\t423,\t\t0\t\t],\n\t\t[917,\t\t43,\t\t0\t\t],\n\t\t[918,\t\t424,\t\t0\t\t],\n\t\t[920,\t\t428,\t\t0\t\t],\n\t\t[921,\t\t428,\t\t0\t\t],\n\t\t[922,\t\t429,\t\t0\t\t],\n\t\t[923,\t\t432,\t\t0\t\t],\n\t\t[925,\t\t44,\t\t0\t\t],\n\t\t[931,\t\t439,\t\t0\t\t],\n\t\t[935,\t\t45,\t\t0\t\t],\n\t\t[936,\t\t445,\t\t0\t\t],\n\t\t[937,\t\t447,\t\t0\t\t],\n\t\t[939,\t\t450,\t\t0\t\t],\n\t\t[940,\t\t451,\t\t0\t\t],\n\t\t[944,\t\t458,\t\t0\t\t],\n\t\t[950,\t\t462,\t\t0\t\t],\n\t\t[952,\t\t47,\t\t0\t\t],\n\t\t[957,\t\t478,\t\t0\t\t],\n\t\t[958,\t\t478,\t\t0\t\t],\n\t\t[959,\t\t478,\t\t0\t\t],\n\t\t[960,\t\t479,\t\t0\t\t],\n\t\t[963,\t\t481,\t\t0\t\t],\n\t\t[965,\t\t49,\t\t0\t\t],\n\t\t[966,\t\t49,\t\t0\t\t],\n\t\t[967,\t\t49,\t\t0\t\t],\n\t\t[968,\t\t486,\t\t0\t\t],\n\t\t[969,\t\t486,\t\t0\t\t],\n\t\t[971,\t\t51,\t\t0\t\t],\n\t\t[973,\t\t506,\t\t0\t\t],\n\t\t[976,\t\t58,\t\t0\t\t],\n\t\t[978,\t\t491,\t\t0\t\t],\n\t\t[981,\t\t62,\t\t0\t\t],\n\t\t[982,\t\t62,\t\t0\t\t],\n\t\t[983,\t\t62,\t\t0\t\t],\n\t\t[984,\t\t63,\t\t0\t\t],\n\t\t[985,\t\t63,\t\t0\t\t],\n\t\t[986,\t\t64,\t\t0\t\t],\n\t\t[987,\t\t65,\t\t0\t\t],\n\t\t[988,\t\t66,\t\t0\t\t],\n\t\t[993,\t\t67,\t\t0\t\t],\n\t\t[994,\t\t67,\t\t0\t\t],\n\t\t[995,\t\t509,\t\t0\t\t],\n\t\t[997,\t\t510,\t\t0\t\t],\n\t\t[999,\t\t70,\t\t0\t\t],\n\t\t[1000,\t\t71,\t\t0\t\t],\n\t\t[1002,\t\t71,\t\t0\t\t],\n\t\t[1003,\t\t72,\t\t0\t\t],\n\t\t[1007,\t\t511,\t\t0\t\t],\n\t\t[1008,\t\t75,\t\t0\t\t],\n\t\t[1010,\t\t79,\t\t0\t\t],\n\t\t[1011,\t\t79,\t\t0\t\t],\n\t\t[1012,\t\t81,\t\t0\t\t],\n\t\t[1014,\t\t83,\t\t0\t\t],\n\t\t[1026,\t\t518,\t\t0\t\t],\n\t\t[1027,\t\t218,\t\t0\t\t],\n\t\t[1028,\t\t221,\t\t0\t\t],\n\t\t[1029,\t\t268,\t\t0
\t\t],\n\t\t[1030,\t\t269,\t\t0\t\t],\n\t\t[1031,\t\t498,\t\t0\t\t],\n\t\t[1032,\t\t1,\t\t0\t\t],\n\t\t[1033,\t\t3,\t\t0\t\t],\n\t\t[1034,\t\t4,\t\t0\t\t],\n\t\t[1035,\t\t6,\t\t0\t\t],\n\t\t[1036,\t\t7,\t\t0\t\t],\n\t\t[1037,\t\t8,\t\t0\t\t],\n\t\t[1038,\t\t9,\t\t0\t\t],\n\t\t[1039,\t\t11,\t\t0\t\t],\n\t\t[1040,\t\t14,\t\t0\t\t],\n\t\t[1041,\t\t16,\t\t0\t\t],\n\t\t[1042,\t\t17,\t\t0\t\t],\n\t\t[1043,\t\t19,\t\t0\t\t],\n\t\t[1044,\t\t21,\t\t0\t\t],\n\t\t[1045,\t\t23,\t\t0\t\t],\n\t\t[1046,\t\t25,\t\t0\t\t],\n\t\t[1047,\t\t27,\t\t0\t\t],\n\t\t[1048,\t\t28,\t\t0\t\t],\n\t\t[1049,\t\t29,\t\t0\t\t],\n\t\t[1050,\t\t31,\t\t0\t\t],\n\t\t[1051,\t\t33,\t\t0\t\t],\n\t\t[1052,\t\t34,\t\t0\t\t],\n\t\t[1053,\t\t35,\t\t0\t\t],\n\t\t[1054,\t\t36,\t\t0\t\t],\n\t\t[1055,\t\t38,\t\t0\t\t],\n\t\t[1056,\t\t39,\t\t0\t\t],\n\t\t[1057,\t\t40,\t\t0\t\t],\n\t\t[1058,\t\t41,\t\t0\t\t],\n\t\t[1059,\t\t43,\t\t0\t\t],\n\t\t[1060,\t\t44,\t\t0\t\t],\n\t\t[1061,\t\t45,\t\t0\t\t],\n\t\t[1062,\t\t47,\t\t0\t\t],\n\t\t[1063,\t\t48,\t\t0\t\t],\n\t\t[1064,\t\t49,\t\t0\t\t],\n\t\t[1065,\t\t50,\t\t0\t\t],\n\t\t[1066,\t\t51,\t\t0\t\t],\n\t\t[1067,\t\t53,\t\t0\t\t],\n\t\t[1068,\t\t54,\t\t0\t\t],\n\t\t[1069,\t\t55,\t\t0\t\t],\n\t\t[1070,\t\t57,\t\t0\t\t],\n\t\t[1071,\t\t58,\t\t0\t\t],\n\t\t[1072,\t\t59,\t\t0\t\t],\n\t\t[1073,\t\t60,\t\t0\t\t],\n\t\t[1074,\t\t62,\t\t0\t\t],\n\t\t[1075,\t\t63,\t\t0\t\t],\n\t\t[1076,\t\t64,\t\t0\t\t],\n\t\t[1077,\t\t65,\t\t0\t\t],\n\t\t[1078,\t\t66,\t\t0\t\t],\n\t\t[1079,\t\t67,\t\t0\t\t],\n\t\t[1080,\t\t70,\t\t0\t\t],\n\t\t[1081,\t\t71,\t\t0\t\t],\n\t\t[1082,\t\t72,\t\t0\t\t],\n\t\t[1083,\t\t73,\t\t0\t\t],\n\t\t[1084,\t\t75,\t\t0\t\t],\n\t\t[1085,\t\t76,\t\t0\t\t],\n\t\t[1086,\t\t77,\t\t0\t\t],\n\t\t[1087,\t\t79,\t\t0\t\t],\n\t\t[1088,\t\t80,\t\t0\t\t],\n\t\t[1089,\t\t81,\t\t0\t\t],\n\t\t[1090,\t\t82,\t\t0\t\t],\n\t\t[1091,\t\t83,\t\t0\t\t],\n\t\t[1092,\t\t84,\t\t0\t\t],\n\t\t[1093,\t\t85,\t\t0\t\t],\n\t\t[1094,\t\t88,\t\t0\t\t],\n\t\t[1095,\t\t89,\t\t0\t\t],\n\t\t[1096,\t\t90,\t\t0\t\t],\n\t\t[1097,\t\t91,\t\t0\t\t],\n\t\t[1098,\t\t92,\t\t0\t\t],\n\t\t[1099,\t\t93,\t\t0\t\t],\n\t\t[1100,\t\t97,\t\t0\t\t],\n\t\t[1101,\t\t98,\t\t0\t\t],\n\t\t[1102,\t\t101,\t\t0\t\t],\n\t\t[1103,\t\t102,\t\t0\t\t],\n\t\t[1104,\t\t103,\t\t0\t\t],\n\t\t[1105,\t\t108,\t\t0\t\t],\n\t\t[1106,\t\t109,\t\t0\t\t],\n\t\t[1107,\t\t110,\t\t0\t\t],\n\t\t[1108,\t\t111,\t\t0\t\t],\n\t\t[1109,\t\t112,\t\t0\t\t],\n\t\t[1110,\t\t113,\t\t0\t\t],\n\t\t[1111,\t\t114,\t\t0\t\t],\n\t\t[1112,\t\t115,\t\t0\t\t],\n\t\t[1113,\t\t116,\t\t0\t\t],\n\t\t[1114,\t\t118,\t\t0\t\t],\n\t\t[1115,\t\t119,\t\t0\t\t],\n\t\t[1116,\t\t121,\t\t0\t\t],\n\t\t[1117,\t\t122,\t\t0\t\t],\n\t\t[1118,\t\t126,\t\t0\t\t],\n\t\t[1119,\t\t127,\t\t0\t\t],\n\t\t[1120,\t\t130,\t\t0\t\t],\n\t\t[1121,\t\t131,\t\t0\t\t],\n\t\t[1122,\t\t132,\t\t0\t\t],\n\t\t[1123,\t\t133,\t\t0\t\t],\n\t\t[1124,\t\t134,\t\t0\t\t],\n\t\t[1125,\t\t135,\t\t0\t\t],\n\t\t[1126,\t\t136,\t\t0\t\t],\n\t\t[1127,\t\t137,\t\t0\t\t],\n\t\t[1128,\t\t139,\t\t0\t\t],\n\t\t[1129,\t\t140,\t\t0\t\t],\n\t\t[1130,\t\t141,\t\t0\t\t],\n\t\t[1131,\t\t142,\t\t0\t\t],\n\t\t[1132,\t\t144,\t\t0\t\t],\n\t\t[1133,\t\t145,\t\t0\t\t],\n\t\t[1134,\t\t146,\t\t0\t\t],\n\t\t[1135,\t\t147,\t\t0\t\t],\n\t\t[1136,\t\t148,\t\t0\t\t],\n\t\t[1137,\t\t149,\t\t0\t\t],\n\t\t[1138,\t\t150,\t\t0\t\t],\n\t\t[1139,\t\t151,\t\t0\t\t],\n\t\t[1140,\t\t152,\t\t0\t\t],\n\t\t[1141,\t\t153,\t\t0\t\t],\n\t\t[1142,\t\t154,\t\t0\t\t],\n\t\t[1143,\t\t155,\t\t0\t\t],\n\t\t[1144,\t\t158,\t\t0\t\t],\n\t\t[1145,\t\t161,\t\t0\t\t],\n\t\t[1146,\t\t162,\t\t0\t\t]
,\n\t\t[1147,\t\t163,\t\t0\t\t],\n\t\t[1148,\t\t164,\t\t0\t\t],\n\t\t[1149,\t\t166,\t\t0\t\t],\n\t\t[1150,\t\t167,\t\t0\t\t],\n\t\t[1151,\t\t168,\t\t0\t\t],\n\t\t[1152,\t\t169,\t\t0\t\t],\n\t\t[1153,\t\t170,\t\t0\t\t],\n\t\t[1154,\t\t171,\t\t0\t\t],\n\t\t[1155,\t\t172,\t\t0\t\t],\n\t\t[1156,\t\t173,\t\t0\t\t],\n\t\t[1157,\t\t174,\t\t0\t\t],\n\t\t[1158,\t\t175,\t\t0\t\t],\n\t\t[1159,\t\t176,\t\t0\t\t],\n\t\t[1160,\t\t177,\t\t0\t\t],\n\t\t[1161,\t\t178,\t\t0\t\t],\n\t\t[1162,\t\t179,\t\t0\t\t],\n\t\t[1163,\t\t180,\t\t0\t\t],\n\t\t[1164,\t\t181,\t\t0\t\t],\n\t\t[1165,\t\t182,\t\t0\t\t],\n\t\t[1166,\t\t183,\t\t0\t\t],\n\t\t[1167,\t\t185,\t\t0\t\t],\n\t\t[1168,\t\t186,\t\t0\t\t],\n\t\t[1169,\t\t187,\t\t0\t\t],\n\t\t[1170,\t\t188,\t\t0\t\t],\n\t\t[1171,\t\t189,\t\t0\t\t],\n\t\t[1172,\t\t190,\t\t0\t\t],\n\t\t[1173,\t\t192,\t\t0\t\t],\n\t\t[1174,\t\t193,\t\t0\t\t],\n\t\t[1175,\t\t194,\t\t0\t\t],\n\t\t[1176,\t\t196,\t\t0\t\t],\n\t\t[1177,\t\t197,\t\t0\t\t],\n\t\t[1178,\t\t198,\t\t0\t\t],\n\t\t[1179,\t\t199,\t\t0\t\t],\n\t\t[1180,\t\t200,\t\t0\t\t],\n\t\t[1181,\t\t202,\t\t0\t\t],\n\t\t[1182,\t\t203,\t\t0\t\t],\n\t\t[1183,\t\t204,\t\t0\t\t],\n\t\t[1184,\t\t205,\t\t0\t\t],\n\t\t[1185,\t\t206,\t\t0\t\t],\n\t\t[1186,\t\t207,\t\t0\t\t],\n\t\t[1187,\t\t208,\t\t0\t\t],\n\t\t[1188,\t\t209,\t\t0\t\t],\n\t\t[1189,\t\t210,\t\t0\t\t],\n\t\t[1190,\t\t211,\t\t0\t\t],\n\t\t[1191,\t\t212,\t\t0\t\t],\n\t\t[1192,\t\t213,\t\t0\t\t],\n\t\t[1193,\t\t214,\t\t0\t\t],\n\t\t[1194,\t\t215,\t\t0\t\t],\n\t\t[1195,\t\t216,\t\t0\t\t],\n\t\t[1196,\t\t217,\t\t0\t\t],\n\t\t[1197,\t\t218,\t\t0\t\t],\n\t\t[1198,\t\t219,\t\t0\t\t],\n\t\t[1201,\t\t223,\t\t0\t\t],\n\t\t[1202,\t\t224,\t\t0\t\t],\n\t\t[1203,\t\t225,\t\t0\t\t],\n\t\t[1204,\t\t226,\t\t0\t\t],\n\t\t[1205,\t\t227,\t\t0\t\t],\n\t\t[1206,\t\t228,\t\t0\t\t],\n\t\t[1207,\t\t229,\t\t0\t\t],\n\t\t[1208,\t\t230,\t\t0\t\t],\n\t\t[1209,\t\t234,\t\t0\t\t],\n\t\t[1210,\t\t235,\t\t0\t\t],\n\t\t[1211,\t\t237,\t\t0\t\t],\n\t\t[1212,\t\t238,\t\t0\t\t],\n\t\t[1213,\t\t239,\t\t0\t\t],\n\t\t[1214,\t\t240,\t\t0\t\t],\n\t\t[1215,\t\t241,\t\t0\t\t],\n\t\t[1216,\t\t242,\t\t0\t\t],\n\t\t[1217,\t\t243,\t\t0\t\t],\n\t\t[1218,\t\t244,\t\t0\t\t],\n\t\t[1219,\t\t247,\t\t0\t\t],\n\t\t[1220,\t\t251,\t\t0\t\t],\n\t\t[1221,\t\t252,\t\t0\t\t],\n\t\t[1222,\t\t253,\t\t0\t\t],\n\t\t[1223,\t\t254,\t\t0\t\t],\n\t\t[1224,\t\t255,\t\t0\t\t],\n\t\t[1225,\t\t256,\t\t0\t\t],\n\t\t[1226,\t\t257,\t\t0\t\t],\n\t\t[1227,\t\t258,\t\t0\t\t],\n\t\t[1228,\t\t260,\t\t0\t\t],\n\t\t[1229,\t\t263,\t\t0\t\t],\n\t\t[1230,\t\t264,\t\t0\t\t],\n\t\t[1231,\t\t266,\t\t0\t\t],\n\t\t[1232,\t\t267,\t\t0\t\t],\n\t\t[1235,\t\t271,\t\t0\t\t],\n\t\t[1236,\t\t272,\t\t0\t\t],\n\t\t[1237,\t\t273,\t\t0\t\t],\n\t\t[1238,\t\t274,\t\t0\t\t],\n\t\t[1239,\t\t275,\t\t0\t\t],\n\t\t[1240,\t\t276,\t\t0\t\t],\n\t\t[1241,\t\t278,\t\t0\t\t],\n\t\t[1242,\t\t281,\t\t0\t\t],\n\t\t[1243,\t\t282,\t\t0\t\t],\n\t\t[1244,\t\t283,\t\t0\t\t],\n\t\t[1245,\t\t284,\t\t0\t\t],\n\t\t[1246,\t\t285,\t\t0\t\t],\n\t\t[1247,\t\t286,\t\t0\t\t],\n\t\t[1248,\t\t287,\t\t0\t\t],\n\t\t[1249,\t\t288,\t\t0\t\t],\n\t\t[1250,\t\t289,\t\t0\t\t],\n\t\t[1251,\t\t291,\t\t0\t\t],\n\t\t[1252,\t\t292,\t\t0\t\t],\n\t\t[1253,\t\t293,\t\t0\t\t],\n\t\t[1254,\t\t294,\t\t0\t\t],\n\t\t[1255,\t\t295,\t\t0\t\t],\n\t\t[1256,\t\t296,\t\t0\t\t],\n\t\t[1257,\t\t297,\t\t0\t\t],\n\t\t[1258,\t\t298,\t\t0\t\t],\n\t\t[1259,\t\t299,\t\t0\t\t],\n\t\t[1260,\t\t300,\t\t0\t\t],\n\t\t[1261,\t\t302,\t\t0\t\t],\n\t\t[1262,\t\t303,\t\t0\t\t],\n\t\t[1263,\t\t304,\t\t0\t\t],\n\t\t[1264,\t\t307,\t\t0\t\t],\n\t\t[1265,\t\t308,
\t\t0\t\t],\n\t\t[1266,\t\t309,\t\t0\t\t],\n\t\t[1267,\t\t311,\t\t0\t\t],\n\t\t[1268,\t\t312,\t\t0\t\t],\n\t\t[1269,\t\t314,\t\t0\t\t],\n\t\t[1270,\t\t316,\t\t0\t\t],\n\t\t[1271,\t\t317,\t\t0\t\t],\n\t\t[1272,\t\t318,\t\t0\t\t],\n\t\t[1273,\t\t319,\t\t0\t\t],\n\t\t[1274,\t\t321,\t\t0\t\t],\n\t\t[1275,\t\t322,\t\t0\t\t],\n\t\t[1276,\t\t323,\t\t0\t\t],\n\t\t[1277,\t\t324,\t\t0\t\t],\n\t\t[1278,\t\t325,\t\t0\t\t],\n\t\t[1279,\t\t326,\t\t0\t\t],\n\t\t[1280,\t\t327,\t\t0\t\t],\n\t\t[1281,\t\t328,\t\t0\t\t],\n\t\t[1282,\t\t329,\t\t0\t\t],\n\t\t[1283,\t\t331,\t\t0\t\t],\n\t\t[1284,\t\t333,\t\t0\t\t],\n\t\t[1285,\t\t335,\t\t0\t\t],\n\t\t[1286,\t\t337,\t\t0\t\t],\n\t\t[1287,\t\t338,\t\t0\t\t],\n\t\t[1288,\t\t339,\t\t0\t\t],\n\t\t[1289,\t\t340,\t\t0\t\t],\n\t\t[1290,\t\t341,\t\t0\t\t],\n\t\t[1291,\t\t342,\t\t0\t\t],\n\t\t[1292,\t\t343,\t\t0\t\t],\n\t\t[1293,\t\t344,\t\t0\t\t],\n\t\t[1294,\t\t345,\t\t0\t\t],\n\t\t[1295,\t\t346,\t\t0\t\t],\n\t\t[1296,\t\t347,\t\t0\t\t],\n\t\t[1297,\t\t348,\t\t0\t\t],\n\t\t[1298,\t\t350,\t\t0\t\t],\n\t\t[1299,\t\t352,\t\t0\t\t],\n\t\t[1300,\t\t353,\t\t0\t\t],\n\t\t[1301,\t\t354,\t\t0\t\t],\n\t\t[1302,\t\t355,\t\t0\t\t],\n\t\t[1303,\t\t356,\t\t0\t\t],\n\t\t[1304,\t\t357,\t\t0\t\t],\n\t\t[1305,\t\t359,\t\t0\t\t],\n\t\t[1306,\t\t361,\t\t0\t\t],\n\t\t[1307,\t\t362,\t\t0\t\t],\n\t\t[1308,\t\t363,\t\t0\t\t],\n\t\t[1309,\t\t364,\t\t0\t\t],\n\t\t[1310,\t\t365,\t\t0\t\t],\n\t\t[1311,\t\t366,\t\t0\t\t],\n\t\t[1312,\t\t367,\t\t0\t\t],\n\t\t[1313,\t\t368,\t\t0\t\t],\n\t\t[1314,\t\t369,\t\t0\t\t],\n\t\t[1315,\t\t370,\t\t0\t\t],\n\t\t[1316,\t\t371,\t\t0\t\t],\n\t\t[1317,\t\t372,\t\t0\t\t],\n\t\t[1318,\t\t373,\t\t0\t\t],\n\t\t[1319,\t\t374,\t\t0\t\t],\n\t\t[1320,\t\t375,\t\t0\t\t],\n\t\t[1321,\t\t376,\t\t0\t\t],\n\t\t[1322,\t\t377,\t\t0\t\t],\n\t\t[1323,\t\t378,\t\t0\t\t],\n\t\t[1324,\t\t379,\t\t0\t\t],\n\t\t[1325,\t\t381,\t\t0\t\t],\n\t\t[1326,\t\t384,\t\t0\t\t],\n\t\t[1327,\t\t385,\t\t0\t\t],\n\t\t[1328,\t\t386,\t\t0\t\t],\n\t\t[1329,\t\t387,\t\t0\t\t],\n\t\t[1330,\t\t388,\t\t0\t\t],\n\t\t[1331,\t\t390,\t\t0\t\t],\n\t\t[1332,\t\t391,\t\t0\t\t],\n\t\t[1333,\t\t392,\t\t0\t\t],\n\t\t[1334,\t\t393,\t\t0\t\t],\n\t\t[1335,\t\t394,\t\t0\t\t],\n\t\t[1336,\t\t395,\t\t0\t\t],\n\t\t[1337,\t\t396,\t\t0\t\t],\n\t\t[1338,\t\t397,\t\t0\t\t],\n\t\t[1339,\t\t398,\t\t0\t\t],\n\t\t[1340,\t\t399,\t\t0\t\t],\n\t\t[1341,\t\t400,\t\t0\t\t],\n\t\t[1342,\t\t403,\t\t0\t\t],\n\t\t[1343,\t\t404,\t\t0\t\t],\n\t\t[1344,\t\t405,\t\t0\t\t],\n\t\t[1345,\t\t406,\t\t0\t\t],\n\t\t[1346,\t\t407,\t\t0\t\t],\n\t\t[1347,\t\t408,\t\t0\t\t],\n\t\t[1348,\t\t410,\t\t0\t\t],\n\t\t[1349,\t\t411,\t\t0\t\t],\n\t\t[1350,\t\t412,\t\t0\t\t],\n\t\t[1351,\t\t413,\t\t0\t\t],\n\t\t[1352,\t\t414,\t\t0\t\t],\n\t\t[1354,\t\t417,\t\t0\t\t],\n\t\t[1355,\t\t418,\t\t0\t\t],\n\t\t[1356,\t\t419,\t\t0\t\t],\n\t\t[1357,\t\t420,\t\t0\t\t],\n\t\t[1358,\t\t421,\t\t0\t\t],\n\t\t[1359,\t\t422,\t\t0\t\t],\n\t\t[1360,\t\t423,\t\t0\t\t],\n\t\t[1361,\t\t424,\t\t0\t\t],\n\t\t[1362,\t\t425,\t\t0\t\t],\n\t\t[1363,\t\t426,\t\t0\t\t],\n\t\t[1364,\t\t427,\t\t0\t\t],\n\t\t[1365,\t\t428,\t\t0\t\t],\n\t\t[1366,\t\t429,\t\t0\t\t],\n\t\t[1367,\t\t430,\t\t0\t\t],\n\t\t[1368,\t\t431,\t\t0\t\t],\n\t\t[1369,\t\t432,\t\t0\t\t],\n\t\t[1370,\t\t433,\t\t0\t\t],\n\t\t[1371,\t\t434,\t\t0\t\t],\n\t\t[1372,\t\t435,\t\t0\t\t],\n\t\t[1373,\t\t436,\t\t0\t\t],\n\t\t[1376,\t\t439,\t\t0\t\t],\n\t\t[1377,\t\t440,\t\t0\t\t],\n\t\t[1378,\t\t441,\t\t0\t\t],\n\t\t[1379,\t\t442,\t\t0\t\t],\n\t\t[1380,\t\t443,\t\t0\t\t],\n\t\t[1381,\t\t445,\t\t0\t\t],\n\t\t[1382,\t\t446,\t\t0\t\t],\n\t\t[138
3,\t\t447,\t\t0\t\t],\n\t\t[1384,\t\t448,\t\t0\t\t],\n\t\t[1385,\t\t449,\t\t0\t\t],\n\t\t[1386,\t\t450,\t\t0\t\t],\n\t\t[1387,\t\t451,\t\t0\t\t],\n\t\t[1388,\t\t453,\t\t0\t\t],\n\t\t[1389,\t\t454,\t\t0\t\t],\n\t\t[1390,\t\t455,\t\t0\t\t],\n\t\t[1391,\t\t456,\t\t0\t\t],\n\t\t[1392,\t\t457,\t\t0\t\t],\n\t\t[1393,\t\t458,\t\t0\t\t],\n\t\t[1394,\t\t459,\t\t0\t\t],\n\t\t[1395,\t\t460,\t\t0\t\t],\n\t\t[1396,\t\t461,\t\t0\t\t],\n\t\t[1397,\t\t462,\t\t0\t\t],\n\t\t[1398,\t\t463,\t\t0\t\t],\n\t\t[1399,\t\t464,\t\t0\t\t],\n\t\t[1400,\t\t465,\t\t0\t\t],\n\t\t[1401,\t\t466,\t\t0\t\t],\n\t\t[1402,\t\t467,\t\t0\t\t],\n\t\t[1403,\t\t468,\t\t0\t\t],\n\t\t[1404,\t\t469,\t\t0\t\t],\n\t\t[1405,\t\t470,\t\t0\t\t],\n\t\t[1406,\t\t471,\t\t0\t\t],\n\t\t[1407,\t\t472,\t\t0\t\t],\n\t\t[1408,\t\t473,\t\t0\t\t],\n\t\t[1409,\t\t474,\t\t0\t\t],\n\t\t[1410,\t\t475,\t\t0\t\t],\n\t\t[1411,\t\t476,\t\t0\t\t],\n\t\t[1412,\t\t477,\t\t0\t\t],\n\t\t[1413,\t\t478,\t\t0\t\t],\n\t\t[1414,\t\t479,\t\t0\t\t],\n\t\t[1415,\t\t480,\t\t0\t\t],\n\t\t[1416,\t\t481,\t\t0\t\t],\n\t\t[1417,\t\t482,\t\t0\t\t],\n\t\t[1418,\t\t483,\t\t0\t\t],\n\t\t[1419,\t\t484,\t\t0\t\t],\n\t\t[1420,\t\t485,\t\t0\t\t],\n\t\t[1421,\t\t486,\t\t0\t\t],\n\t\t[1422,\t\t487,\t\t0\t\t],\n\t\t[1423,\t\t488,\t\t0\t\t],\n\t\t[1424,\t\t489,\t\t0\t\t],\n\t\t[1425,\t\t490,\t\t0\t\t],\n\t\t[1426,\t\t491,\t\t0\t\t],\n\t\t[1427,\t\t492,\t\t0\t\t],\n\t\t[1428,\t\t493,\t\t0\t\t],\n\t\t[1429,\t\t494,\t\t0\t\t],\n\t\t[1430,\t\t495,\t\t0\t\t],\n\t\t[1431,\t\t496,\t\t0\t\t],\n\t\t[1432,\t\t497,\t\t0\t\t],\n\t\t[1433,\t\t498,\t\t0\t\t],\n\t\t[1434,\t\t499,\t\t0\t\t],\n\t\t[1435,\t\t500,\t\t0\t\t],\n\t\t[1436,\t\t501,\t\t0\t\t],\n\t\t[1437,\t\t502,\t\t0\t\t],\n\t\t[1438,\t\t503,\t\t0\t\t],\n\t\t[1439,\t\t504,\t\t0\t\t],\n\t\t[1440,\t\t505,\t\t0\t\t],\n\t\t[1441,\t\t506,\t\t0\t\t],\n\t\t[1442,\t\t507,\t\t0\t\t],\n\t\t[1443,\t\t508,\t\t0\t\t],\n\t\t[1444,\t\t509,\t\t0\t\t],\n\t\t[1445,\t\t510,\t\t0\t\t],\n\t\t[1446,\t\t511,\t\t0\t\t],\n\t\t[1447,\t\t512,\t\t0\t\t],\n\t\t[1448,\t\t513,\t\t0\t\t],\n\t\t[1449,\t\t514,\t\t0\t\t],\n\t\t[1450,\t\t515,\t\t0\t\t],\n\t\t[1451,\t\t516,\t\t0\t\t],\n\t\t[1452,\t\t517,\t\t0\t\t],\n\t\t[1453,\t\t518,\t\t0\t\t],\n\t\t[1454,\t\t519,\t\t0\t\t],\n\t\t[1455,\t\t520,\t\t0\t\t],\n\t\t[1456,\t\t521,\t\t0\t\t],\n\t\t[1457,\t\t522,\t\t0\t\t],\n\t\t[1458,\t\t523,\t\t0\t\t],\n\t\t[1459,\t\t524,\t\t0\t\t],\n\t\t[1460,\t\t525,\t\t0\t\t],\n\t\t[1461,\t\t526,\t\t0\t\t],\n\t\t[1462,\t\t527,\t\t0\t\t],\n\t\t[1463,\t\t528,\t\t0\t\t],\n\t\t[1464,\t\t529,\t\t0\t\t],\n\t\t[1465,\t\t530,\t\t0\t\t],\n\t\t[1466,\t\t531,\t\t0\t\t],\n\t\t[1467,\t\t532,\t\t0\t\t],\n\t\t[1468,\t\t533,\t\t0\t\t],\n\t\t[1469,\t\t534,\t\t0\t\t],\n\t\t[1470,\t\t535,\t\t0\t\t],\n\t\t[1471,\t\t536,\t\t0\t\t],\n\t\t[1472,\t\t537,\t\t0\t\t],\n\t\t[1473,\t\t538,\t\t0\t\t],\n\t\t[1474,\t\t539,\t\t0\t\t],\n\t\t[1475,\t\t540,\t\t0\t\t],\n\t\t[1476,\t\t541,\t\t0\t\t],\n\t\t[1477,\t\t542,\t\t0\t\t],\n\t\t[1478,\t\t543,\t\t0\t\t],\n\t\t[1479,\t\t544,\t\t0\t\t],\n\t\t[1480,\t\t545,\t\t0\t\t],\n\t\t[1481,\t\t546,\t\t0\t\t],\n\t\t[1482,\t\t547,\t\t0\t\t],\n\t\t[1483,\t\t548,\t\t0\t\t],\n\t\t[1484,\t\t549,\t\t0\t\t],\n\t\t[1485,\t\t550,\t\t0\t\t],\n\t\t[1486,\t\t551,\t\t0\t\t],\n\t\t[1487,\t\t552,\t\t0\t\t],\n\t\t[1488,\t\t554,\t\t0\t\t],\n\t\t[1489,\t\t555,\t\t0\t\t],\n\t\t[1490,\t\t556,\t\t0\t\t],\n\t\t[1491,\t\t557,\t\t0\t\t],\n\t\t[1492,\t\t558,\t\t0\t\t],\n\t\t[1493,\t\t559,\t\t0\t\t],\n\t\t[1494,\t\t560,\t\t0\t\t],\n\t\t[1495,\t\t561,\t\t0\t\t],\n\t\t[1496,\t\t562,\t\t0\t\t],\n\t\t[1497,\t\t563,\t\t0\t\t],
\n\t\t[1498,\t\t564,\t\t0\t\t],\n\t\t[1499,\t\t565,\t\t0\t\t],\n\t\t[1500,\t\t566,\t\t0\t\t],\n\t\t[1501,\t\t567,\t\t0\t\t],\n\t\t[1502,\t\t568,\t\t0\t\t],\n\t\t[1503,\t\t569,\t\t0\t\t],\n\t\t[1504,\t\t570,\t\t0\t\t],\n\t\t[1505,\t\t571,\t\t0\t\t],\n\t\t[1506,\t\t572,\t\t0\t\t],\n\t\t[1507,\t\t573,\t\t0\t\t],\n\t\t[1508,\t\t574,\t\t0\t\t],\n\t\t[1509,\t\t575,\t\t0\t\t],\n\t\t[1510,\t\t576,\t\t0\t\t],\n\t\t[1511,\t\t577,\t\t0\t\t],\n\t\t[1512,\t\t578,\t\t0\t\t],\n\t\t[1513,\t\t579,\t\t0\t\t],\n\t\t[1514,\t\t580,\t\t0\t\t],\n\t\t[1515,\t\t581,\t\t0\t\t],\n\t\t[1516,\t\t582,\t\t0\t\t],\n\t\t[1517,\t\t583,\t\t0\t\t],\n\t\t[1518,\t\t584,\t\t0\t\t],\n\t\t[1519,\t\t585,\t\t0\t\t],\n\t\t[1,\t\t490,\t\t0\t\t],\n\t\t[3,\t\t4,\t\t1\t\t],\n\t\t[491,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t5,\t\t0\t\t],\n\t\t[8,\t\t9,\t\t0\t\t],\n\t\t[492,\t\t11,\t\t0\t\t],\n\t\t[11,\t\t493,\t\t0\t\t],\n\t\t[492,\t\t493,\t\t1\t\t],\n\t\t[494,\t\t14,\t\t0\t\t],\n\t\t[13,\t\t15,\t\t0\t\t],\n\t\t[16,\t\t5,\t\t0\t\t],\n\t\t[17,\t\t18,\t\t1\t\t],\n\t\t[17,\t\t12,\t\t0\t\t],\n\t\t[14,\t\t495,\t\t0\t\t],\n\t\t[494,\t\t19,\t\t0\t\t],\n\t\t[20,\t\t21,\t\t0\t\t],\n\t\t[20,\t\t22,\t\t1\t\t],\n\t\t[497,\t\t23,\t\t0\t\t],\n\t\t[23,\t\t499,\t\t1\t\t],\n\t\t[25,\t\t26,\t\t0\t\t],\n\t\t[25,\t\t22,\t\t0\t\t],\n\t\t[23,\t\t27,\t\t0\t\t],\n\t\t[28,\t\t23,\t\t0\t\t],\n\t\t[8,\t\t21,\t\t0\t\t],\n\t\t[9,\t\t29,\t\t0\t\t],\n\t\t[30,\t\t25,\t\t1\t\t],\n\t\t[31,\t\t32,\t\t1\t\t],\n\t\t[32,\t\t33,\t\t1\t\t],\n\t\t[34,\t\t35,\t\t0\t\t],\n\t\t[35,\t\t36,\t\t0\t\t],\n\t\t[490,\t\t6,\t\t1\t\t],\n\t\t[37,\t\t10,\t\t1\t\t],\n\t\t[10,\t\t38,\t\t0\t\t],\n\t\t[37,\t\t38,\t\t1\t\t],\n\t\t[39,\t\t40,\t\t1\t\t],\n\t\t[39,\t\t41,\t\t1\t\t],\n\t\t[42,\t\t41,\t\t1\t\t],\n\t\t[18,\t\t42,\t\t1\t\t],\n\t\t[492,\t\t43,\t\t1\t\t],\n\t\t[44,\t\t45,\t\t0\t\t],\n\t\t[44,\t\t505,\t\t0\t\t],\n\t\t[46,\t\t12,\t\t0\t\t],\n\t\t[47,\t\t48,\t\t0\t\t],\n\t\t[49,\t\t50,\t\t0\t\t],\n\t\t[31,\t\t33,\t\t1\t\t],\n\t\t[31,\t\t51,\t\t0\t\t],\n\t\t[52,\t\t53,\t\t1\t\t],\n\t\t[52,\t\t54,\t\t0\t\t],\n\t\t[506,\t\t55,\t\t0\t\t],\n\t\t[506,\t\t507,\t\t1\t\t],\n\t\t[57,\t\t506,\t\t0\t\t],\n\t\t[57,\t\t58,\t\t0\t\t],\n\t\t[58,\t\t506,\t\t0\t\t],\n\t\t[59,\t\t60,\t\t1\t\t],\n\t\t[508,\t\t62,\t\t0\t\t],\n\t\t[30,\t\t61,\t\t1\t\t],\n\t\t[63,\t\t506,\t\t0\t\t],\n\t\t[13,\t\t64,\t\t0\t\t],\n\t\t[65,\t\t66,\t\t1\t\t],\n\t\t[59,\t\t67,\t\t0\t\t],\n\t\t[61,\t\t67,\t\t0\t\t],\n\t\t[68,\t\t69,\t\t1\t\t],\n\t\t[70,\t\t69,\t\t1\t\t],\n\t\t[71,\t\t72,\t\t1\t\t],\n\t\t[73,\t\t74,\t\t1\t\t],\n\t\t[37,\t\t75,\t\t1\t\t],\n\t\t[72,\t\t75,\t\t0\t\t],\n\t\t[37,\t\t72,\t\t1\t\t],\n\t\t[76,\t\t77,\t\t1\t\t],\n\t\t[77,\t\t51,\t\t0\t\t],\n\t\t[73,\t\t72,\t\t1\t\t],\n\t\t[18,\t\t40,\t\t1\t\t],\n\t\t[492,\t\t45,\t\t1\t\t],\n\t\t[10,\t\t74,\t\t1\t\t],\n\t\t[45,\t\t511,\t\t1\t\t],\n\t\t[78,\t\t32,\t\t1\t\t],\n\t\t[79,\t\t80,\t\t0\t\t],\n\t\t[81,\t\t79,\t\t1\t\t],\n\t\t[34,\t\t82,\t\t0\t\t],\n\t\t[83,\t\t84,\t\t0\t\t],\n\t\t[83,\t\t499,\t\t0\t\t],\n\t\t[85,\t\t86,\t\t0\t\t],\n\t\t[87,\t\t86,\t\t1\t\t],\n\t\t[88,\t\t89,\t\t0\t\t],\n\t\t[90,\t\t86,\t\t1\t\t],\n\t\t[91,\t\t86,\t\t0\t\t],\n\t\t[86,\t\t92,\t\t0\t\t],\n\t\t[86,\t\t93,\t\t0\t\t],\n\t\t[94,\t\t86,\t\t1\t\t],\n\t\t[86,\t\t95,\t\t1\t\t],\n\t\t[513,\t\t517,\t\t0\t\t],\n\t\t[97,\t\t66,\t\t1\t\t],\n\t\t[42,\t\t98,\t\t0\t\t],\n\t\t[99,\t\t100,\t\t1\t\t],\n\t\t[42,\t\t101,\t\t0\t\t],\n\t\t[102,\t\t42,\t\t1\t\t],\n\t\t[103,\t\t87,\t\t0\t\t],\n\t\t[104,\t\t103,\t\t0\t\t],\n\t\t[105,\t\t87,\t\t0\t\t],\n\t\t[106,\t\t107,\t\t0\t\t],\n\t\t[108,\t\t107,\t\t0\t\t],\n\t\t[109,\t\t106,\
t\t0\t\t],\n\t\t[110,\t\t111,\t\t1\t\t],\n\t\t[87,\t\t112,\t\t0\t\t],\n\t\t[113,\t\t87,\t\t0\t\t],\n\t\t[87,\t\t85,\t\t1\t\t],\n\t\t[110,\t\t114,\t\t1\t\t],\n\t\t[115,\t\t116,\t\t0\t\t],\n\t\t[117,\t\t118,\t\t0\t\t],\n\t\t[117,\t\t119,\t\t0\t\t],\n\t\t[117,\t\t120,\t\t1\t\t],\n\t\t[121,\t\t122,\t\t0\t\t],\n\t\t[123,\t\t124,\t\t0\t\t],\n\t\t[125,\t\t126,\t\t0\t\t],\n\t\t[127,\t\t119,\t\t0\t\t],\n\t\t[118,\t\t128,\t\t0\t\t],\n\t\t[121,\t\t119,\t\t0\t\t],\n\t\t[530,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t130,\t\t0\t\t],\n\t\t[125,\t\t123,\t\t0\t\t],\n\t\t[131,\t\t132,\t\t0\t\t],\n\t\t[133,\t\t123,\t\t0\t\t],\n\t\t[524,\t\t134,\t\t0\t\t],\n\t\t[135,\t\t136,\t\t0\t\t],\n\t\t[123,\t\t131,\t\t0\t\t],\n\t\t[117,\t\t128,\t\t1\t\t],\n\t\t[137,\t\t521,\t\t0\t\t],\n\t\t[531,\t\t514,\t\t0\t\t],\n\t\t[139,\t\t521,\t\t0\t\t],\n\t\t[140,\t\t514,\t\t0\t\t],\n\t\t[522,\t\t141,\t\t0\t\t],\n\t\t[142,\t\t523,\t\t0\t\t],\n\t\t[530,\t\t526,\t\t0\t\t],\n\t\t[140,\t\t532,\t\t0\t\t],\n\t\t[142,\t\t144,\t\t0\t\t],\n\t\t[140,\t\t522,\t\t0\t\t],\n\t\t[145,\t\t146,\t\t0\t\t],\n\t\t[147,\t\t523,\t\t0\t\t],\n\t\t[144,\t\t523,\t\t0\t\t],\n\t\t[139,\t\t523,\t\t0\t\t],\n\t\t[140,\t\t141,\t\t0\t\t],\n\t\t[528,\t\t526,\t\t0\t\t],\n\t\t[528,\t\t148,\t\t0\t\t],\n\t\t[149,\t\t150,\t\t0\t\t],\n\t\t[145,\t\t528,\t\t0\t\t],\n\t\t[530,\t\t151,\t\t0\t\t],\n\t\t[524,\t\t152,\t\t0\t\t],\n\t\t[149,\t\t525,\t\t1\t\t],\n\t\t[139,\t\t514,\t\t0\t\t],\n\t\t[126,\t\t120,\t\t1\t\t],\n\t\t[530,\t\t153,\t\t0\t\t],\n\t\t[528,\t\t147,\t\t1\t\t],\n\t\t[528,\t\t154,\t\t0\t\t],\n\t\t[130,\t\t120,\t\t1\t\t],\n\t\t[528,\t\t155,\t\t1\t\t],\n\t\t[524,\t\t533,\t\t0\t\t],\n\t\t[524,\t\t149,\t\t0\t\t],\n\t\t[154,\t\t150,\t\t0\t\t],\n\t\t[157,\t\t110,\t\t1\t\t],\n\t\t[119,\t\t158,\t\t0\t\t],\n\t\t[159,\t\t60,\t\t0\t\t],\n\t\t[536,\t\t161,\t\t0\t\t],\n\t\t[115,\t\t151,\t\t0\t\t],\n\t\t[162,\t\t134,\t\t0\t\t],\n\t\t[115,\t\t526,\t\t0\t\t],\n\t\t[138,\t\t87,\t\t0\t\t],\n\t\t[123,\t\t163,\t\t0\t\t],\n\t\t[112,\t\t164,\t\t0\t\t],\n\t\t[112,\t\t165,\t\t0\t\t],\n\t\t[166,\t\t165,\t\t0\t\t],\n\t\t[167,\t\t537,\t\t0\t\t],\n\t\t[168,\t\t104,\t\t0\t\t],\n\t\t[531,\t\t520,\t\t0\t\t],\n\t\t[139,\t\t520,\t\t0\t\t],\n\t\t[520,\t\t169,\t\t0\t\t],\n\t\t[168,\t\t105,\t\t0\t\t],\n\t\t[520,\t\t170,\t\t0\t\t],\n\t\t[171,\t\t89,\t\t0\t\t],\n\t\t[521,\t\t172,\t\t0\t\t],\n\t\t[123,\t\t173,\t\t0\t\t],\n\t\t[521,\t\t174,\t\t0\t\t],\n\t\t[37,\t\t39,\t\t0\t\t],\n\t\t[530,\t\t175,\t\t0\t\t],\n\t\t[530,\t\t176,\t\t0\t\t],\n\t\t[88,\t\t530,\t\t0\t\t],\n\t\t[177,\t\t496,\t\t1\t\t],\n\t\t[178,\t\t525,\t\t0\t\t],\n\t\t[179,\t\t493,\t\t1\t\t],\n\t\t[180,\t\t181,\t\t1\t\t],\n\t\t[182,\t\t180,\t\t0\t\t],\n\t\t[179,\t\t181,\t\t0\t\t],\n\t\t[180,\t\t493,\t\t1\t\t],\n\t\t[183,\t\t30,\t\t0\t\t],\n\t\t[183,\t\t21,\t\t0\t\t],\n\t\t[538,\t\t185,\t\t0\t\t],\n\t\t[538,\t\t89,\t\t0\t\t],\n\t\t[184,\t\t186,\t\t0\t\t],\n\t\t[184,\t\t187,\t\t0\t\t],\n\t\t[520,\t\t172,\t\t0\t\t],\n\t\t[89,\t\t175,\t\t0\t\t],\n\t\t[185,\t\t89,\t\t0\t\t],\n\t\t[89,\t\t188,\t\t0\t\t],\n\t\t[189,\t\t190,\t\t0\t\t],\n\t\t[539,\t\t172,\t\t0\t\t],\n\t\t[504,\t\t192,\t\t0\t\t],\n\t\t[105,\t\t186,\t\t0\t\t],\n\t\t[105,\t\t187,\t\t0\t\t],\n\t\t[539,\t\t193,\t\t0\t\t],\n\t\t[187,\t\t194,\t\t0\t\t],\n\t\t[539,\t\t540,\t\t0\t\t],\n\t\t[539,\t\t196,\t\t0\t\t],\n\t\t[197,\t\t540,\t\t0\t\t],\n\t\t[110,\t\t198,\t\t0\t\t],\n\t\t[197,\t\t539,\t\t0\t\t],\n\t\t[199,\t\t537,\t\t0\t\t],\n\t\t[134,\t\t526,\t\t0\t\t],\n\t\t[200,\t\t193,\t\t0\t\t],\n\t\t[4,\t\t201,\t\t1\t\t],\n\t\t[202,\t\t86,\t\t0\t\t],\n\t\t[85,\t\t203,\t\t0\t\t],\n\t\t[147,\t\t204,\t\t0\
t\t],\n\t\t[147,\t\t205,\t\t0\t\t],\n\t\t[123,\t\t206,\t\t0\t\t],\n\t\t[537,\t\t207,\t\t0\t\t],\n\t\t[165,\t\t208,\t\t0\t\t],\n\t\t[4,\t\t94,\t\t1\t\t],\n\t\t[4,\t\t2,\t\t0\t\t],\n\t\t[209,\t\t4,\t\t0\t\t],\n\t\t[119,\t\t163,\t\t0\t\t],\n\t\t[210,\t\t3,\t\t0\t\t],\n\t\t[99,\t\t211,\t\t0\t\t],\n\t\t[99,\t\t69,\t\t1\t\t],\n\t\t[212,\t\t99,\t\t0\t\t],\n\t\t[213,\t\t214,\t\t0\t\t],\n\t\t[510,\t\t215,\t\t0\t\t],\n\t\t[128,\t\t69,\t\t1\t\t],\n\t\t[216,\t\t69,\t\t1\t\t],\n\t\t[217,\t\t98,\t\t0\t\t],\n\t\t[504,\t\t218,\t\t0\t\t],\n\t\t[177,\t\t504,\t\t1\t\t],\n\t\t[219,\t\t209,\t\t0\t\t],\n\t\t[219,\t\t220,\t\t0\t\t],\n\t\t[94,\t\t95,\t\t1\t\t],\n\t\t[159,\t\t221,\t\t1\t\t],\n\t\t[34,\t\t161,\t\t0\t\t],\n\t\t[222,\t\t221,\t\t0\t\t],\n\t\t[211,\t\t52,\t\t1\t\t],\n\t\t[215,\t\t223,\t\t1\t\t],\n\t\t[224,\t\t215,\t\t0\t\t],\n\t\t[225,\t\t224,\t\t1\t\t],\n\t\t[224,\t\t223,\t\t0\t\t],\n\t\t[226,\t\t6,\t\t0\t\t],\n\t\t[7,\t\t3,\t\t1\t\t],\n\t\t[216,\t\t227,\t\t1\t\t],\n\t\t[228,\t\t229,\t\t0\t\t],\n\t\t[227,\t\t230,\t\t0\t\t],\n\t\t[231,\t\t53,\t\t1\t\t],\n\t\t[544,\t\t545,\t\t0\t\t],\n\t\t[234,\t\t235,\t\t1\t\t],\n\t\t[546,\t\t214,\t\t1\t\t],\n\t\t[233,\t\t227,\t\t0\t\t],\n\t\t[237,\t\t238,\t\t0\t\t],\n\t\t[212,\t\t100,\t\t0\t\t],\n\t\t[519,\t\t239,\t\t0\t\t],\n\t\t[238,\t\t519,\t\t0\t\t],\n\t\t[213,\t\t240,\t\t0\t\t],\n\t\t[241,\t\t242,\t\t1\t\t],\n\t\t[70,\t\t241,\t\t0\t\t],\n\t\t[509,\t\t213,\t\t0\t\t],\n\t\t[68,\t\t243,\t\t0\t\t],\n\t\t[243,\t\t244,\t\t0\t\t],\n\t\t[68,\t\t244,\t\t0\t\t],\n\t\t[544,\t\t547,\t\t1\t\t],\n\t\t[245,\t\t227,\t\t1\t\t],\n\t\t[246,\t\t208,\t\t0\t\t],\n\t\t[112,\t\t208,\t\t0\t\t],\n\t\t[165,\t\t247,\t\t0\t\t],\n\t\t[537,\t\t549,\t\t0\t\t],\n\t\t[537,\t\t550,\t\t0\t\t],\n\t\t[537,\t\t551,\t\t0\t\t],\n\t\t[110,\t\t251,\t\t0\t\t],\n\t\t[510,\t\t252,\t\t1\t\t],\n\t\t[529,\t\t253,\t\t1\t\t],\n\t\t[237,\t\t239,\t\t1\t\t],\n\t\t[254,\t\t238,\t\t1\t\t],\n\t\t[69,\t\t255,\t\t0\t\t],\n\t\t[510,\t\t225,\t\t1\t\t],\n\t\t[256,\t\t257,\t\t0\t\t],\n\t\t[258,\t\t190,\t\t0\t\t],\n\t\t[258,\t\t259,\t\t0\t\t],\n\t\t[260,\t\t261,\t\t1\t\t],\n\t\t[554,\t\t553,\t\t1\t\t],\n\t\t[515,\t\t263,\t\t0\t\t],\n\t\t[14,\t\t264,\t\t1\t\t],\n\t\t[116,\t\t555,\t\t0\t\t],\n\t\t[151,\t\t116,\t\t0\t\t],\n\t\t[111,\t\t114,\t\t1\t\t],\n\t\t[77,\t\t111,\t\t0\t\t],\n\t\t[266,\t\t525,\t\t0\t\t],\n\t\t[267,\t\t120,\t\t1\t\t],\n\t\t[268,\t\t269,\t\t0\t\t],\n\t\t[556,\t\t271,\t\t0\t\t],\n\t\t[556,\t\t272,\t\t0\t\t],\n\t\t[529,\t\t273,\t\t0\t\t],\n\t\t[128,\t\t274,\t\t0\t\t],\n\t\t[34,\t\t275,\t\t0\t\t],\n\t\t[503,\t\t276,\t\t0\t\t],\n\t\t[503,\t\t504,\t\t1\t\t],\n\t\t[177,\t\t218,\t\t1\t\t],\n\t\t[277,\t\t278,\t\t1\t\t],\n\t\t[557,\t\t558,\t\t1\t\t],\n\t\t[557,\t\t559,\t\t1\t\t],\n\t\t[559,\t\t558,\t\t1\t\t],\n\t\t[277,\t\t78,\t\t1\t\t],\n\t\t[277,\t\t279,\t\t1\t\t],\n\t\t[78,\t\t279,\t\t0\t\t],\n\t\t[281,\t\t282,\t\t0\t\t],\n\t\t[283,\t\t161,\t\t1\t\t],\n\t\t[268,\t\t161,\t\t1\t\t],\n\t\t[256,\t\t284,\t\t0\t\t],\n\t\t[515,\t\t516,\t\t1\t\t],\n\t\t[263,\t\t516,\t\t0\t\t],\n\t\t[516,\t\t285,\t\t0\t\t],\n\t\t[63,\t\t286,\t\t0\t\t],\n\t\t[287,\t\t516,\t\t0\t\t],\n\t\t[8,\t\t102,\t\t1\t\t],\n\t\t[8,\t\t101,\t\t1\t\t],\n\t\t[80,\t\t288,\t\t0\t\t],\n\t\t[80,\t\t289,\t\t0\t\t],\n\t\t[276,\t\t560,\t\t0\t\t],\n\t\t[37,\t\t290,\t\t0\t\t],\n\t\t[290,\t\t74,\t\t1\t\t],\n\t\t[512,\t\t291,\t\t0\t\t],\n\t\t[78,\t\t292,\t\t1\t\t],\n\t\t[199,\t\t548,\t\t0\t\t],\n\t\t[491,\t\t293,\t\t0\t\t],\n\t\t[4,\t\t294,\t\t0\t\t],\n\t\t[490,\t\t541,\t\t1\t\t],\n\t\t[491,\t\t295,\t\t0\t\t],\n\t\t[491,\t\t296,\t\t0\t\t],\n\t\t[295,\t\t297,\t\t0\t\t],
\n\t\t[508,\t\t161,\t\t0\t\t],\n\t\t[117,\t\t123,\t\t0\t\t],\n\t\t[133,\t\t117,\t\t0\t\t],\n\t\t[71,\t\t74,\t\t1\t\t],\n\t\t[74,\t\t278,\t\t1\t\t],\n\t\t[298,\t\t515,\t\t0\t\t],\n\t\t[5,\t\t299,\t\t0\t\t],\n\t\t[32,\t\t292,\t\t1\t\t],\n\t\t[5,\t\t29,\t\t1\t\t],\n\t\t[503,\t\t560,\t\t0\t\t],\n\t\t[300,\t\t301,\t\t1\t\t],\n\t\t[51,\t\t300,\t\t0\t\t],\n\t\t[244,\t\t302,\t\t1\t\t],\n\t\t[31,\t\t302,\t\t1\t\t],\n\t\t[51,\t\t282,\t\t1\t\t],\n\t\t[303,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t304,\t\t0\t\t],\n\t\t[305,\t\t259,\t\t0\t\t],\n\t\t[306,\t\t307,\t\t1\t\t],\n\t\t[305,\t\t308,\t\t0\t\t],\n\t\t[305,\t\t309,\t\t0\t\t],\n\t\t[310,\t\t309,\t\t1\t\t],\n\t\t[306,\t\t309,\t\t1\t\t],\n\t\t[311,\t\t280,\t\t0\t\t],\n\t\t[280,\t\t278,\t\t1\t\t],\n\t\t[311,\t\t32,\t\t1\t\t],\n\t\t[13,\t\t312,\t\t1\t\t],\n\t\t[313,\t\t314,\t\t0\t\t],\n\t\t[312,\t\t313,\t\t1\t\t],\n\t\t[547,\t\t566,\t\t1\t\t],\n\t\t[245,\t\t315,\t\t1\t\t],\n\t\t[312,\t\t316,\t\t0\t\t],\n\t\t[312,\t\t314,\t\t0\t\t],\n\t\t[554,\t\t546,\t\t1\t\t],\n\t\t[262,\t\t216,\t\t1\t\t],\n\t\t[317,\t\t233,\t\t0\t\t],\n\t\t[318,\t\t317,\t\t0\t\t],\n\t\t[231,\t\t52,\t\t1\t\t],\n\t\t[319,\t\t567,\t\t0\t\t],\n\t\t[557,\t\t321,\t\t0\t\t],\n\t\t[277,\t\t65,\t\t1\t\t],\n\t\t[322,\t\t288,\t\t1\t\t],\n\t\t[322,\t\t323,\t\t0\t\t],\n\t\t[277,\t\t324,\t\t1\t\t],\n\t\t[324,\t\t325,\t\t0\t\t],\n\t\t[277,\t\t325,\t\t0\t\t],\n\t\t[326,\t\t327,\t\t0\t\t],\n\t\t[328,\t\t326,\t\t1\t\t],\n\t\t[328,\t\t327,\t\t1\t\t],\n\t\t[326,\t\t329,\t\t0\t\t],\n\t\t[568,\t\t329,\t\t1\t\t],\n\t\t[568,\t\t326,\t\t0\t\t],\n\t\t[332,\t\t78,\t\t1\t\t],\n\t\t[333,\t\t306,\t\t0\t\t],\n\t\t[332,\t\t333,\t\t0\t\t],\n\t\t[332,\t\t334,\t\t0\t\t],\n\t\t[66,\t\t334,\t\t1\t\t],\n\t\t[330,\t\t335,\t\t1\t\t],\n\t\t[336,\t\t66,\t\t0\t\t],\n\t\t[330,\t\t336,\t\t1\t\t],\n\t\t[68,\t\t70,\t\t0\t\t],\n\t\t[509,\t\t337,\t\t1\t\t],\n\t\t[324,\t\t288,\t\t0\t\t],\n\t\t[338,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t559,\t\t0\t\t],\n\t\t[339,\t\t340,\t\t1\t\t],\n\t\t[559,\t\t340,\t\t1\t\t],\n\t\t[341,\t\t292,\t\t0\t\t],\n\t\t[557,\t\t342,\t\t0\t\t],\n\t\t[558,\t\t343,\t\t0\t\t],\n\t\t[502,\t\t340,\t\t1\t\t],\n\t\t[72,\t\t32,\t\t1\t\t],\n\t\t[344,\t\t345,\t\t0\t\t],\n\t\t[346,\t\t47,\t\t0\t\t],\n\t\t[46,\t\t47,\t\t0\t\t],\n\t\t[346,\t\t345,\t\t0\t\t],\n\t\t[347,\t\t328,\t\t0\t\t],\n\t\t[347,\t\t348,\t\t1\t\t],\n\t\t[571,\t\t348,\t\t1\t\t],\n\t\t[347,\t\t572,\t\t0\t\t],\n\t\t[571,\t\t570,\t\t1\t\t],\n\t\t[14,\t\t350,\t\t0\t\t],\n\t\t[350,\t\t573,\t\t0\t\t],\n\t\t[15,\t\t351,\t\t1\t\t],\n\t\t[352,\t\t15,\t\t0\t\t],\n\t\t[15,\t\t335,\t\t1\t\t],\n\t\t[232,\t\t227,\t\t0\t\t],\n\t\t[565,\t\t544,\t\t1\t\t],\n\t\t[235,\t\t567,\t\t1\t\t],\n\t\t[567,\t\t286,\t\t0\t\t],\n\t\t[353,\t\t519,\t\t0\t\t],\n\t\t[354,\t\t353,\t\t0\t\t],\n\t\t[355,\t\t354,\t\t0\t\t],\n\t\t[354,\t\t356,\t\t0\t\t],\n\t\t[357,\t\t358,\t\t0\t\t],\n\t\t[574,\t\t359,\t\t0\t\t],\n\t\t[235,\t\t575,\t\t0\t\t],\n\t\t[167,\t\t361,\t\t0\t\t],\n\t\t[528,\t\t362,\t\t0\t\t],\n\t\t[363,\t\t344,\t\t0\t\t],\n\t\t[259,\t\t364,\t\t1\t\t],\n\t\t[54,\t\t56,\t\t0\t\t],\n\t\t[365,\t\t364,\t\t0\t\t],\n\t\t[231,\t\t366,\t\t0\t\t],\n\t\t[30,\t\t367,\t\t0\t\t],\n\t\t[61,\t\t367,\t\t1\t\t],\n\t\t[254,\t\t368,\t\t0\t\t],\n\t\t[254,\t\t369,\t\t0\t\t],\n\t\t[254,\t\t370,\t\t0\t\t],\n\t\t[99,\t\t358,\t\t0\t\t],\n\t\t[354,\t\t519,\t\t0\t\t],\n\t\t[571,\t\t371,\t\t0\t\t],\n\t\t[207,\t\t372,\t\t0\t\t],\n\t\t[57,\t\t373,\t\t0\t\t],\n\t\t[209,\t\t374,\t\t0\t\t],\n\t\t[375,\t\t376,\t\t0\t\t],\n\t\t[376,\t\t377,\t\t0\t\t],\n\t\t[16,\t\t49,\t\t0\t\t],\n\t\t[318,\t\t377,\t\t0\t\t],\n\t\t[378,\t\t297,\t\t
0\t\t],\n\t\t[562,\t\t379,\t\t0\t\t],\n\t\t[576,\t\t563,\t\t0\t\t],\n\t\t[576,\t\t381,\t\t0\t\t],\n\t\t[577,\t\t576,\t\t1\t\t],\n\t\t[244,\t\t383,\t\t0\t\t],\n\t\t[244,\t\t306,\t\t1\t\t],\n\t\t[383,\t\t306,\t\t1\t\t],\n\t\t[380,\t\t306,\t\t0\t\t],\n\t\t[252,\t\t225,\t\t0\t\t],\n\t\t[220,\t\t76,\t\t0\t\t],\n\t\t[542,\t\t384,\t\t0\t\t],\n\t\t[385,\t\t384,\t\t0\t\t],\n\t\t[542,\t\t385,\t\t0\t\t],\n\t\t[386,\t\t385,\t\t0\t\t],\n\t\t[387,\t\t578,\t\t0\t\t],\n\t\t[332,\t\t388,\t\t1\t\t],\n\t\t[382,\t\t332,\t\t1\t\t],\n\t\t[382,\t\t388,\t\t0\t\t],\n\t\t[579,\t\t578,\t\t0\t\t],\n\t\t[577,\t\t387,\t\t1\t\t],\n\t\t[144,\t\t390,\t\t0\t\t],\n\t\t[37,\t\t49,\t\t0\t\t],\n\t\t[391,\t\t233,\t\t0\t\t],\n\t\t[392,\t\t310,\t\t0\t\t],\n\t\t[260,\t\t393,\t\t0\t\t],\n\t\t[394,\t\t230,\t\t0\t\t],\n\t\t[395,\t\t282,\t\t1\t\t],\n\t\t[395,\t\t244,\t\t0\t\t],\n\t\t[25,\t\t396,\t\t1\t\t],\n\t\t[81,\t\t74,\t\t0\t\t],\n\t\t[278,\t\t80,\t\t1\t\t],\n\t\t[81,\t\t278,\t\t1\t\t],\n\t\t[569,\t\t570,\t\t0\t\t],\n\t\t[397,\t\t552,\t\t0\t\t],\n\t\t[542,\t\t398,\t\t0\t\t],\n\t\t[398,\t\t385,\t\t0\t\t],\n\t\t[399,\t\t499,\t\t0\t\t],\n\t\t[83,\t\t399,\t\t0\t\t],\n\t\t[498,\t\t400,\t\t0\t\t],\n\t\t[518,\t\t239,\t\t1\t\t],\n\t\t[575,\t\t543,\t\t0\t\t],\n\t\t[401,\t\t360,\t\t0\t\t],\n\t\t[580,\t\t581,\t\t0\t\t],\n\t\t[401,\t\t402,\t\t0\t\t],\n\t\t[403,\t\t231,\t\t0\t\t],\n\t\t[189,\t\t360,\t\t1\t\t],\n\t\t[234,\t\t404,\t\t0\t\t],\n\t\t[235,\t\t404,\t\t1\t\t],\n\t\t[235,\t\t580,\t\t0\t\t],\n\t\t[216,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t259,\t\t0\t\t],\n\t\t[405,\t\t318,\t\t0\t\t],\n\t\t[406,\t\t230,\t\t0\t\t],\n\t\t[542,\t\t407,\t\t0\t\t],\n\t\t[23,\t\t408,\t\t0\t\t],\n\t\t[577,\t\t348,\t\t0\t\t],\n\t\t[562,\t\t564,\t\t1\t\t],\n\t\t[582,\t\t507,\t\t0\t\t],\n\t\t[27,\t\t410,\t\t0\t\t],\n\t\t[501,\t\t27,\t\t0\t\t],\n\t\t[27,\t\t411,\t\t0\t\t],\n\t\t[411,\t\t410,\t\t0\t\t],\n\t\t[403,\t\t360,\t\t0\t\t],\n\t\t[412,\t\t360,\t\t0\t\t],\n\t\t[326,\t\t413,\t\t0\t\t],\n\t\t[414,\t\t413,\t\t0\t\t],\n\t\t[6,\t\t297,\t\t0\t\t],\n\t\t[554,\t\t580,\t\t1\t\t],\n\t\t[262,\t\t401,\t\t1\t\t],\n\t\t[499,\t\t556,\t\t1\t\t],\n\t\t[224,\t\t229,\t\t0\t\t],\n\t\t[583,\t\t507,\t\t0\t\t],\n\t\t[415,\t\t307,\t\t0\t\t],\n\t\t[416,\t\t507,\t\t0\t\t],\n\t\t[284,\t\t561,\t\t0\t\t],\n\t\t[543,\t\t417,\t\t0\t\t],\n\t\t[418,\t\t506,\t\t0\t\t],\n\t\t[220,\t\t157,\t\t0\t\t],\n\t\t[295,\t\t419,\t\t0\t\t],\n\t\t[295,\t\t420,\t\t0\t\t],\n\t\t[541,\t\t62,\t\t0\t\t],\n\t\t[52,\t\t421,\t\t0\t\t],\n\t\t[60,\t\t160,\t\t0\t\t],\n\t\t[535,\t\t161,\t\t0\t\t],\n\t\t[267,\t\t282,\t\t0\t\t],\n\t\t[52,\t\t365,\t\t0\t\t],\n\t\t[28,\t\t27,\t\t0\t\t],\n\t\t[30,\t\t201,\t\t1\t\t],\n\t\t[422,\t\t81,\t\t0\t\t],\n\t\t[119,\t\t425,\t\t0\t\t],\n\t\t[423,\t\t425,\t\t0\t\t],\n\t\t[424,\t\t425,\t\t0\t\t],\n\t\t[426,\t\t428,\t\t0\t\t],\n\t\t[427,\t\t428,\t\t0\t\t],\n\t\t[19,\t\t428,\t\t1\t\t],\n\t\t[45,\t\t429,\t\t0\t\t],\n\t\t[44,\t\t429,\t\t0\t\t],\n\t\t[505,\t\t429,\t\t0\t\t],\n\t\t[231,\t\t431,\t\t1\t\t],\n\t\t[190,\t\t431,\t\t1\t\t],\n\t\t[430,\t\t431,\t\t0\t\t],\n\t\t[286,\t\t433,\t\t0\t\t],\n\t\t[432,\t\t433,\t\t0\t\t],\n\t\t[506,\t\t433,\t\t0\t\t],\n\t\t[23,\t\t434,\t\t0\t\t],\n\t\t[400,\t\t434,\t\t0\t\t],\n\t\t[500,\t\t434,\t\t0\t\t],\n\t\t[32,\t\t436,\t\t0\t\t],\n\t\t[435,\t\t436,\t\t0\t\t],\n\t\t[78,\t\t436,\t\t1\t\t],\n\t\t[86,\t\t438,\t\t1\t\t],\n\t\t[437,\t\t438,\t\t0\t\t],\n\t\t[221,\t\t438,\t\t0\t\t],\n\t\t[207,\t\t439,\t\t0\t\t],\n\t\t[516,\t\t439,\t\t0\t\t],\n\t\t[513,\t\t439,\t\t0\t\t],\n\t\t[181,\t\t441,\t\t1\t\t],\n\t\t[440,\t\t441,\t\t0\t\t],\n\t\t[504,\t\t441,\t\t1\t\t],\n\t\t[1
35,\t\t442,\t\t0\t\t],\n\t\t[109,\t\t442,\t\t0\t\t],\n\t\t[112,\t\t442,\t\t0\t\t],\n\t\t[113,\t\t443,\t\t0\t\t],\n\t\t[132,\t\t443,\t\t0\t\t],\n\t\t[107,\t\t443,\t\t0\t\t],\n\t\t[444,\t\t445,\t\t0\t\t],\n\t\t[112,\t\t445,\t\t0\t\t],\n\t\t[109,\t\t445,\t\t0\t\t],\n\t\t[119,\t\t447,\t\t1\t\t],\n\t\t[100,\t\t447,\t\t1\t\t],\n\t\t[446,\t\t447,\t\t0\t\t],\n\t\t[124,\t\t448,\t\t0\t\t],\n\t\t[125,\t\t448,\t\t0\t\t],\n\t\t[131,\t\t448,\t\t0\t\t],\n\t\t[449,\t\t450,\t\t0\t\t],\n\t\t[173,\t\t450,\t\t0\t\t],\n\t\t[184,\t\t450,\t\t0\t\t],\n\t\t[144,\t\t451,\t\t0\t\t],\n\t\t[140,\t\t451,\t\t0\t\t],\n\t\t[514,\t\t451,\t\t0\t\t],\n\t\t[537,\t\t585,\t\t1\t\t],\n\t\t[141,\t\t585,\t\t0\t\t],\n\t\t[584,\t\t585,\t\t0\t\t],\n\t\t[522,\t\t454,\t\t0\t\t],\n\t\t[144,\t\t454,\t\t0\t\t],\n\t\t[453,\t\t454,\t\t0\t\t],\n\t\t[199,\t\t456,\t\t0\t\t],\n\t\t[140,\t\t456,\t\t0\t\t],\n\t\t[455,\t\t456,\t\t0\t\t],\n\t\t[537,\t\t456,\t\t0\t\t],\n\t\t[538,\t\t457,\t\t0\t\t],\n\t\t[153,\t\t457,\t\t0\t\t],\n\t\t[176,\t\t457,\t\t0\t\t],\n\t\t[524,\t\t459,\t\t0\t\t],\n\t\t[458,\t\t459,\t\t0\t\t],\n\t\t[134,\t\t459,\t\t0\t\t],\n\t\t[460,\t\t461,\t\t0\t\t],\n\t\t[150,\t\t461,\t\t0\t\t],\n\t\t[149,\t\t461,\t\t0\t\t],\n\t\t[521,\t\t463,\t\t0\t\t],\n\t\t[462,\t\t463,\t\t0\t\t],\n\t\t[538,\t\t463,\t\t0\t\t],\n\t\t[110,\t\t464,\t\t0\t\t],\n\t\t[90,\t\t464,\t\t0\t\t],\n\t\t[165,\t\t464,\t\t0\t\t],\n\t\t[458,\t\t465,\t\t0\t\t],\n\t\t[134,\t\t465,\t\t0\t\t],\n\t\t[524,\t\t465,\t\t0\t\t],\n\t\t[466,\t\t467,\t\t0\t\t],\n\t\t[110,\t\t467,\t\t0\t\t],\n\t\t[165,\t\t467,\t\t0\t\t],\n\t\t[468,\t\t469,\t\t0\t\t],\n\t\t[541,\t\t469,\t\t0\t\t],\n\t\t[490,\t\t469,\t\t0\t\t],\n\t\t[263,\t\t471,\t\t0\t\t],\n\t\t[470,\t\t471,\t\t0\t\t],\n\t\t[534,\t\t471,\t\t0\t\t],\n\t\t[136,\t\t472,\t\t0\t\t],\n\t\t[110,\t\t472,\t\t0\t\t],\n\t\t[251,\t\t472,\t\t0\t\t],\n\t\t[226,\t\t474,\t\t0\t\t],\n\t\t[473,\t\t474,\t\t0\t\t],\n\t\t[257,\t\t474,\t\t0\t\t],\n\t\t[6,\t\t474,\t\t1\t\t],\n\t\t[299,\t\t475,\t\t1\t\t],\n\t\t[3,\t\t475,\t\t0\t\t],\n\t\t[210,\t\t475,\t\t0\t\t],\n\t\t[297,\t\t476,\t\t0\t\t],\n\t\t[296,\t\t476,\t\t0\t\t],\n\t\t[295,\t\t476,\t\t0\t\t],\n\t\t[313,\t\t478,\t\t1\t\t],\n\t\t[477,\t\t478,\t\t0\t\t],\n\t\t[245,\t\t478,\t\t0\t\t],\n\t\t[479,\t\t481,\t\t0\t\t],\n\t\t[565,\t\t481,\t\t0\t\t],\n\t\t[480,\t\t481,\t\t0\t\t],\n\t\t[415,\t\t482,\t\t0\t\t],\n\t\t[56,\t\t482,\t\t0\t\t],\n\t\t[409,\t\t482,\t\t0\t\t],\n\t\t[483,\t\t484,\t\t0\t\t],\n\t\t[3,\t\t484,\t\t0\t\t],\n\t\t[301,\t\t484,\t\t0\t\t],\n\t\t[233,\t\t485,\t\t0\t\t],\n\t\t[392,\t\t485,\t\t0\t\t],\n\t\t[391,\t\t485,\t\t0\t\t],\n\t\t[579,\t\t488,\t\t0\t\t],\n\t\t[486,\t\t488,\t\t0\t\t],\n\t\t[487,\t\t488,\t\t0\t\t],\n\t\t[270,\t\t489,\t\t0\t\t],\n\t\t[331,\t\t489,\t\t0\t\t],\n\t\t[396,\t\t489,\t\t1\t\t],\n\t\t[519,\t\t253,\t\t0\t\t],\n\t\t[382,\t\t349,\t\t1\t\t],\n\t\t[349,\t\t351,\t\t0\t\t],\n\t\t[459,\t\t465,\t\t0\t\t],\n\t\t[549,\t\t550,\t\t0\t\t],\n\t\t[550,\t\t551,\t\t0\t\t],\n\t\t[194,\t\t195,\t\t0\t\t],\n\t\t[247,\t\t248,\t\t0\t\t],\n\t\t[2,\t\t294,\t\t0\t\t],\n\t\t[549,\t\t551,\t\t0\t\t],\n\t\t[54,\t\t365,\t\t0\t\t],\n\t\t[131,\t\t265,\t\t0\t\t],\n\t\t[91,\t\t92,\t\t0\t\t],\n\t\t[247,\t\t249,\t\t0\t\t],\n\t\t[186,\t\t191,\t\t0\t\t],\n\t\t[129,\t\t173,\t\t0\t\t],\n\t\t[96,\t\t202,\t\t0\t\t],\n\t\t[53,\t\t320,\t\t0\t\t],\n\t\t[24,\t\t396,\t\t0\t\t],\n\t\t[133,\t\t156,\t\t0\t\t],\n\t\t[442,\t\t452,\t\t0\t\t],\n\t\t[445,\t\t452,\t\t0\t\t],\n\t\t[247,\t\t250,\t\t0\t\t],\n\t\t[187,\t\t195,\t\t0\t\t],\n\t\t[216,\t\t236,\t\t0\t\t],\n\t\t[244,\t\t389,\t\t0\t\t],\n\t\t[394,\t\t406,\t\t0\t\t],\n\t\t[44
2,\t\t445,\t\t0\t\t],\n\t\t[442,\t\t444,\t\t0\t\t],\n\t\t[198,\t\t472,\t\t0\t\t],\n\t\t[464,\t\t467,\t\t0\t\t],\n\t\t[198,\t\t251,\t\t0\t\t],\n\t\t[112,\t\t143,\t\t0\t\t],\n\t\t[2,\t\t490,\t\t0\t\t],\n\t\t[5,\t\t491,\t\t0\t\t],\n\t\t[10,\t\t492,\t\t0\t\t],\n\t\t[12,\t\t493,\t\t0\t\t],\n\t\t[13,\t\t494,\t\t0\t\t],\n\t\t[15,\t\t495,\t\t0\t\t],\n\t\t[18,\t\t496,\t\t0\t\t],\n\t\t[20,\t\t497,\t\t0\t\t],\n\t\t[22,\t\t498,\t\t0\t\t],\n\t\t[24,\t\t499,\t\t0\t\t],\n\t\t[26,\t\t500,\t\t0\t\t],\n\t\t[30,\t\t501,\t\t0\t\t],\n\t\t[32,\t\t502,\t\t0\t\t],\n\t\t[37,\t\t503,\t\t0\t\t],\n\t\t[42,\t\t504,\t\t0\t\t],\n\t\t[46,\t\t505,\t\t0\t\t],\n\t\t[52,\t\t506,\t\t0\t\t],\n\t\t[56,\t\t507,\t\t0\t\t],\n\t\t[61,\t\t508,\t\t0\t\t],\n\t\t[68,\t\t509,\t\t0\t\t],\n\t\t[69,\t\t510,\t\t0\t\t],\n\t\t[74,\t\t511,\t\t0\t\t],\n\t\t[78,\t\t512,\t\t0\t\t],\n\t\t[86,\t\t513,\t\t0\t\t],\n\t\t[87,\t\t514,\t\t0\t\t],\n\t\t[94,\t\t515,\t\t0\t\t],\n\t\t[95,\t\t516,\t\t0\t\t],\n\t\t[96,\t\t517,\t\t0\t\t],\n\t\t[99,\t\t518,\t\t0\t\t],\n\t\t[100,\t\t519,\t\t0\t\t],\n\t\t[104,\t\t520,\t\t0\t\t],\n\t\t[105,\t\t521,\t\t0\t\t],\n\t\t[106,\t\t522,\t\t0\t\t],\n\t\t[107,\t\t523,\t\t0\t\t],\n\t\t[117,\t\t524,\t\t0\t\t],\n\t\t[120,\t\t525,\t\t0\t\t],\n\t\t[123,\t\t526,\t\t0\t\t],\n\t\t[124,\t\t527,\t\t0\t\t],\n\t\t[125,\t\t528,\t\t0\t\t],\n\t\t[128,\t\t529,\t\t0\t\t],\n\t\t[129,\t\t530,\t\t0\t\t],\n\t\t[138,\t\t531,\t\t0\t\t],\n\t\t[143,\t\t532,\t\t0\t\t],\n\t\t[156,\t\t533,\t\t0\t\t],\n\t\t[157,\t\t534,\t\t0\t\t],\n\t\t[159,\t\t535,\t\t0\t\t],\n\t\t[160,\t\t536,\t\t0\t\t],\n\t\t[165,\t\t537,\t\t0\t\t],\n\t\t[184,\t\t538,\t\t0\t\t],\n\t\t[191,\t\t539,\t\t0\t\t],\n\t\t[195,\t\t540,\t\t0\t\t],\n\t\t[201,\t\t541,\t\t0\t\t],\n\t\t[220,\t\t542,\t\t0\t\t],\n\t\t[231,\t\t543,\t\t0\t\t],\n\t\t[232,\t\t544,\t\t0\t\t],\n\t\t[233,\t\t545,\t\t0\t\t],\n\t\t[236,\t\t546,\t\t0\t\t],\n\t\t[245,\t\t547,\t\t0\t\t],\n\t\t[246,\t\t548,\t\t0\t\t],\n\t\t[248,\t\t549,\t\t0\t\t],\n\t\t[249,\t\t550,\t\t0\t\t],\n\t\t[250,\t\t551,\t\t0\t\t],\n\t\t[259,\t\t552,\t\t0\t\t],\n\t\t[261,\t\t553,\t\t0\t\t],\n\t\t[262,\t\t554,\t\t0\t\t],\n\t\t[265,\t\t555,\t\t0\t\t],\n\t\t[270,\t\t556,\t\t0\t\t],\n\t\t[277,\t\t557,\t\t0\t\t],\n\t\t[279,\t\t558,\t\t0\t\t],\n\t\t[280,\t\t559,\t\t0\t\t],\n\t\t[290,\t\t560,\t\t0\t\t],\n\t\t[301,\t\t561,\t\t0\t\t],\n\t\t[305,\t\t562,\t\t0\t\t],\n\t\t[306,\t\t563,\t\t0\t\t],\n\t\t[310,\t\t564,\t\t0\t\t],\n\t\t[313,\t\t565,\t\t0\t\t],\n\t\t[315,\t\t566,\t\t0\t\t],\n\t\t[320,\t\t567,\t\t0\t\t],\n\t\t[330,\t\t568,\t\t0\t\t],\n\t\t[332,\t\t569,\t\t0\t\t],\n\t\t[334,\t\t570,\t\t0\t\t],\n\t\t[336,\t\t571,\t\t0\t\t],\n\t\t[349,\t\t572,\t\t0\t\t],\n\t\t[351,\t\t573,\t\t0\t\t],\n\t\t[358,\t\t574,\t\t0\t\t],\n\t\t[360,\t\t575,\t\t0\t\t],\n\t\t[380,\t\t576,\t\t0\t\t],\n\t\t[382,\t\t577,\t\t0\t\t],\n\t\t[383,\t\t578,\t\t0\t\t],\n\t\t[389,\t\t579,\t\t0\t\t],\n\t\t[401,\t\t580,\t\t0\t\t],\n\t\t[402,\t\t581,\t\t0\t\t],\n\t\t[409,\t\t582,\t\t0\t\t],\n\t\t[415,\t\t583,\t\t0\t\t],\n\t\t[444,\t\t584,\t\t0\t\t],\n\t\t[452,\t\t585,\t\t0\t\t]\n\t])\n\tppc[\"parameters\"] = {\n\t\t\"x_trans_sg\": 0.003, \n\t\t\"x_trans_fm\": 0.001, \n\t\t\"x_trans_fl\": 0.001, \n\t\t\"d_l\": 1e-3, \n\t\t\"d_l_perturb\": 1e-5, \n\t\t\"w_1_ij\": 1, \n\t\t\"w_2_ij\": 1, \n\t\t\"w_3_ij\": 1, \n\t\t\"w_4_ij\": 1, \n\t\t\"b_r\": 238, \n\t\t\"b_c\": 248 }\n\treturn ppc", "from numpy import array\ndef case_de_103():\n\tppc = {\"version\": '2'}\n\tppc[\"baseMVA\"] = 100.0\n\tppc[\"bus\"] = array([\n\t\t[75, 2, 121.51, 24.3, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[502, 2, 268.78, 
53.76, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[44, 2, 165.65, 33.13, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[492, 2, 91.44, 18.29, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[180, 2, 53.05, 10.61, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[46, 1, 0, 0, 0, 0, 5, -7.787774860459645e+16,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[21, 2, 1064.85, 212.97, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[33, 2, 219.23, 43.85, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[559, 2, 81.12, 16.22, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[18, 1, 0, 0, 0, 0, 5, -6.69730734444609e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[73, 2, 97.49, 19.5, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[503, 2, 82.32, 16.46, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[511, 2, 120.52, 24.1, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[72, 2, 304.52, 60.9, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[288, 2, 71.16, 14.23, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[339, 2, 177.74, 35.55, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[321, 2, 229.2, 45.84, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[8, 2, 176.08, 35.22, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[292, 2, 145.2, 29.04, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[343, 2, 129.28, 25.86, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[12, 1, 0, 0, 0, 0, 5, -1.916397677278266e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[340, 2, 150.27, 30.05, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[177, 2, 30.93, 6.19, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[497, 2, 1123.12, 224.62, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[102, 2, 162.93, 32.59, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[311, 2, 223.96, 44.79, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[429, 2, 383.34, 76.67, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[32, 1, 0, 0, 0, 0, 5, -2.282448594427011e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[22, 1, 0, 0, 0, 0, 5, -2.1069673103384982e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[101, 2, 84.18, 16.84, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[71, 2, 185.93, 37.19, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[558, 2, 151.57, 30.31, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[217, 2, 45.86, 9.17, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[322, 2, 29.18, 5.84, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[278, 2, 169.55, 33.91, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[498, 2, 52.67, 10.53, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[47, 2, 382.34, 76.47, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[346, 2, 351.87, 70.37, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[74, 1, 0, 0, 0, 0, 5, -2.405934249771567e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[557, 2, 257.05, 51.41, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[42, 1, 0, 0, 0, 0, 5, -8.936648406394546e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[39, 3, 75.21, 15.04, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[45, 2, 87.93, 17.59, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[493, 2, 117.86, 23.57, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[98, 2, 118.88, 23.78, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[435, 2, 169.83, 33.97, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[338, 2, 287.38, 57.48, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 
],\n\t\t[79, 2, 117.29, 23.46, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[78, 1, 0, 0, 0, 0, 5, -1.1997133719944286e+19,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[512, 2, 79.61, 15.92, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[276, 2, 217.19, 43.44, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[569, 2, 210.34, 42.07, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[37, 1, 0, 0, 0, 0, 5, -9.720320406176698e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[179, 2, 60.35, 12.07, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[325, 2, 174.82, 34.96, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[20, 1, 0, 0, 0, 0, 5, -1.711302020151994e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[496, 2, 8.98, 1.8, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[279, 1, 0, 0, 0, 0, 5, 403318111553146.8,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[436, 2, 90.67, 18.13, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[345, 2, 354.44, 70.89, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[505, 2, 382.34, 76.47, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[290, 1, 0, 0, 0, 0, 5, -8.444245269545231e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[363, 2, 358.68, 71.74, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[11, 2, 104.33, 20.87, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[277, 1, 0, 0, 0, 0, 5, -539.5458,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[441, 2, 66.85, 13.37, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[280, 1, 0, 0, 0, 0, 5, -2.4500931120819984e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[504, 2, 53.91, 10.78, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[181, 2, 40.04, 8.01, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[291, 2, 73.65, 14.73, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[344, 2, 324.15, 64.83, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[40, 2, 78.56, 15.71, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[80, 2, 124.58, 24.92, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[183, 2, 542.96, 108.59, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[440, 2, 87.19, 17.44, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[43, 2, 129.48, 25.9, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[10, 1, 0, 0, 0, 0, 5, -1.3417152969216095e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[81, 2, 140.64, 28.13, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[560, 2, 126.73, 25.35, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[341, 2, 135.85, 27.17, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[17, 2, 100.23, 20.05, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[41, 2, 84.43, 16.89, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[192, 2, 63.62, 12.72, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[342, 2, 235.66, 47.13, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[218, 2, 139.73, 27.95, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[65, 2, 6.21, 1.24, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[289, 2, 111.92, 22.38, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[324, 2, 536.66, 107.33, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[48, 2, 262.81, 52.56, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[332, 1, 0, 0, 0, 0, 0, -2.8760099502790296e+19,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[422, 2, 87.5, 17.5, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[23, 2, 139.43, 27.89, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[570, 2, 328.38, 65.68, 0, 0, 0, 1.0, 0, 220.0, 0, 
1.1, 0.9, 0.6, 10 ],\n\t\t[38, 2, 229.68, 45.94, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[31, 2, 174.85, 34.97, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[49, 2, 66.48, 13.3, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[182, 2, 1.81, 0.36, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[9, 2, 119.08, 23.82, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[323, 2, 3.04, 0.61, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[400, 2, 63.65, 12.73, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[30, 1, 0, 0, 0, 0, 0, -8.632174675422519e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],\n\t\t[25, 2, 66.69, 13.34, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ]\n\t])\n\tppc[\"gen\"] = array([\n\t\t[102, 0, 0, 33.95, -8.49, 1.0, 100, 1, 67.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 101.85, 13.58, 20.37, 20.37, 27.16 ],\n\t\t[493, 0, 0, 75.0, -18.75, 1.0, 100, 1, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 225.0, 30.0, 45.0, 45.0, 60.0 ],\n\t\t[493, 0, 0, 15.0, -3.75, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 45.0, 6.0, 9.0, 9.0, 12.0 ],\n\t\t[177, 0, 0, 16.35, -4.09, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 49.05, 6.54, 9.81, 9.81, 13.08 ],\n\t\t[180, 0, 0, 12.7, -3.18, 1.0, 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 38.1, 5.08, 7.62, 7.62, 10.16 ],\n\t\t[180, 0, 0, 166.75, -41.69, 1.0, 100, 1, 333.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 500.25, 66.7, 100.05, 100.05, 133.4 ],\n\t\t[180, 0, 0, 14.45, -3.61, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 43.35, 5.78, 8.67, 8.67, 11.56 ],\n\t\t[183, 0, 0, 11.25, -2.81, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 33.75, 4.5, 6.75, 6.75, 9.0 ],\n\t\t[183, 0, 0, 383.0, -95.75, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1149.0, 153.2, 229.8, 229.8, 306.4 ],\n\t\t[183, 0, 0, 19.0, -4.75, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 57.0, 7.6, 11.4, 11.4, 15.2 ],\n\t\t[183, 0, 0, 12.0, -3.0, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 36.0, 4.8, 7.2, 7.2, 9.6 ],\n\t\t[496, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],\n\t\t[21, 0, 0, 63.5, -15.88, 1.0, 100, 1, 127.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 190.5, 25.4, 38.1, 38.1, 50.8 ],\n\t\t[21, 0, 0, 97.0, -24.25, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 291.0, 38.8, 58.2, 58.2, 77.6 ],\n\t\t[21, 0, 0, 8.2, -2.05, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 24.6, 3.28, 4.92, 4.92, 6.56 ],\n\t\t[217, 0, 0, 54.0, -13.5, 1.0, 100, 1, 108.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 162.0, 21.6, 32.4, 32.4, 43.2 ],\n\t\t[217, 0, 0, 254.0, -63.5, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 762.0, 101.6, 152.4, 152.4, 203.2 ],\n\t\t[217, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],\n\t\t[498, 0, 0, 149.25, -37.31, 1.0, 100, 1, 298.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 447.75, 59.7, 89.55, 89.55, 119.4 ],\n\t\t[557, 0, 0, 45.4, -11.35, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 136.2, 18.16, 27.24, 27.24, 36.32 ],\n\t\t[558, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],\n\t\t[559, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],\n\t\t[288, 0, 0, 7.85, -1.96, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 23.55, 3.14, 4.71, 4.71, 6.28 ],\n\t\t[289, 0, 0, 552.5, -138.12, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1657.5, 221.0, 331.5, 331.5, 442.0 ],\n\t\t[560, 0, 0, 10.15, -2.54, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 30.45, 4.06, 6.09, 6.09, 8.12 ],\n\t\t[560, 0, 0, 108.0, -27.0, 1.0, 100, 1, 216.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 324.0, 43.2, 64.8, 64.8, 86.4 ],\n\t\t[560, 0, 0, 29.5, -7.38, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 88.5, 11.8, 17.7, 17.7, 23.6 ],\n\t\t[292, 0, 0, 6.75, -1.69, 1.0, 100, 1, 13.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 20.25, 2.7, 4.05, 4.05, 5.4 ],\n\t\t[292, 0, 0, 5.6, -1.4, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 16.8, 2.24, 3.36, 3.36, 4.48 ],\n\t\t[31, 0, 0, 97.7, -24.42, 1.0, 100, 1, 195.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 293.1, 39.08, 58.62, 58.62, 78.16 ],\n\t\t[311, 0, 0, 437.5, -109.38, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1312.5, 175.0, 262.5, 262.5, 350.0 ],\n\t\t[321, 0, 0, 6.45, -1.61, 1.0, 100, 1, 12.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 19.35, 2.58, 3.87, 3.87, 5.16 ],\n\t\t[324, 0, 0, 159.9, -39.98, 1.0, 100, 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 479.7, 63.96, 95.94, 95.94, 127.92 ],\n\t\t[325, 0, 0, 13.45, -3.36, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 40.35, 5.38, 8.07, 8.07, 10.76 ],\n\t\t[502, 0, 0, 54.55, -13.64, 1.0, 100, 1, 109.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 163.65, 21.82, 32.73, 32.73, 43.64 ],\n\t\t[33, 0, 0, 15.9, -3.98, 1.0, 100, 1, 31.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.7, 6.36, 9.54, 9.54, 12.72 ],\n\t\t[570, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],\n\t\t[570, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],\n\t\t[338, 0, 0, 149.8, -37.45, 1.0, 100, 1, 299.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 449.4, 59.92, 89.88, 89.88, 119.84 ],\n\t\t[338, 0, 0, 41.25, -10.31, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 123.75, 16.5, 24.75, 24.75, 33.0 ],\n\t\t[339, 0, 0, 67.0, -16.75, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 201.0, 26.8, 40.2, 40.2, 53.6 ],\n\t\t[339, 0, 0, 79.5, -19.88, 1.0, 100, 1, 159.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 238.5, 31.8, 47.7, 47.7, 63.6 ],\n\t\t[339, 0, 0, 55.5, -13.88, 1.0, 100, 1, 111.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 166.5, 22.2, 33.3, 33.3, 44.4 ],\n\t\t[339, 0, 0, 21.35, -5.34, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 64.05, 8.54, 12.81, 12.81, 17.08 ],\n\t\t[339, 0, 0, 29.0, -7.25, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.0, 11.6, 17.4, 17.4, 23.2 ],\n\t\t[340, 0, 0, 9.75, -2.44, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 29.25, 3.9, 5.85, 5.85, 7.8 ],\n\t\t[342, 0, 0, 34.98, -8.74, 1.0, 100, 1, 69.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 104.93, 13.99, 20.98, 20.98, 27.98 ],\n\t\t[345, 0, 0, 105.5, -26.38, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 316.5, 42.2, 63.3, 63.3, 84.4 ],\n\t\t[345, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],\n\t\t[345, 0, 0, 163.5, -40.88, 1.0, 100, 1, 327.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 490.5, 65.4, 98.1, 98.1, 130.8 ],\n\t\t[346, 0, 0, 229.45, -57.36, 1.0, 100, 1, 458.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 688.35, 91.78, 137.67, 137.67, 183.56 ],\n\t\t[363, 0, 0, 40.9, -10.22, 1.0, 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 122.7, 16.36, 24.54, 24.54, 32.72 ],\n\t\t[363, 0, 0, 344.0, -86.0, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1032.0, 137.6, 206.4, 206.4, 275.2 ],\n\t\t[363, 0, 0, 18.0, -4.5, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 54.0, 7.2, 10.8, 10.8, 14.4 ],\n\t\t[503, 0, 0, 26.0, -6.5, 1.0, 100, 1, 52.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 78.0, 10.4, 15.6, 15.6, 20.8 ],\n\t\t[503, 0, 0, 680.0, -170.0, 1.0, 100, 1, 1360.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 2040.0, 272.0, 408.0, 408.0, 544.0 ],\n\t\t[503, 0, 0, 29.2, -7.3, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.6, 11.68, 17.52, 17.52, 23.36 ],\n\t\t[39, 0, 0, 1149.65, -287.41, 1.0, 100, 1, 2299.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 3448.95, 459.86, 689.79, 689.79, 919.72 ],\n\t\t[40, 0, 0, 24.0, -6.0, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 72.0, 9.6, 14.4, 14.4, 19.2 ],\n\t\t[400, 0, 0, 44.0, -11.0, 1.0, 100, 1, 88.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 132.0, 17.6, 26.4, 26.4, 35.2 ],\n\t\t[400, 0, 0, 30.0, -7.5, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 90.0, 12.0, 18.0, 18.0, 24.0 ],\n\t\t[400, 0, 0, 79.0, -19.75, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 237.0, 31.6, 47.4, 47.4, 63.2 ],\n\t\t[422, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],\n\t\t[43, 0, 0, 98.0, -24.5, 1.0, 100, 1, 196.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 294.0, 39.2, 58.8, 58.8, 78.4 ],\n\t\t[43, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],\n\t\t[429, 0, 0, 82.0, -20.5, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.0, 32.8, 49.2, 49.2, 65.6 ],\n\t\t[44, 0, 0, 13.0, -3.25, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.0, 5.2, 7.8, 7.8, 10.4 ],\n\t\t[435, 0, 0, 91.0, -22.75, 1.0, 100, 1, 182.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 273.0, 36.4, 54.6, 54.6, 72.8 ],\n\t\t[435, 0, 0, 30.75, -7.69, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 92.25, 12.3, 18.45, 18.45, 24.6 ],\n\t\t[436, 0, 0, 13.25, -3.31, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.75, 5.3, 7.95, 7.95, 10.6 ],\n\t\t[440, 0, 0, 7.35, -1.84, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 22.05, 2.94, 4.41, 4.41, 5.88 ],\n\t\t[441, 0, 0, 37.5, -9.38, 1.0, 100, 1, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 112.5, 15.0, 22.5, 22.5, 30.0 ],\n\t\t[45, 0, 0, 148.0, -37.0, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 444.0, 59.2, 88.8, 88.8, 118.4 ],\n\t\t[45, 0, 0, 11.55, -2.89, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.65, 4.62, 6.93, 6.93, 9.24 ],\n\t\t[47, 0, 0, 222.0, -55.5, 1.0, 100, 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 666.0, 88.8, 133.2, 133.2, 177.6 ],\n\t\t[47, 0, 0, 15.85, -3.96, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.55, 
6.34, 9.51, 9.51, 12.68 ],\n\t\t[49, 0, 0, 176.0, -44.0, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 528.0, 70.4, 105.6, 105.6, 140.8 ],\n\t\t[49, 0, 0, 33.0, -8.25, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 99.0, 13.2, 19.8, 19.8, 26.4 ],\n\t\t[49, 0, 0, 18.75, -4.69, 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 56.25, 7.5, 11.25, 11.25, 15.0 ],\n\t\t[65, 0, 0, 82.25, -20.56, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.75, 32.9, 49.35, 49.35, 65.8 ],\n\t\t[71, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],\n\t\t[71, 0, 0, 80.55, -20.14, 1.0, 100, 1, 161.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 241.65, 32.22, 48.33, 48.33, 64.44 ],\n\t\t[71, 0, 0, 4.95, -1.24, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 14.85, 1.98, 2.97, 2.97, 3.96 ],\n\t\t[72, 0, 0, 450.0, -112.5, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1350.0, 180.0, 270.0, 270.0, 360.0 ],\n\t\t[72, 0, 0, 75.5, -18.88, 1.0, 100, 1, 151.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 226.5, 30.2, 45.3, 45.3, 60.4 ],\n\t\t[72, 0, 0, 60.0, -15.0, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 180.0, 24.0, 36.0, 36.0, 48.0 ],\n\t\t[511, 0, 0, 61.0, -15.25, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 183.0, 24.4, 36.6, 36.6, 48.8 ],\n\t\t[511, 0, 0, 11.65, -2.91, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.95, 4.66, 6.99, 6.99, 9.32 ],\n\t\t[75, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],\n\t\t[79, 0, 0, 375.0, -93.75, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1125.0, 150.0, 225.0, 225.0, 300.0 ],\n\t\t[79, 0, 0, 9.35, -2.34, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 28.05, 3.74, 5.61, 5.61, 7.48 ],\n\t\t[81, 0, 0, 1417.5, -354.38, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 4252.5, 567.0, 850.5, 850.5, 1134.0 ],\n\t\t[81, 0, 0, 62.25, -15.56, 1.0, 100, 1, 124.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 186.75, 24.9, 37.35, 37.35, 49.8 ],\n\t\t[218, 0, 0, 17.03, -4.26, 1.0, 100, 1, 34.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 51.08, 6.81, 10.22, 10.22, 13.62 ],\n\t\t[498, 0, 0, 723.6, -180.9, 1.0, 100, 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2170.8, 289.44, 434.16, 434.16, 578.88 ],\n\t\t[8, 0, 0, 14.98, -3.74, 1.0, 100, 1, 29.96, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 44.94, 5.99, 8.99, 8.99, 11.98 ],\n\t\t[9, 0, 0, 12.62, -3.16, 1.0, 100, 1, 25.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 37.86, 5.05, 7.57, 7.57, 10.1 ],\n\t\t[11, 0, 0, 10.5, -2.62, 1.0, 100, 1, 20.99, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 31.49, 4.2, 6.3, 6.3, 8.4 ],\n\t\t[17, 0, 0, 2.51, -0.63, 1.0, 100, 1, 5.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.52, 1.0, 1.5, 1.5, 2.01 ],\n\t\t[21, 0, 0, 7.3, -1.82, 1.0, 100, 1, 14.59, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 21.89, 2.92, 4.38, 4.38, 5.84 ],\n\t\t[23, 0, 0, 16.29, -4.07, 1.0, 100, 1, 32.59, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 48.88, 6.52, 9.78, 9.78, 13.04 ],\n\t\t[25, 0, 0, 37.22, -9.31, 1.0, 100, 1, 74.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 111.66, 14.89, 22.33, 22.33, 29.78 ],\n\t\t[31, 0, 0, 1.8, -0.45, 1.0, 100, 1, 3.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 5.4, 0.72, 1.08, 1.08, 
1.44 ],\n\t\t[33, 0, 0, 4.25, -1.06, 1.0, 100, 1, 8.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 12.75, 1.7, 2.55, 2.55, 3.4 ],\n\t\t[38, 0, 0, 0.19, -0.05, 1.0, 100, 1, 0.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.57, 0.08, 0.11, 0.11, 0.15 ],\n\t\t[39, 0, 0, 61.5, -15.37, 1.0, 100, 1, 123.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 184.5, 24.6, 36.9, 36.9, 49.2 ],\n\t\t[40, 0, 0, 62.52, -15.63, 1.0, 100, 1, 125.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 187.57, 25.01, 37.51, 37.51, 50.02 ],\n\t\t[41, 0, 0, 125.88, -31.47, 1.0, 100, 1, 251.76, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 377.64, 50.35, 75.53, 75.53, 100.71 ],\n\t\t[43, 0, 0, 43.99, -11.0, 1.0, 100, 1, 87.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 131.96, 17.59, 26.39, 26.39, 35.19 ],\n\t\t[44, 0, 0, 0.2, -0.05, 1.0, 100, 1, 0.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.61, 0.08, 0.12, 0.12, 0.16 ],\n\t\t[45, 0, 0, 4.58, -1.15, 1.0, 100, 1, 9.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.75, 1.83, 2.75, 2.75, 3.67 ],\n\t\t[47, 0, 0, 0.12, -0.03, 1.0, 100, 1, 0.23, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.35, 0.05, 0.07, 0.07, 0.09 ],\n\t\t[48, 0, 0, 0.3, -0.07, 1.0, 100, 1, 0.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.9, 0.12, 0.18, 0.18, 0.24 ],\n\t\t[49, 0, 0, 14.46, -3.62, 1.0, 100, 1, 28.93, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 43.39, 5.79, 8.68, 8.68, 11.57 ],\n\t\t[65, 0, 0, 0.83, -0.21, 1.0, 100, 1, 1.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.48, 0.33, 0.5, 0.5, 0.66 ],\n\t\t[71, 0, 0, 26.41, -6.6, 1.0, 100, 1, 52.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 79.22, 10.56, 15.84, 15.84, 21.13 ],\n\t\t[72, 0, 0, 23.1, -5.77, 1.0, 100, 1, 46.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 69.29, 9.24, 13.86, 13.86, 18.48 ],\n\t\t[73, 0, 0, 19.42, -4.85, 1.0, 100, 1, 38.84, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 58.26, 7.77, 11.65, 11.65, 15.54 ],\n\t\t[75, 0, 0, 26.18, -6.54, 1.0, 100, 1, 52.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 78.54, 10.47, 15.71, 15.71, 20.94 ],\n\t\t[79, 0, 0, 1.07, -0.27, 1.0, 100, 1, 2.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.2, 0.43, 0.64, 0.64, 0.85 ],\n\t\t[80, 0, 0, 0.7, -0.18, 1.0, 100, 1, 1.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.1, 0.28, 0.42, 0.42, 0.56 ],\n\t\t[81, 0, 0, 6.31, -1.58, 1.0, 100, 1, 12.63, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 18.94, 2.53, 3.79, 3.79, 5.05 ],\n\t\t[98, 0, 0, 17.56, -4.39, 1.0, 100, 1, 35.12, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 52.68, 7.02, 10.54, 10.54, 14.05 ],\n\t\t[101, 0, 0, 56.27, -14.07, 1.0, 100, 1, 112.54, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 168.82, 22.51, 33.76, 33.76, 45.02 ],\n\t\t[102, 0, 0, 46.82, -11.71, 1.0, 100, 1, 93.64, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 140.46, 18.73, 28.09, 28.09, 37.46 ],\n\t\t[177, 0, 0, 46.44, -11.61, 1.0, 100, 1, 92.89, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 139.33, 18.58, 27.87, 27.87, 37.15 ],\n\t\t[179, 0, 0, 35.56, -8.89, 1.0, 100, 1, 71.12, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 106.68, 14.22, 21.34, 21.34, 28.45 ],\n\t\t[180, 0, 0, 13.26, -3.32, 1.0, 100, 1, 26.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 39.79, 5.31, 7.96, 7.96, 10.61 ],\n\t\t[181, 0, 0, 30.34, -7.58, 1.0, 100, 1, 60.67, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 91.01, 12.13, 18.2, 18.2, 24.27 ],\n\t\t[182, 0, 0, 3.04, -0.76, 1.0, 100, 1, 6.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.11, 1.21, 1.82, 1.82, 2.43 
],\n\t\t[183, 0, 0, 15.27, -3.82, 1.0, 100, 1, 30.54, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 45.81, 6.11, 9.16, 9.16, 12.22 ],\n\t\t[192, 0, 0, 38.68, -9.67, 1.0, 100, 1, 77.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 116.03, 15.47, 23.21, 23.21, 30.94 ],\n\t\t[217, 0, 0, 43.41, -10.85, 1.0, 100, 1, 86.82, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 130.24, 17.36, 26.05, 26.05, 34.73 ],\n\t\t[218, 0, 0, 21.0, -5.25, 1.0, 100, 1, 42.01, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 63.01, 8.4, 12.6, 12.6, 16.8 ],\n\t\t[276, 0, 0, 29.37, -7.34, 1.0, 100, 1, 58.73, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 88.1, 11.75, 17.62, 17.62, 23.49 ],\n\t\t[278, 0, 0, 4.74, -1.18, 1.0, 100, 1, 9.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 14.22, 1.9, 2.84, 2.84, 3.79 ],\n\t\t[288, 0, 0, 3.15, -0.79, 1.0, 100, 1, 6.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.45, 1.26, 1.89, 1.89, 2.52 ],\n\t\t[289, 0, 0, 0.87, -0.22, 1.0, 100, 1, 1.75, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.62, 0.35, 0.52, 0.52, 0.7 ],\n\t\t[291, 0, 0, 1.28, -0.32, 1.0, 100, 1, 2.55, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.83, 0.51, 0.77, 0.77, 1.02 ],\n\t\t[292, 0, 0, 0.81, -0.2, 1.0, 100, 1, 1.63, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.44, 0.33, 0.49, 0.49, 0.65 ],\n\t\t[311, 0, 0, 1.39, -0.35, 1.0, 100, 1, 2.78, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.17, 0.56, 0.83, 0.83, 1.11 ],\n\t\t[321, 0, 0, 2.91, -0.73, 1.0, 100, 1, 5.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 8.74, 1.17, 1.75, 1.75, 2.33 ],\n\t\t[322, 0, 0, 5.52, -1.38, 1.0, 100, 1, 11.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 16.57, 2.21, 3.31, 3.31, 4.42 ],\n\t\t[323, 0, 0, 0.43, -0.11, 1.0, 100, 1, 0.86, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.29, 0.17, 0.26, 0.26, 0.34 ],\n\t\t[324, 0, 0, 3.15, -0.79, 1.0, 100, 1, 6.31, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.46, 1.26, 1.89, 1.89, 2.52 ],\n\t\t[325, 0, 0, 9.15, -2.29, 1.0, 100, 1, 18.31, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 27.46, 3.66, 5.49, 5.49, 7.32 ],\n\t\t[338, 0, 0, 2.73, -0.68, 1.0, 100, 1, 5.46, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 8.19, 1.09, 1.64, 1.64, 2.18 ],\n\t\t[339, 0, 0, 5.26, -1.32, 1.0, 100, 1, 10.52, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 15.78, 2.1, 3.16, 3.16, 4.21 ],\n\t\t[340, 0, 0, 1.53, -0.38, 1.0, 100, 1, 3.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.6, 0.61, 0.92, 0.92, 1.23 ],\n\t\t[341, 0, 0, 0.12, -0.03, 1.0, 100, 1, 0.23, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.35, 0.05, 0.07, 0.07, 0.09 ],\n\t\t[342, 0, 0, 5.32, -1.33, 1.0, 100, 1, 10.63, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 15.95, 2.13, 3.19, 3.19, 4.25 ],\n\t\t[343, 0, 0, 1.56, -0.39, 1.0, 100, 1, 3.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.69, 0.63, 0.94, 0.94, 1.25 ],\n\t\t[344, 0, 0, 0.13, -0.03, 1.0, 100, 1, 0.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.4, 0.05, 0.08, 0.08, 0.11 ],\n\t\t[345, 0, 0, 0.21, -0.05, 1.0, 100, 1, 0.42, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.63, 0.08, 0.13, 0.13, 0.17 ],\n\t\t[346, 0, 0, 0.18, -0.04, 1.0, 100, 1, 0.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.54, 0.07, 0.11, 0.11, 0.14 ],\n\t\t[363, 0, 0, 0.19, -0.05, 1.0, 100, 1, 0.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.56, 0.07, 0.11, 0.11, 0.15 ],\n\t\t[400, 0, 0, 68.3, -17.07, 1.0, 100, 1, 136.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 204.9, 27.32, 40.98, 40.98, 54.64 ],\n\t\t[422, 0, 0, 0.54, -0.14, 1.0, 100, 1, 1.08, 0.0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.62, 0.22, 0.32, 0.32, 0.43 ],\n\t\t[429, 0, 0, 0.04, -0.01, 1.0, 100, 1, 0.08, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.13, 0.02, 0.03, 0.03, 0.03 ],\n\t\t[435, 0, 0, 6.26, -1.56, 1.0, 100, 1, 12.52, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 18.77, 2.5, 3.75, 3.75, 5.01 ],\n\t\t[436, 0, 0, 1.21, -0.3, 1.0, 100, 1, 2.42, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.63, 0.48, 0.73, 0.73, 0.97 ],\n\t\t[440, 0, 0, 38.08, -9.52, 1.0, 100, 1, 76.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 114.25, 15.23, 22.85, 22.85, 30.47 ],\n\t\t[441, 0, 0, 32.25, -8.06, 1.0, 100, 1, 64.49, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 96.74, 12.9, 19.35, 19.35, 25.8 ],\n\t\t[492, 0, 0, 41.69, -10.42, 1.0, 100, 1, 83.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 125.06, 16.67, 25.01, 25.01, 33.35 ],\n\t\t[493, 0, 0, 9.7, -2.43, 1.0, 100, 1, 19.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 29.1, 3.88, 5.82, 5.82, 7.76 ],\n\t\t[496, 0, 0, 52.97, -13.24, 1.0, 100, 1, 105.93, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 158.9, 21.19, 31.78, 31.78, 42.37 ],\n\t\t[497, 0, 0, 2.63, -0.66, 1.0, 100, 1, 5.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.89, 1.05, 1.58, 1.58, 2.1 ],\n\t\t[498, 0, 0, 574.35, -143.59, 1.0, 100, 1, 1148.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1723.05, 229.74, 344.61, 344.61, 459.48 ],\n\t\t[502, 0, 0, 4.63, -1.16, 1.0, 100, 1, 9.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.89, 1.85, 2.78, 2.78, 3.7 ],\n\t\t[503, 0, 0, 37.55, -9.39, 1.0, 100, 1, 75.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 112.66, 15.02, 22.53, 22.53, 30.04 ],\n\t\t[504, 0, 0, 18.02, -4.51, 1.0, 100, 1, 36.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 54.07, 7.21, 10.81, 10.81, 14.42 ],\n\t\t[505, 0, 0, 0.03, -0.01, 1.0, 100, 1, 0.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.1, 0.01, 0.02, 0.02, 0.03 ],\n\t\t[511, 0, 0, 9.18, -2.29, 1.0, 100, 1, 18.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 27.54, 3.67, 5.51, 5.51, 7.34 ],\n\t\t[512, 0, 0, 3.83, -0.96, 1.0, 100, 1, 7.67, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 11.5, 1.53, 2.3, 2.3, 3.07 ],\n\t\t[557, 0, 0, 4.85, -1.21, 1.0, 100, 1, 9.71, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 14.56, 1.94, 2.91, 2.91, 3.88 ],\n\t\t[558, 0, 0, 9.09, -2.27, 1.0, 100, 1, 18.18, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 27.28, 3.64, 5.46, 5.46, 7.27 ],\n\t\t[559, 0, 0, 3.23, -0.81, 1.0, 100, 1, 6.47, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.7, 1.29, 1.94, 1.94, 2.59 ],\n\t\t[560, 0, 0, 32.61, -8.15, 1.0, 100, 1, 65.21, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 97.82, 13.04, 19.56, 19.56, 26.09 ],\n\t\t[569, 0, 0, 0.83, -0.21, 1.0, 100, 1, 1.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.48, 0.33, 0.5, 0.5, 0.66 ],\n\t\t[570, 0, 0, 4.77, -1.19, 1.0, 100, 1, 9.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 14.3, 1.91, 2.86, 2.86, 3.81 ],\n\t\t[8, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[9, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[11, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[17, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[21, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[23, 
0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[25, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[31, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[33, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[38, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[39, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[40, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[41, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[43, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[44, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[45, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[47, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[48, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[49, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[65, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[71, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[72, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[73, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[75, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[79, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[80, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[81, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[98, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[101, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[102, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[177, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[179, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[180, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[181, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[182, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 
0.0, 0.0 ],\n\t\t[183, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[192, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[217, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[218, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[276, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[278, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[288, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[289, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[291, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[292, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[311, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[321, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[322, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[323, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[324, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[325, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[338, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[339, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[340, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[341, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[342, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[343, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[344, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[345, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[346, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[363, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[400, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[422, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[429, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[435, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[436, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[440, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[441, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[492, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[493, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[496, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[497, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[498, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[502, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[503, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[504, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[505, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[511, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[512, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[557, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[558, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[559, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[560, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[569, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t\t[570, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n\t])\n\tppc[\"branch\"] = array([\n\t\t[8, 9, 0.00024379, 0.00243793, 0.35006327, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[492, 11, 0.0045562, 0.01822479, 0.04820045, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[11, 493, 0.00757174, 0.03028694, 0.0801021, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[492, 493, 0.01130413, 0.04521653, 0.11958747, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[17, 18, 0.00462352, 0.04623523, 0.9335989, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[17, 12, 0.0005602, 0.00560203, 0.1131183, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[20, 21, 0.00108334, 0.01083345, 0.09722357, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[20, 22, 0.00099339, 0.00993386, 0.3566014, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[497, 23, 0.0005476, 0.00219041, 0.00579315, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[25, 22, 0.00035578, 0.00355783, 0.03192931, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[8, 21, 0.00098947, 0.00989474, 0.0887992, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[31, 32, 0.00299776, 0.02997761, 0.60531903, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[32, 33, 0.00167622, 
0.01676223, 0.33846928, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 10, 0.00240464, 0.0240464, 0.48555384, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[10, 38, 0.00068488, 0.0068488, 0.13829351, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 38, 0.00143783, 0.01437835, 1.16133176, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[39, 40, 0.00452163, 0.0452163, 0.91302431, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[39, 41, 0.0017467, 0.01746699, 0.35269996, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[42, 41, 0.00311454, 0.03114543, 0.6289001, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[18, 42, 0.00343975, 0.03439751, 0.69456727, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[492, 43, 0.00910612, 0.03642446, 0.09633445, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[492, 43, 0.00909587, 0.03638347, 0.09622603, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[44, 45, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[44, 505, 0.00151537, 0.00606149, 0.01603126, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[46, 12, 0.00029449, 0.00294494, 0.1057163, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[46, 12, 0.00029482, 0.00294823, 0.10583438, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[47, 48, 0.00053442, 0.00534418, 0.01199019, 299, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[31, 33, 0.0013476, 0.01347599, 0.27211226, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[71, 72, 0.00088786, 0.00887864, 0.31872128, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[73, 74, 0.00125295, 0.01252955, 0.25300129, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 75, 0.00274591, 0.02745914, 0.5544652, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[72, 75, 0.00066887, 0.00668871, 0.24010838, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 72, 0.00362221, 0.03622207, 0.73140949, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[73, 72, 0.00254751, 0.02547507, 0.51440208, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[18, 40, 0.00130277, 0.0130277, 0.26306019, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[492, 45, 0.00771758, 0.0308703, 0.18370115, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[10, 74, 0.00301674, 0.03016736, 0.60915055, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[45, 511, 0.02050843, 0.08203372, 0.05424015, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[78, 32, 0.00134588, 0.0134588, 0.48313778, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[79, 80, 0.00076233, 0.00762327, 0.06841417, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[79, 80, 0.00076174, 0.00761738, 0.06836134, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[81, 79, 0.00215305, 0.02153047, 0.19322279, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[81, 79, 0.00215357, 0.02153566, 0.1932694, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[42, 98, 0.00061861, 0.00618611, 0.22206638, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[42, 98, 0.00061835, 0.00618352, 0.22197315, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[42, 101, 0.00081653, 0.00816534, 0.29311568, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[102, 42, 0.0012403, 0.01240305, 0.44523901, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 39, 0.00065102, 0.00651021, 0.23370076, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[177, 496, 0.00932496, 0.03729983, 0.09864961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[177, 496, 0.00931603, 0.03726413, 0.09855518, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[179, 493, 0.01426992, 0.05707967, 0.15096279, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[180, 181, 0.01025686, 0.04102744, 0.10850827, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[182, 180, 0.00433818, 0.01735273, 0.04589403, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[179, 181, 0.00489306, 0.01957223, 
0.05176412, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[180, 493, 0.0166914, 0.06676562, 0.17657993, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[183, 30, 0.00049645, 0.00496451, 0.17821369, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[183, 21, 0.00025687, 0.00256873, 0.36884485, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[183, 21, 0.00051295, 0.0051295, 0.18413654, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[183, 30, 0.00049609, 0.00496087, 0.17808317, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[504, 192, 0.00015355, 0.00061421, 0.00162446, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[504, 192, 0.00015421, 0.00061686, 0.00163145, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[217, 98, 0.00012787, 0.00127874, 0.04590362, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[504, 218, 0.00687025, 0.02748099, 0.07268099, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[177, 504, 0.01763702, 0.0705481, 0.18658373, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 39, 0.00086777, 0.00867775, 0.1752243, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[20, 22, 0.00099413, 0.00994131, 0.35686864, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[20, 21, 0.00108314, 0.01083137, 0.09720492, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[503, 276, 0.00335322, 0.01341289, 0.03547406, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[503, 276, 0.00335372, 0.01341488, 0.03547931, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[503, 504, 0.03215471, 0.12861884, 0.34016769, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[503, 504, 0.03216364, 0.12865455, 0.34026211, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[177, 218, 0.01082595, 0.0433038, 0.11452874, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[182, 180, 0.00433157, 0.01732628, 0.04582409, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 278, 0.00143837, 0.01438366, 0.51633804, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 278, 0.00143823, 0.01438227, 0.51628832, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[557, 558, 0.01085322, 0.04341289, 0.25833884, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[557, 559, 0.00853967, 0.03415868, 0.09034196, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[559, 558, 0.01118579, 0.04474314, 0.11833547, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 78, 0.00358577, 0.03585769, 0.32180078, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 279, 0.00213909, 0.02139093, 0.19197048, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[78, 279, 0.0015812, 0.01581198, 0.14190284, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[8, 102, 0.00151001, 0.01510007, 0.5420555, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[8, 101, 0.00192469, 0.01924688, 0.69091598, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[80, 288, 0.00159713, 0.01597126, 0.14333228, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[80, 288, 0.00159681, 0.01596814, 0.14330432, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[80, 289, 0.00013825, 0.0013825, 0.027916, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[80, 289, 0.00014241, 0.00142405, 0.02875502, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[80, 289, 0.00015471, 0.00154709, 0.03123945, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[80, 289, 0.00015129, 0.00151293, 0.03054959, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[276, 560, 0.00889322, 0.03557289, 0.02352056, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[276, 560, 0.00889157, 0.03556628, 0.02351619, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 290, 0.00112768, 0.01127678, 0.22770489, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[290, 74, 0.00414344, 0.04143444, 0.83665966, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[290, 74, 0.00414319, 0.0414319, 0.83660839, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 
290, 0.0011259, 0.011259, 0.22734598, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[512, 291, 0.00265967, 0.01063868, 0.02813689, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[512, 291, 0.00266496, 0.01065983, 0.02819285, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[78, 292, 0.0011638, 0.01163804, 0.23499969, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[78, 292, 0.001163, 0.01162996, 0.23483654, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[71, 74, 0.00390452, 0.03904524, 0.78841612, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[74, 278, 0.00154224, 0.01542244, 0.55362774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[74, 278, 0.00154245, 0.01542452, 0.55370232, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[32, 292, 0.00096794, 0.00967936, 0.34746542, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[503, 560, 0.00378512, 0.0151405, 0.16017272, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[311, 280, 0.00034337, 0.00343369, 0.1232611, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[280, 278, 0.00097498, 0.00974977, 0.78748387, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[311, 32, 0.00241133, 0.02411334, 0.48690559, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[557, 321, 0.00500298, 0.0200119, 0.05292694, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 65, 0.00188585, 0.01885849, 0.38079775, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[322, 323, 0.00035076, 0.00350762, 0.07082712, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[322, 323, 0.00037006, 0.0037006, 0.0747239, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 324, 0.00197195, 0.01971953, 0.39818407, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[324, 325, 0.00110351, 0.01103509, 0.2228246, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 325, 0.00086657, 0.00866574, 0.17498191, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[332, 78, 0.00129444, 0.01294437, 0.26137749, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[324, 288, 0.00126274, 0.01262742, 0.1133234, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[338, 559, 0.00230702, 0.0092281, 0.09762492, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[339, 559, 0.00890149, 0.03560595, 0.02354242, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[339, 340, 0.02177884, 0.08711537, 0.23040041, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[559, 340, 0.05245818, 0.20983273, 0.13874, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[341, 292, 9.329e-05, 0.00093294, 0.07535316, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[338, 559, 0.00461405, 0.0184562, 0.04881246, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[557, 342, 0.00302595, 0.0121038, 0.03201181, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[558, 343, 0.00266256, 0.01065025, 0.11266997, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[502, 340, 0.01086926, 0.04347702, 0.11498688, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[502, 340, 0.01086876, 0.04347504, 0.11498163, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[72, 32, 0.00135107, 0.01351073, 0.48500226, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[72, 32, 0.001351, 0.01351004, 0.4849774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[344, 345, 5.763e-05, 0.00057629, 0.04654687, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[346, 47, 0.0001134, 0.001134, 0.04070792, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[46, 47, 8.975e-05, 0.00089751, 0.0322183, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[346, 345, 7.218e-05, 0.00072178, 0.02591013, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[363, 344, 2.663e-05, 0.00026627, 0.00955859, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 
],\n\t\t[332, 78, 0.00129421, 0.01294206, 0.26133088, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 49, 0.0016876, 0.01687604, 0.15145211, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 49, 0.0016883, 0.01688296, 0.15151426, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[81, 74, 0.00150357, 0.01503566, 0.13493589, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[81, 74, 0.00150416, 0.01504155, 0.13498871, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[278, 80, 0.00325679, 0.03256787, 0.29227666, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[278, 80, 0.0032572, 0.03257202, 0.29231395, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[81, 278, 0.00421184, 0.04211842, 0.37798704, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[81, 278, 0.0042108, 0.04210803, 0.37789381, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[569, 570, 0.00813488, 0.0325395, 0.08605961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[498, 400, 0.00303355, 0.01213421, 0.03209225, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[557, 342, 0.00300992, 0.01203967, 0.0318422, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[557, 321, 0.00500231, 0.02000926, 0.05291995, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 65, 0.00188603, 0.01886034, 0.38083504, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[8, 21, 0.00098975, 0.00989751, 0.08882406, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[311, 32, 0.00241182, 0.02411819, 0.48700348, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[422, 81, 0.00023449, 0.00234488, 0.02104382, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[422, 81, 0.00023272, 0.00232722, 0.02088534, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[422, 81, 0.00023518, 0.0023518, 0.02110597, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[422, 81, 0.00023269, 0.00232687, 0.02088223, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[45, 429, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[44, 429, 1.322e-05, 5.289e-05, 0.00013989, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[505, 429, 0.00150314, 0.00601256, 0.01590186, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[32, 436, 0.00044813, 0.0044813, 0.16086776, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[435, 436, 6.634e-05, 0.00066343, 0.02381569, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[78, 436, 0.00089768, 0.0089768, 0.32224515, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[181, 441, 0.01020132, 0.04080529, 0.10792074, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[440, 441, 3.306e-05, 0.00013223, 0.00034972, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[504, 441, 0.01479025, 0.05916099, 0.15646741, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],\n\t\t[10, 492, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[12, 493, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[18, 496, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[20, 497, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[22, 498, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[32, 502, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[37, 503, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[42, 504, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[46, 505, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[74, 511, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[78, 512, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[277, 557, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[279, 
558, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[280, 559, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[290, 560, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],\n\t\t[332, 569, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ]\n\t])\n\tppc[\"gencost\"] = array([\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t28.0, 0, 42.0, 21.0, 33.6, 16.8 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t32.0, 0, 48.0, 24.0, 38.4, 19.2 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 
75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t8.0, 0, 12.0, 6.0, 9.6, 4.8 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t26.0, 0, 39.0, 19.5, 31.2, 15.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t25.0, 0, 37.5, 18.75, 30.0, 15.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t100.0, 0, 150.0, 75.0, 120.0, 60.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t50.0, 0, 75.0, 37.5, 60.0, 30.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],\n\t\t[2, 0, 0, 3, 0,\t\t10.0, 0, 15.0, 7.5, 12.0, 6.0 ],\n\t\t[2, 0, 0, 3, 0,\t\t32.0, 0, 48.0, 24.0, 38.4, 19.2 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 
0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 
0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 1.5, 0.75, 1.2, 0.6 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 
],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t\t[2, 0, 0, 3, 0,\t\t0.0, 0, 0.75, 0.375, 0.6, 0.3 ],\n\t])\n\treturn ppc" ]
[ [ "numpy.array" ], [ "numpy.array" ], [ "numpy.array" ], [ "numpy.array" ], [ "numpy.array" ], [ "numpy.array" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
nestauk/la_funding_analysis
[ "bc338583817174f47f2cff2105f4a20a89df4c99" ]
[ "la_funding_analysis/pipeline/cleaning.py" ]
[ "# File: pipeline/cleaning.py\n\"\"\"Functions to clean datasets.\nCalling each function returns a clean version of the associated dataset.\n\"\"\"\n\nimport numpy as np\nimport pandas as pd\n\nfrom la_funding_analysis.getters.local_authority_data import (\n get_epc,\n get_grants,\n get_imd,\n get_old_parties,\n get_parties_models,\n get_fuel_poverty,\n)\nfrom la_funding_analysis.utils.name_cleaners import (\n clean_names,\n model_type,\n strip_and_titlecase,\n)\n\n\ndef get_clean_fuel_poverty():\n \"\"\"Gets and cleans fuel poverty dataset.\"\"\"\n fuel_poverty = get_fuel_poverty()\n #\n fuel_poverty = fuel_poverty.rename(\n columns={\n \"Area Codes\": \"code\",\n \"Area name\": \"region_1\",\n \"Unnamed: 2\": \"region_2\",\n \"Unnamed: 3\": \"region_3\",\n \"Number of households1\": \"total_households\",\n \"Number of households in fuel poverty1\": \"fp_households\",\n \"Proportion of households fuel poor (%)\": \"fp_proportion\",\n }\n )\n #\n # Remove trailing spaces and fix capitalisation in region columns\n fuel_poverty[\"region_1\"] = fuel_poverty[\"region_1\"].apply(strip_and_titlecase)\n fuel_poverty[\"region_2\"] = fuel_poverty[\"region_2\"].apply(strip_and_titlecase)\n fuel_poverty[\"region_3\"] = fuel_poverty[\"region_3\"].apply(strip_and_titlecase)\n #\n # Merge the different 'region' columns into one and apply clean_names -\n # this allows for joining onto data in which local authorities\n # are only referred to by name and not ID\n fuel_poverty[\"clean_name\"] = (\n fuel_poverty[\"region_1\"]\n .fillna(fuel_poverty[\"region_2\"])\n .fillna(fuel_poverty[\"region_3\"])\n .apply(clean_names)\n )\n # Fill in NaN values in region columns so that all region_3 rows\n # have associated region_1 and region_2 data,\n # and all region_2 rows have associated region_1 data.\n # First copy region_1 values into region_2 then forward-fill region_2 -\n # the 'region_1's stop the filling from going too far\n fuel_poverty[\"region_2\"] = (\n fuel_poverty[\"region_2\"].fillna(fuel_poverty[\"region_1\"]).ffill()\n )\n # Set the copied-over values in region_2 back to NaN\n fuel_poverty[\"region_2\"].loc[~fuel_poverty[\"region_1\"].isna()] = np.nan\n # Then forward-fill region_1\n fuel_poverty[\"region_1\"] = fuel_poverty[\"region_1\"].ffill()\n # Filter out all of the region_1 rows - they are not local authorities\n fuel_poverty = fuel_poverty[~fuel_poverty[\"region_2\"].isna()]\n # Additionally remove all Met Counties and Inner/Outer London -\n # these are rows that contain (Met County) or Inner/Outer London in region_2\n # and have NA region_3\n def not_la_condition(string):\n return (\"(Met County)\" in string) | (string in [\"Inner London\", \"Outer London\"])\n\n #\n #\n not_las = [not_la_condition(string) for string in fuel_poverty[\"region_2\"]]\n no_region_3 = list(fuel_poverty.region_3.isna())\n both = [a and b for a, b in zip(not_las, no_region_3)]\n fuel_poverty = fuel_poverty.drop(fuel_poverty[both].index)\n #\n # Append rows for Greater London Authority and\n # Greater Manchester Combined Authority -\n # these are not LAs but some grants went to them\n combined_authorities = pd.DataFrame(\n [\n [\n np.nan,\n \"London\",\n \"Greater London Authority\",\n np.nan,\n np.nan,\n np.nan,\n np.nan,\n \"Greater London Authority\",\n ],\n [\n np.nan,\n \"North West\",\n \"Greater Manchester Combined Authority\",\n np.nan,\n np.nan,\n np.nan,\n np.nan,\n \"Greater Manchester Combined Authority\",\n ],\n ],\n columns=fuel_poverty.columns,\n )\n #\n fuel_poverty = 
fuel_poverty.append(combined_authorities, ignore_index=True)\n #\n return fuel_poverty\n\n\ndef get_clean_parties_models():\n \"\"\"Gets and cleans current LA majority party and model (e.g. county, district) data.\"\"\"\n parties_models = get_parties_models()\n #\n parties_models = parties_models.rename(\n columns={\n \"model (C=county, D=district, 1=all-up, 3=thirds, etc.)\": \"model\",\n }\n )\n # 'Buckinghamshire' row in this dataset is incorrect -\n # it is labelled as a County council but it has become unitary\n # Manually replace with the correct data\n # Source: http://opencouncildata.co.uk/council.php?c=413&y=0\n parties_models.loc[2] = [\"Buckinghamshire\", \"U1\", \"CON\"]\n #\n # Rename models to full names\n parties_models[\"model\"] = parties_models[\"model\"].apply(model_type)\n #\n # Apply clean_names to all names in parties/models data\n parties_models[\"clean_name\"] = parties_models[\"name\"].apply(clean_names)\n parties_models = parties_models.drop(columns=\"name\")\n #\n return parties_models\n\n\ndef get_clean_old_parties():\n \"\"\"Gets and cleans data about political majorities as of August 2020.\"\"\"\n op = get_old_parties()\n op[\"clean_name\"] = op[\"Authority\"].apply(clean_names)\n op[\"old_majority\"] = [string.upper() for string in op[\"Control\"]]\n op = op.drop(columns=[\"Authority\", \"Control\"]).reset_index(drop=True)\n return op\n\n\ndef get_clean_imd():\n \"\"\"Gets and cleans IMD data.\"\"\"\n imd = get_imd()\n imd = imd.rename(\n columns={\n \"Reference area\": \"full_name\",\n \" Local concentration\": \"imd_concentration\",\n }\n )\n #\n imd[\"clean_name\"] = imd[\"full_name\"].apply(clean_names)\n imd = imd.drop(columns=\"full_name\")\n #\n return imd\n\n\ndef get_clean_grants():\n \"\"\"Gets and cleans data on grants received by LAs.\"\"\"\n grants = get_grants()\n grants = grants.rename(\n columns={\n \"Local authority\": \"full_name\",\n \"GHG LADS 1a\": \"GHG_1a_individuals\",\n \"1a Consortium Leads\": \"GHG_1a_leads\",\n \"1a Consortium bodies\": \"GHG_1a_bodies\",\n \"GHG LADS 1b\": \"GHG_1b_individuals\",\n \"1b Consortium leads\": \"GHG_1b_leads\",\n \"1b Consortium bodies\": \"GHG_1b_bodies\",\n \"Social Housing Decarbonisation Fund - Demonstrator \": \"SHDDF\",\n \"Total\": \"total_grants\",\n }\n )\n #\n # Some regions appear twice in the grants data\n duplicate_strings = [\"Greenwich\", \"Lewisham\", \"Redbridge\"]\n regex_exp = \"|\".join(duplicate_strings)\n clean_grants = grants[~grants[\"full_name\"].str.contains(regex_exp, regex=True)]\n #\n for string in duplicate_strings:\n duplicate_df = grants[grants[\"full_name\"].str.contains(string)]\n replacement_row = duplicate_df.iloc[0] + duplicate_df.iloc[1]\n replacement_row[\"full_name\"] = string\n clean_grants = clean_grants.append(replacement_row, ignore_index=True)\n #\n # Babergh and Mid Suffolk are shown in one row in the grants data,\n # but they are actually two different LAs - the stated grants\n # apply to both individually\n babergh_ms = clean_grants[\n [(\"Babergh and Mid Suffolk\" in name) for name in clean_grants[\"full_name\"]]\n ]\n babergh = babergh_ms.copy()\n babergh[\"full_name\"] = \"Babergh\"\n ms = babergh_ms.copy()\n ms[\"full_name\"] = \"Mid Suffolk\"\n clean_grants = (\n clean_grants[\n [\n (\"Babergh and Mid Suffolk\" not in name)\n for name in clean_grants[\"full_name\"]\n ]\n ]\n .append(babergh)\n .append(ms)\n .reset_index(drop=True)\n )\n #\n # As before, apply clean_names in order to join data\n clean_grants[\"clean_name\"] = 
clean_grants[\"full_name\"].apply(clean_names)\n clean_grants = clean_grants.drop(columns=\"full_name\")\n #\n return clean_grants\n\n\ndef get_clean_epc():\n \"\"\"Processes EPC dataset to obtain median EPC for each LA\n and counts/proportions of improvable social housing.\n \"\"\"\n epc = get_epc()\n #\n # Calculate median energy rating for each LA:\n epc_medians = (\n epc.groupby(\"LOCAL_AUTHORITY\")[\"CURRENT_ENERGY_EFFICIENCY\"]\n .apply(np.median)\n .reset_index(name=\"median_energy_efficiency\")\n )\n #\n # Calculate proportions of 'improvable' social housing\n # (socially rented dwellings that are currently EPC D or below,\n # and have the potential to be C or above)\n #\n # There are two different strings signifying socially rented\n # in the TENURE column of the EPC data:\n epc_social = epc.loc[epc[\"TENURE\"].isin([\"rental (social)\", \"Rented (social)\"])]\n #\n epc_social[\"is_improvable\"] = (\n epc_social[\"CURRENT_ENERGY_RATING\"].isin([\"G\", \"F\", \"E\", \"D\"])\n ) & (epc_social[\"POTENTIAL_ENERGY_RATING\"].isin([\"C\", \"B\", \"A\"]))\n #\n # Find the numbers of improvable / not improvable social houses in each LA\n potential_counts = (\n epc_social.groupby([\"LOCAL_AUTHORITY\", \"is_improvable\"])[\n [\"LOCAL_AUTHORITY\", \"is_improvable\"]\n ]\n .size()\n .reset_index(name=\"count\")\n .pivot(index=\"LOCAL_AUTHORITY\", columns=\"is_improvable\", values=\"count\")\n .rename(columns={True: \"total_improvable\", False: \"total_not_improvable\"})\n )\n # Calculate proportions\n potential_counts.columns.name = None\n potential_counts[\"total_social\"] = potential_counts.sum(axis=1)\n potential_counts[\"prop_improvable\"] = (\n potential_counts[\"total_improvable\"] / potential_counts[\"total_social\"]\n )\n potential_counts = potential_counts.reset_index()[\n [\"LOCAL_AUTHORITY\", \"total_improvable\", \"prop_improvable\"]\n ]\n # Join to medians\n clean_epc = epc_medians.merge(potential_counts, on=\"LOCAL_AUTHORITY\").rename(\n columns={\"LOCAL_AUTHORITY\": \"code\"}\n )\n #\n return clean_epc\n" ]
[ [ "pandas.DataFrame" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] } ]
HwangDongJun/Federated_Learning_using_Websockets
[ "87c2873ae9b6a651750d08f4cd0ad5757893ce88" ]
[ "federated_learning_without_transfer_learning/ntf_client_fit_model.py" ]
[ "# Setup library\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport os\nimport numpy as np\nimport PIL.Image as Image\nfrom PIL import ImageFile\nimport tensorflow as tf\nimport tensorflow_hub as hub\nfrom tensorflow.keras import layers\nimport matplotlib.pylab as plt\nimport efficientnet.tfkeras as efn\n\nos.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\"\nos.environ[\"CUDA_VISIBLE_DEVICES\"]=\"\"\n\ngpus = tf.config.experimental.list_physical_devices('GPU')\nif gpus:\n\ttry:\n\t\tfor gpu in gpus:\n\t\t\ttf.config.experimental.set_memory_growth(gpu, True)\n\t\tlogical_gpus = tf.config.experimental.list_logical_devices('GPU')\n\t\tprint(len(gpus), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\")\n\texcept RuntimeError as e:\n\t\tprint(e)\n\n\nclass transfer_learning_fit(object):\n\tdef __init__(self, config, weights):\n\t\tself.weights = weights\n\t\tself.image_shape = (config['image_shape'], config['image_shape'])\n\t\tself.batch_size = config['batch_size']\n\t\tself.learning_rate = config['learning_rate']\n\t\tself.epochs = config['epochs']\n\t\tself.optimizer = config['optimizer']\n\t\tself.model_link = config['model']\n\t\tself.class_names = np.array(['book', 'laptop', 'phone', 'wash', 'water'])\n\n\t\ttf.random.set_seed(2020)\n\n\tdef image_generator(self):\n\t\timage_gen_train = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trotation_range=15,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\thorizontal_flip=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tbrightness_range=[0.7,1.0])\n\t\timage_gen_val = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)\n\t\treturn image_gen_train, image_gen_val\n\n\tdef gen_train_val_data(self):\n\t\tgen_train, gen_val = self.image_generator()\n\n\t\ttrain_data_dir = os.path.abspath('INPUT YOUR TRANING DATA SET PATH')\n\t\ttrain_data_gen = gen_train.flow_from_directory(directory=str(train_data_dir),\n\t\t\t\t\t\t\t\t\t\t\tbatch_size=self.batch_size,\n\t\t\t\t\t\t\t\t\t\t\tcolor_mode='rgb',\n\t\t\t\t\t\t\t\t\t\t\tshuffle=True,\n\t\t\t\t\t\t\t\t\t\t\ttarget_size=self.image_shape,\n\t\t\t\t\t\t\t\t\t\t\tclasses=list(self.class_names))\n\n\t\treturn train_data_gen\n\n\tdef select_optimizer(self, opti, lr):\n\t\tif opti == 'adam':\n\t\t\treturn tf.keras.optimizers.Adam(learning_rate=lr)\n\n\tdef set_model(self, vector_layer):\n\t\t#efficient_net = efn.EfficientNetB0(\n\t\t#\tweights=None,\n\t\t#\tinput_shape=self.image_shape+(3,),\n\t\t#\tinclude_top=False,\n\t\t#\tpooling='max'\n\t\t#)\n\n\t\t#model = tf.keras.Sequential([\n\t\t#\tefficient_net,\n\t\t#\tlayers.Dense(5, activation='softmax')\n\t\t#])\n\n\t\tmobilenet_v2 = tf.keras.applications.MobileNetV2(\n\t\t\tweights=None,\n\t\t\tinput_shape=self.image_shape+(3,),\n\t\t\tinclude_top=False,\n\t\t\tpooling='max'\n\t\t)\n\n\t\tmodel = tf.keras.Sequential([\n\t\t\tmobilenet_v2,\n\t\t\tlayers.Dense(5, activation='softmax')\n\t\t])\n\n\t\treturn model\n\n\tdef build_model(self):\n\t\tfeature_vector_url = self.model_link\n\t\tfeature_vector_layer = hub.KerasLayer(feature_vector_url,\n\t\t\t\t\t\t\t\t\t\tinput_shape=self.image_shape+(3,))\n\t\t\n\t\tfeature_vector_layer.trainable = True\n\n\t\tmade_model = self.set_model(feature_vector_layer)\n\n\t\tprint(made_model.summary())\n\n\t\tmade_model.compile(\n\t\t\toptimizer=self.select_optimizer(self.optimizer, self.learning_rate),\n\t\t\tloss='categorical_crossentropy',\n\t\t\tmetrics=['acc'])\n\n\t\treturn made_model, feature_vector_layer\n\n\tdef 
train_model_tosave(self, weight):\n\t\tcallback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=3)\n\n\t\tif weight == list():\n\t\t\tlocal_model, feature_layer = self.build_model()\n\t\t\tgen_train_data = self.gen_train_val_data()\n\t\t\tlocal_model.fit_generator(gen_train_data, epochs=self.epochs, callbacks=[callback])\n\t\telse:\n\t\t\tlocal_model, feature_layer = self.build_model()\n\t\t\tgen_train_data = self.gen_train_val_data()\n\t\t\tlocal_model.set_weights(weight)\n\t\t\tlocal_model.fit_generator(gen_train_data, epochs=self.epochs, callbacks=[callback])\n\t\t\t\n\t\treturn local_model.get_weights()\n\n\tdef get_weight_finetune_model(self, expath, feature_layer, gtrain_data):\n\t\treloaded_model = tf.keras.models.load_model(expath)\n\t\t\n\t\tfeature_layer.trainable = True\n\n\t\tcallback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=3)\n\n\t\treloaded_model.compile(\n\t\t\toptimizer=self.select_optimizer(self.optimizer, self.learning_rate*0.1),\n\t\t\tloss='categorical_crossentropy',\n\t\t\tmetrics=['accuracy'])\n\t\treloaded_model.fit_generator(gtrain_data, epochs=self.epochs+(self.epochs*2),\n\t\t\t\t\t\tinitial_epoch=self.epochs, callbacks=[callback])\n\n\t\treturn reloaded_model.get_weights() # return excluding the Dense layer weights\n\n\tdef manage_train(self):\n\t\tget_weights = list()\n\t\ttraining_weight = self.train_model_tosave(self.weights)\n\t\t\t\n\t\treturn training_weight\n" ]
[ [ "tensorflow.keras.models.load_model", "tensorflow.config.experimental.list_logical_devices", "tensorflow.keras.preprocessing.image.ImageDataGenerator", "tensorflow.config.experimental.set_memory_growth", "tensorflow.keras.layers.Dense", "tensorflow.config.experimental.list_physical_devices", "tensorflow.keras.callbacks.EarlyStopping", "tensorflow.keras.optimizers.Adam", "numpy.array", "tensorflow.random.set_seed", "tensorflow.keras.applications.MobileNetV2" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "2.7", "2.2", "2.3", "2.4", "2.5", "2.6" ] } ]
pedersor/google-research
[ "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6", "6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6" ]
[ "representation_batch_rl/representation_batch_rl/cql_pixels.py", "ml_debiaser/randomized_threshold.py", "goemotions/analyze_data.py", "depth_and_motion_learning/losses/loss_aggregator.py", "mol_dqn/chemgraph/dqn/molecules_test.py", "task_set/tasks/synthetic_sequence.py", "supcon/losses_test.py", "non_semantic_speech_benchmark/distillation/models.py", "correct_batch_effects_wdn/transform.py", "motion_blur/train/train.py", "task_set/tasks/mlp_vae.py", "opt_list/opt_list/torch_opt_list.py", "hal/agent/DQN/dqn.py", "assessment_plan_modeling/ap_parsing/data_lib_test.py", "vatt/modeling/heads/mlp_lib.py", "supcon/models.py", "dreamfields/dreamfields/lib.py", "drfact/model_fns.py", "smith/input_fns.py", "tunas/cost_model_lib.py" ]
[ "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Implementation of DDPG.\"\"\"\n\nimport typing\n\nfrom dm_env import specs as dm_env_specs\nimport numpy as np\nimport tensorflow as tf\nfrom tf_agents.specs.tensor_spec import TensorSpec\n\nfrom representation_batch_rl.batch_rl import critic\nfrom representation_batch_rl.batch_rl.encoders import ConvStack\nfrom representation_batch_rl.batch_rl.encoders import ImageEncoder\nfrom representation_batch_rl.batch_rl.encoders import make_impala_cnn_network\nfrom representation_batch_rl.representation_batch_rl import tf_utils\n\n\nclass CQL(object):\n \"\"\"Class performing CQL training.\"\"\"\n\n def __init__(self,\n observation_spec,\n action_spec,\n actor_lr = 1e-4,\n critic_lr = 3e-4,\n discount = 0.99,\n tau = 0.005,\n target_entropy = 0.0,\n reg = 0.0,\n num_cql_actions = 10,\n bc_pretraining_steps = 40_000,\n min_q_weight = 10.0,\n num_augmentations = 1,\n rep_learn_keywords = 'outer',\n batch_size = 256):\n \"\"\"Creates networks.\n\n Args:\n observation_spec: environment observation spec.\n action_spec: Action spec.\n actor_lr: Actor learning rate.\n critic_lr: Critic learning rate.\n discount: MDP discount.\n tau: Soft target update parameter.\n target_entropy: Target entropy.\n reg: Coefficient for out of distribution regularization.\n num_cql_actions: Number of actions to sample for CQL loss.\n bc_pretraining_steps: Use BC loss instead of CQL loss for N steps.\n min_q_weight: CQL alpha.\n num_augmentations: Num of random crops\n rep_learn_keywords: Representation learning loss to add.\n batch_size: Batch size\n \"\"\"\n self.num_augmentations = num_augmentations\n self.batch_size = batch_size\n self.rep_learn_keywords = rep_learn_keywords.split('__')\n\n critic_kwargs = {}\n\n if observation_spec.shape == (64, 64, 3):\n # IMPALA for Procgen\n def conv_stack():\n return make_impala_cnn_network(\n depths=[16, 32, 32], use_batch_norm=False, dropout_rate=0.)\n\n state_dim = 256\n else:\n # Reduced architecture for DMC\n def conv_stack():\n return ConvStack(observation_spec.shape)\n state_dim = 50\n\n conv_stack_critic = conv_stack()\n conv_target_stack_critic = conv_stack()\n\n if observation_spec.shape == (64, 64, 3):\n conv_stack_critic.output_size = state_dim\n conv_target_stack_critic.output_size = state_dim\n # Combine and stop_grad some of the above conv stacks\n critic_kwargs['encoder'] = ImageEncoder(\n conv_stack_critic, feature_dim=state_dim, bprop_conv_stack=True)\n # Note: the target critic does not share any weights.\n critic_kwargs['encoder_target'] = ImageEncoder(\n conv_target_stack_critic, feature_dim=state_dim, bprop_conv_stack=True)\n\n if self.num_augmentations == 0:\n dummy_state = tf.constant(\n np.zeros(shape=[1] + list(observation_spec.shape)))\n else: # account for padding of +4 everywhere and then cropping out 68\n dummy_state = tf.constant(np.zeros(shape=[1, 68, 68, 3]))\n\n @tf.function\n def init_models():\n 
critic_kwargs['encoder'](dummy_state)\n critic_kwargs['encoder_target'](dummy_state)\n\n init_models()\n\n hidden_dims = (256, 256)\n # self.actor = policies.CategoricalPolicy(state_dim, action_spec,\n # hidden_dims=hidden_dims, encoder=actor_kwargs['encoder'])\n action_dim = action_spec.maximum.item() + 1\n\n self.action_dim = action_dim\n\n self.log_alpha = tf.Variable(tf.math.log(1.0), trainable=True)\n self.log_cql_alpha = self.log_alpha\n self.alpha_optimizer = tf.keras.optimizers.Adam(learning_rate=actor_lr)\n\n self.critic = critic.Critic(\n state_dim,\n action_dim,\n hidden_dims=hidden_dims,\n encoder=critic_kwargs['encoder'],\n discrete_actions=True,\n linear='linear_Q' in self.rep_learn_keywords)\n self.critic_target = critic.Critic(\n state_dim,\n action_dim,\n hidden_dims=hidden_dims,\n encoder=critic_kwargs['encoder_target'],\n discrete_actions=True,\n linear='linear_Q' in self.rep_learn_keywords)\n\n @tf.function\n def init_models2():\n \"\"\"This function initializes all auxiliary networks (state and action encoders) with dummy input (Procgen-specific, 68x68x3, 15 actions).\n \"\"\"\n dummy_state = tf.zeros((1, 68, 68, 3), dtype=tf.float32)\n phi_s = self.critic.encoder(dummy_state)\n phi_a = tf.eye(15, dtype=tf.float32)\n if 'linear_Q' in self.rep_learn_keywords:\n _ = self.critic.critic1.state_encoder(phi_s)\n _ = self.critic.critic2.state_encoder(phi_s)\n _ = self.critic.critic1.action_encoder(phi_a)\n _ = self.critic.critic2.action_encoder(phi_a)\n _ = self.critic_target.critic1.state_encoder(phi_s)\n _ = self.critic_target.critic2.state_encoder(phi_s)\n _ = self.critic_target.critic1.action_encoder(phi_a)\n _ = self.critic_target.critic2.action_encoder(phi_a)\n\n init_models2()\n\n critic.soft_update(self.critic, self.critic_target, tau=1.0)\n self.critic_optimizer = tf.keras.optimizers.Adam(learning_rate=critic_lr)\n self.tau = tau\n\n self.reg = reg\n self.target_entropy = target_entropy\n self.discount = discount\n\n self.num_cql_actions = num_cql_actions\n self.bc_pretraining_steps = bc_pretraining_steps\n self.min_q_weight = min_q_weight\n\n self.bc = None\n\n self.model_dict = {\n 'critic': self.critic,\n 'critic_target': self.critic_target,\n 'critic_optimizer': self.critic_optimizer,\n 'alpha_optimizer': self.alpha_optimizer\n }\n\n @property\n def alpha(self):\n return tf.constant(0.)\n\n @property\n def cql_alpha(self):\n return tf.exp(self.log_cql_alpha)\n\n def fit_critic(self, states, actions,\n next_states, next_actions, rewards,\n discounts):\n \"\"\"Updates critic parameters.\n\n Args:\n states: Batch of states.\n actions: Batch of actions.\n next_states: Batch of next states.\n next_actions: Batch of next actions from training policy.\n rewards: Batch of rewards.\n discounts: Batch of masks indicating the end of the episodes.\n\n Returns:\n Dictionary with information to track.\n \"\"\"\n action_indices = tf.stack(\n [tf.range(tf.shape(actions)[0], dtype=tf.int64), actions], axis=-1)\n next_action_indices = tf.stack(\n [tf.range(tf.shape(next_actions)[0], dtype=tf.int64), next_actions],\n axis=-1)\n\n if self.num_augmentations > 1:\n target_q = 0.\n for i in range(self.num_augmentations):\n next_q1_i, next_q2_i = self.critic_target(next_states[i], actions=None)\n target_q_i = tf.expand_dims(\n rewards, 1) + self.discount * tf.expand_dims(\n discounts, 1) * tf.minimum(next_q1_i, next_q2_i)\n target_q += target_q_i\n target_q /= self.num_augmentations\n elif self.num_augmentations == 1:\n next_q1, next_q2 = self.critic_target(\n next_states[0], 
actions=None, stop_grad_features=False)\n target_q = tf.expand_dims(\n rewards, 1) + self.discount * tf.expand_dims(\n discounts, 1) * tf.minimum(next_q1, next_q2)\n else:\n next_q1, next_q2 = self.critic_target(next_states, actions=None)\n target_q = tf.expand_dims(rewards, 1) + self.discount * tf.expand_dims(\n discounts, 1) * tf.minimum(next_q1, next_q2)\n\n target_q = tf.gather_nd(target_q, indices=next_action_indices)\n\n with tf.GradientTape(watch_accessed_variables=False) as tape:\n tape.watch(self.critic.trainable_variables)\n\n if self.num_augmentations > 1:\n critic_loss = 0.\n q1 = 0.\n q2 = 0.\n for i in range(self.num_augmentations):\n q1_i, q2_i = self.critic(states[i], actions=None)\n critic_loss_i = (\n tf.losses.mean_squared_error(\n target_q, tf.gather_nd(q1_i, indices=action_indices)) +\n tf.losses.mean_squared_error(\n target_q, tf.gather_nd(q2_i, indices=action_indices)))\n q1 += q1_i\n q2 += q2_i\n critic_loss += critic_loss_i\n q1 /= self.num_augmentations\n q2 /= self.num_augmentations\n critic_loss /= self.num_augmentations\n elif self.num_augmentations == 1:\n q1, q2 = self.critic(states[0], actions=None)\n critic_loss = (\n tf.losses.mean_squared_error(\n target_q, tf.gather_nd(q1, indices=action_indices)) +\n tf.losses.mean_squared_error(\n target_q, tf.gather_nd(q2, indices=action_indices)))\n else:\n # Ensure num_augmentations is non-negative\n assert self.num_augmentations == 0\n q1, q2 = self.critic(states, actions=None)\n critic_loss = (\n tf.losses.mean_squared_error(\n target_q, tf.gather_nd(q1, indices=action_indices)) +\n tf.losses.mean_squared_error(\n target_q, tf.gather_nd(q2, indices=action_indices)))\n q = tf.minimum(q1, q2)\n cql_logsumexp = tf.reduce_logsumexp(q, 1)\n cql_loss = tf.reduce_mean(cql_logsumexp -\n tf.gather_nd(q, indices=action_indices))\n\n critic_loss += (self.reg * cql_loss)\n\n critic_grads = tape.gradient(critic_loss, self.critic.trainable_variables)\n\n self.critic_optimizer.apply_gradients(\n zip(critic_grads, self.critic.trainable_variables))\n\n critic.soft_update(self.critic, self.critic_target, tau=self.tau)\n\n return {\n 'q1': tf.reduce_mean(q1),\n 'q2': tf.reduce_mean(q2),\n 'critic_loss': critic_loss,\n 'cql_loss': cql_loss\n }\n\n @tf.function\n def update_step(self,\n replay_buffer_iter,\n train_target='both'):\n \"\"\"Performs a single training step for critic and embedding.\n\n Args:\n replay_buffer_iter: A tensorflow graph iteratable object.\n train_target: string specifying whether update RL and or representation\n\n Returns:\n Dictionary with losses to track.\n \"\"\"\n del train_target\n transition = next(replay_buffer_iter)\n numpy_dataset = isinstance(replay_buffer_iter, np.ndarray)\n # observation: n_batch x n_timesteps x 1 x H*W*3*n_frames x 1 ->\n # n_batch x H x W x 3*n_frames\n if not numpy_dataset:\n states = transition.observation[:, 0]\n next_states = transition.observation[:, 1]\n actions = transition.action[:, 0]\n rewards = transition.reward[:, 0]\n discounts = transition.discount[:, 0]\n\n if transition.observation.dtype == tf.uint8:\n states = tf.cast(states, tf.float32) / 255.\n next_states = tf.cast(next_states, tf.float32) / 255.\n else:\n states, actions, rewards, next_states, discounts = transition\n\n if self.num_augmentations > 0:\n states, next_states = tf_utils.image_aug(\n states,\n next_states,\n img_pad=4,\n num_augmentations=self.num_augmentations,\n obs_dim=64,\n channels=3,\n cropped_shape=[self.batch_size, 68, 68, 3])\n\n next_actions = self.act(next_states, data_aug=True)\n\n 
critic_dict = self.fit_critic(states, actions, next_states, next_actions,\n rewards, discounts)\n\n return critic_dict\n\n @tf.function\n def act(self, states, data_aug=False):\n \"\"\"Act with batch of states.\n\n Args:\n states: tf.tensor n_batch x 64 x 64 x 3\n data_aug: bool, whether to use stochastic data aug (else deterministic)\n\n Returns:\n action: tf.tensor\n \"\"\"\n if data_aug and self.num_augmentations > 0:\n states = states[0]\n if self.num_augmentations > 0:\n # use pad of 2 to bump 64 to 68 with 2 + 64 + 2 on each side\n img_pad = 2\n paddings = tf.constant(\n [[0, 0], [img_pad, img_pad], [img_pad, img_pad], [0, 0]],\n dtype=tf.int32)\n states = tf.cast(\n tf.pad(tf.cast(states * 255., tf.int32), paddings, 'SYMMETRIC'),\n tf.float32) / 255.\n\n q1, q2 = self.critic(states, actions=None)\n q = tf.minimum(q1, q2)\n actions = tf.argmax(q, -1)\n return actions\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Utils for debiasing ML models.\"\"\"\n\nimport math\n\nimport numpy as np\n\n\nclass RandomizedThreshold:\n \"\"\"Threshold optimizer (RTO) to debias models via postprocessing.\n\n See: https://arxiv.org/abs/2106.12887.\n\n This is a solver to the following optimiation problem:\n minimize gamma/2 ||x||^2 - y^Tx\n s.t. x satisfies DP constraint with tolerance eps and parameter rho.\n\n There are no assumptions about y in this code but, in general, y should be the\n predictions of the original classifier.\n \"\"\"\n\n def __init__(self, gamma=1.0, eps=0.0, rho=None):\n \"\"\"Instantiate object.\n\n Args:\n gamma: The regularization parameter gamma (for randomization). Set this to\n 1 if the goal is to minmize changes to the original scores.\n eps: Tolerance parameter for bias between 0 and 1 inclusive.\n rho: The rho parameter in the post-hoc rule. If None, rho = E[y].\n \"\"\"\n if eps < 0:\n raise ValueError('eps must be non-negative.')\n\n if gamma <= 0:\n raise ValueError('gamma must be a strictly positive number.')\n\n if rho is not None and rho <= 0:\n raise ValueError('rho must be either None or a strictly positive number.')\n\n self.num_groups = 1\n self.gamma = gamma\n self.eps = eps\n self.rho = rho\n self.avrg_y_score = 0\n\n # model paramters (Lagrange dual variables)\n self.lambdas = []\n self.mus = []\n\n def fit(self, y_orig, group_feature, sgd_steps,\n full_gradient_epochs=1_000, verbose=True, batch_size=256,\n ignore_warnings=False):\n \"\"\"Debias predictions w.r.t. the sensitive class in each demographic group.\n\n This procedure takes as input a vector y=y_orig and solves the optimization\n problem subject to the statistical parity constraint.\n minimize_x gamma/2 ||x||^2 - y^Tx\n s.t. x satisfies DP constraints with tolerance eps and parameter rho.\n\n IMPORTANT: If this is used for postprocessing a classifier,\n the scores y_orig need to be rescaled linearly to [-1, +1].\n\n Training proceeds in two rounds. First is SGD. Second is full gradient\n descent. 
Full gradient descent is recommended when debiasing deep neural\n nets because the scores are concentrated around the extremes\n so high preciseion might be needed. Because the loss is smooth, the lr\n in full gradient method does not need tuning. It can be set to gamma / 2.0.\n\n Args:\n y_orig: A vector of the original probability scores. If this is used for\n debiasing binary classifiers, y_orig = 2 * p(y=1) -1.\n group_feature: An array containing the group id of each instance starting\n from group 0 to group K-1.\n sgd_steps: Number of minibatch steps in SGD.\n full_gradient_epochs: Number of epochs in full gradient descent phase.\n verbose: Set to True to display progress.\n batch_size: Size of minibatches in SGD.\n ignore_warnings: Set to True to suppress warnings.\n\n Returns:\n None.\n \"\"\"\n if min(y_orig) >= 0:\n self.yscale = 'positive'\n else:\n self.yscale = 'negative'\n\n y_orig = np.array(y_orig)\n num_groups = len(set(group_feature)) # number of demographic groups\n\n if (min(y_orig) < -1 or max(y_orig) > 1) and not ignore_warnings:\n print('Warning: the scores y_orig are not in the range [-1, +1].'\n 'To suppress this message, set ignore_warnings=True.')\n\n if self.yscale == 'positive' and not ignore_warnings:\n print('Warning: if this is for postprocessing a binary classifier, '\n 'the scores need to be rescaled to [-1, +1]. To suppress this '\n 'message, set ignore_warnings=True.')\n if min(group_feature) != 0 or (max(group_feature) != num_groups - 1):\n raise ValueError('group_feature should be in {0, 1, .. K-1} where '\n 'K is the nubmer of groups. Some groups are missing.')\n\n self.num_groups = num_groups\n eps0 = self.eps / 2.0\n gamma = self.gamma\n\n # Store group membership ids in a dictionary.\n xk_groups = {}\n for k in range(num_groups):\n xk_groups[k] = []\n for i in range(len(group_feature)):\n xk_groups[group_feature[i]].append(i)\n\n for k in xk_groups:\n assert xk_groups[k] # All groups must be non-empty.\n\n self.avrg_y_score = float(sum(y_orig))/len(y_orig)\n if self.rho is None:\n if self.yscale == 'positive':\n self.rho = self.avrg_y_score\n else:\n self.rho = self.avrg_y_score / 2.0 + 0.5\n\n # The parameters we optimize in the algorithm are lambdas and mus.\n # lambdas_final and mus_final are running averages (final output).\n lambdas = np.zeros((num_groups,))\n mus = np.zeros((num_groups,))\n lambdas_final = np.zeros((num_groups,)) # running averages\n mus_final = np.zeros((num_groups,)) # running averages\n\n # SGD is carried out in each group separately due to decomposition of the\n # optimization problem.\n num_samples_sgd = sgd_steps * batch_size\n lr = gamma * math.sqrt(1.0 / num_samples_sgd)\n\n # Begin the projected SGD phase.\n if verbose:\n print('SGD phase started:')\n for k in range(num_groups):\n if verbose:\n print('Group %d.\\t\\t%02d%%'%(k, int(100*k/num_groups)), end='\\r')\n\n idx = np.array(list(xk_groups[k])) # instance IDs in group k\n group_size = len(idx)\n for _ in range(sgd_steps):\n # Using random.randint is 10x faster than random.choice.\n batch_ids = np.random.randint(0, group_size, batch_size)\n batch_ids = idx[batch_ids]\n\n # The code below is a faster implementation of:\n # xi_arg = y_orig[batch_ids] - (lambdas[k] - mus[k])\n # xi_gradient = xi_arg/gamma\n # xi_gradient = np.maximum(xi_gradient, 0.)\n # xi_gradient = np.minimum(xi_gradient, 1.)\n\n lambda_minus_mu = lambdas[k] - mus[k]\n xi_arg = np.maximum(y_orig[batch_ids], lambda_minus_mu)\n xi_arg = np.minimum(xi_arg, gamma + lambda_minus_mu)\n mean_xi = 
(np.mean(xi_arg) - lambda_minus_mu) / gamma\n\n lambda_gradient = eps0 + self.rho - mean_xi\n mu_gradient = eps0 - self.rho + mean_xi\n\n # stochastic gradient descent\n if eps0 > 1e-3:\n lambdas[k] = max(0, lambdas[k] - lr * batch_size * lambda_gradient)\n mus[k] = max(0, mus[k] - lr * batch_size * mu_gradient)\n else:\n # If self.eps=0, we can drop mus and optimize lambdas only but\n # lambdas will not be constrained to be non-negative in this case.\n lambdas[k] = lambdas[k] - lr * batch_size * lambda_gradient\n\n # lambdas_final and mus_final are running averages.\n lambdas_final[k] += lambdas[k] / sgd_steps\n mus_final[k] += mus[k] / sgd_steps\n\n # Now switch to full gradient descent.\n # Because the objective is smooth, lr=gamma/2 works.\n if verbose and full_gradient_epochs:\n print('\\nFull gradient descent phase started:')\n for k in range(num_groups):\n if verbose:\n print('Group {}.'.format(k))\n\n idx = np.array(list(xk_groups[k]))\n for _ in range(full_gradient_epochs):\n lambda_minus_mu = lambdas_final[k] - mus_final[k]\n xi_arg = np.maximum(y_orig[idx], lambda_minus_mu)\n xi_arg = np.minimum(xi_arg, gamma + lambda_minus_mu)\n mean_xi = (np.mean(xi_arg) - lambda_minus_mu) / gamma\n\n full_grad_lambda = eps0 + self.rho - mean_xi\n full_grad_mu = eps0 - self.rho + mean_xi\n\n if eps0 > 1e-3:\n lambdas_final[k] = max(0,\n lambdas_final[k] - 0.5*gamma*full_grad_lambda)\n mus_final[k] = max(0, mus_final[k] - 0.5*gamma*full_grad_mu)\n else:\n lambdas_final[k] = lambdas_final[k] - 0.5*gamma*full_grad_lambda\n\n self.lambdas = lambdas_final\n self.mus = mus_final\n\n def predict(self, y_orig, group_feature, ignore_warnings=False):\n \"\"\"Debiases the predictions.\n\n Given the original scores y, post-process them according to the learned\n model such that the predictions satisfy the desired fairness criteria.\n\n Args:\n y_orig: Original classifier scores. If this is for postprocessing binary\n classifiers, y_orig = 2 * p(y=1) -1.\n group_feature: An array containing the group id of each instance starting\n from group 0 to group K-1.\n ignore_warnings: Set to True to suppress warnings.\n\n Returns:\n y_new_prob: y_new_prob[i] is the probability of predicting the positive\n class for the instance i.\n \"\"\"\n if (((min(y_orig) >= 0 and self.yscale == 'negative') or\n (min(y_orig) < 0 and self.yscale == 'positive')) and\n not ignore_warnings):\n print('Warning: the scores seem to have a difference scale from the '\n 'training data. '\n 'If the data is scaled in [0, 1], e.g. for preprocessing, or '\n 'in [-1, +1], e.g. 
\n    num_examples = len(y_orig)  # number of training examples\n    gamma = self.gamma\n    lambdas = self.lambdas\n    mus = self.mus\n\n    y_new_prob = np.zeros((num_examples,))\n    for i in range(num_examples):\n      k = group_feature[i]\n      if y_orig[i] < (lambdas[k]-mus[k]):\n        y_new_prob[i] = 0\n      elif y_orig[i] < (lambdas[k]-mus[k]) + gamma:\n        y_new_prob[i] = (1.0/gamma)*(y_orig[i]-(lambdas[k]-mus[k]))\n      else:\n        y_new_prob[i] = 1.0\n\n    return y_new_prob\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n"""Script for analyzing the annotations of GoEmotions.\n\nThe analysis includes calculating high-level statistics as well as correlation\namong emotion labels.\n"""\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport json\nimport os\nfrom absl import app\nfrom absl import flags\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nfrom scipy.cluster.hierarchy import dendrogram\nfrom scipy.cluster.hierarchy import linkage\nfrom scipy.spatial.distance import pdist\nimport seaborn as sns\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_string("data", "data/full_dataset",\n                    "Directory containing full dataset.")\n\nflags.DEFINE_string("plot_dir", "plots",\n                    "Directory for saving plots and analyses.")\n\nflags.DEFINE_string("emotion_file", "data/emotions.txt",\n                    "File containing list of emotions.")\nflags.DEFINE_string("sentiment_dict", "data/sentiment_dict.json",\n                    "Sentiment dictionary.")\n\n\ndef CheckAgreement(ex, min_agreement, all_emotions, max_agreement=100):\n  """Return the labels that at least min_agreement raters agree on."""\n  sum_ratings = ex[all_emotions].sum(axis=0)\n  agreement = ((sum_ratings >= min_agreement) & (sum_ratings <= max_agreement))\n  return ",".join(sum_ratings.index[agreement].tolist())\n\n\ndef CountLabels(labels):\n  if (not isinstance(labels, float)) and labels:\n    return len(labels.split(","))\n  return 0\n\n\ndef main(_):\n  print("Loading data...")\n  dfs = []\n  for filename in os.listdir(FLAGS.data):\n    if filename.endswith(".csv"):\n      dfs.append(\n          pd.read_csv(os.path.join(FLAGS.data, filename), encoding="utf-8"))\n  data = pd.concat(dfs)\n  print("%d Examples" % (len(set(data["id"]))))\n  print("%d Annotations" % len(data))\n
\n  if not os.path.isdir(FLAGS.plot_dir):\n    os.makedirs(FLAGS.plot_dir)\n\n  with open(FLAGS.emotion_file, "r") as f:\n    all_emotions = f.read().splitlines()\n  all_emotions_neutral = all_emotions + ["neutral"]\n  print("%d emotion categories" % len(all_emotions))\n\n  print("%d unique raters" % len(data["rater_id"].unique()))\n  print("%.3f marked unclear" %\n        (data["example_very_unclear"].sum() / len(data)))\n\n  # Since the ones marked as difficult have no labels, exclude those\n  data = data[data[all_emotions_neutral].sum(axis=1) != 0]\n\n  print("Distribution of number of labels per example:")\n  print(data[all_emotions_neutral].sum(axis=1).value_counts() / len(data))\n  print("%.2f with more than 3 labels" %\n        ((data[all_emotions_neutral].sum(axis=1) > 3).sum() /\n         len(data)))  # more than 3 labels\n\n  print("Label distributions:")\n  print((data[all_emotions_neutral].sum(axis=0).sort_values(ascending=False) /\n         len(data) * 100).round(2))\n\n  print("Plotting label correlations...")\n  ratings = data.groupby("id")[all_emotions].mean()\n\n  # Compute the correlation matrix\n  corr = ratings.corr()\n\n  # Generate a mask for the upper triangle\n  mask = np.zeros_like(corr, dtype=bool)\n  mask[np.triu_indices_from(mask)] = True\n\n  # Set up the matplotlib figure\n  fig, _ = plt.subplots(figsize=(11, 9))\n\n  # Generate a custom diverging colormap\n  cmap = sns.diverging_palette(220, 10, as_cmap=True)\n\n  # Draw the heatmap with the mask and correct aspect ratio\n  sns.heatmap(\n      corr,\n      mask=mask,\n      cmap=cmap,\n      vmax=.3,\n      center=0,\n      square=True,\n      linewidths=.5,\n      cbar_kws={"shrink": .5})\n  fig.savefig(\n      FLAGS.plot_dir + "/correlations.pdf",\n      dpi=500,\n      format="pdf",\n      bbox_inches="tight")\n\n  print("Plotting hierarchical relations...")\n  z = linkage(\n      pdist(ratings.T, metric="correlation"),\n      method="ward",\n      optimal_ordering=True)\n  fig = plt.figure(figsize=(11, 4), dpi=400)\n  plt.xlabel("")\n  plt.ylabel("")\n  dendrogram(\n      z,\n      labels=ratings.columns,\n      leaf_rotation=90.,  # rotates the x axis labels\n      leaf_font_size=12,  # font size for the x axis labels\n      color_threshold=1.05,\n  )\n  fig.savefig(\n      FLAGS.plot_dir + "/hierarchical_clustering.pdf",\n      dpi=600,\n      format="pdf",\n      bbox_inches="tight")\n\n  sent_color_map = {\n      "positive": "#BEECAF",\n      "negative": "#94bff5",\n      "ambiguous": "#FFFC9E"\n  }\n  with open(FLAGS.sentiment_dict) as f:\n    sent_dict = json.loads(f.read())\n  sent_colors = {}\n  for e in all_emotions:\n    if e in sent_dict["positive"]:\n      sent_colors[e] = sent_color_map["positive"]\n    elif e in sent_dict["negative"]:\n      sent_colors[e] = sent_color_map["negative"]\n    else:\n      sent_colors[e] = sent_color_map["ambiguous"]\n\n  # Generate a mask for the diagonal\n  mask = np.zeros_like(corr, dtype=bool)\n  mask[np.diag_indices(mask.shape[0])] = True\n\n  # Generate a custom diverging colormap\n  cmap = sns.diverging_palette(220, 10, as_cmap=True)\n\n  row_colors = pd.Series(\n      corr.columns, index=corr.columns, name="sentiment").map(sent_colors)\n\n  # Draw the heatmap with the mask and correct aspect ratio\n  g = sns.clustermap(\n      corr,\n      mask=mask,\n      cmap=cmap,\n      vmax=.3,\n      vmin=-0.3,\n      center=0,\n      row_linkage=z,\n      col_linkage=z,\n      col_colors=row_colors,\n      linewidths=.1,\n      cbar_kws={\n          "ticks": [-.3, -.15, 0, .15, .3],\n          "use_gridspec": False,\n          "orientation": "horizontal",\n      },\n      figsize=(10, 10))\n\n  g.ax_row_dendrogram.set_visible(False)\n  g.cax.set_position([.34, -0.05, .5, .03])\n\n  for label in 
sent_color_map:\n    g.ax_col_dendrogram.bar(\n        0, 0, color=sent_color_map[label], label=label, linewidth=0)\n\n  g.ax_col_dendrogram.legend(\n      title="Sentiment", loc="center", bbox_to_anchor=(1.1, .5))\n\n  g.savefig(FLAGS.plot_dir + "/hierarchical_corr.pdf", dpi=600, format="pdf")\n\n  print("Calculating agreements...")\n  unique_labels = data.groupby("id").apply(CheckAgreement, 1,\n                                           all_emotions_neutral).to_dict()\n  data["unique_labels"] = data["id"].map(unique_labels)\n  agree_dict_2 = data.groupby("id").apply(CheckAgreement, 2,\n                                          all_emotions_neutral).to_dict()\n  data["agree_2"] = data["id"].map(agree_dict_2)\n  agree_dict = data.groupby("id").apply(CheckAgreement, 3,\n                                        all_emotions_neutral).to_dict()\n  data["agree_3"] = data["id"].map(agree_dict)\n  agree_dict = data.groupby("id").apply(CheckAgreement, 1, all_emotions_neutral,\n                                        1).to_dict()\n  data["no_agree"] = data["id"].map(agree_dict)\n\n  filtered_2 = data[data["agree_2"].str.len() > 0]\n  print(\n      "%d (%d%%) of the examples have 2+ raters agreeing on at least one emotion label"\n      % (len(filtered_2["id"].unique()), (len(filtered_2) / len(data) * 100)))\n\n  filtered_3 = data[data["agree_3"].str.len() > 0]\n  print(\n      "%d (%d%%) of the examples have 3+ raters agreeing on at least one emotion label"\n      % (len(filtered_3["id"].unique()), (len(filtered_3) / len(data) * 100)))\n\n  print("Plotting number of labels...")\n  data["num_unique_prefilter"] = data["unique_labels"].apply(CountLabels)\n  data["num_unique_postfilter"] = data["agree_2"].apply(CountLabels)\n  unique_ex = data.drop_duplicates("id")\n  df = pd.DataFrame({\n      "count":\n          unique_ex["num_unique_prefilter"].tolist() +\n          unique_ex["num_unique_postfilter"].tolist(),\n      "type": ["pre-filter"] * len(unique_ex) + ["post-filter"] * len(unique_ex)\n  })\n\n  fig = plt.figure(dpi=600)\n  ax = sns.countplot(\n      data=df, x="count", hue="type", palette=["skyblue", "navy"])\n  plt.xlim(-.5, 7.5)\n  plt.legend(loc="center right", fontsize="x-large")\n  plt.ylabel("Number of Examples", fontsize="x-large")\n  plt.xlabel("Number of Labels", fontsize="x-large")\n  plt.draw()\n  labels = [item.get_text() for item in ax.get_yticklabels()]\n  ax.set_yticklabels(["%dk" % (int(int(label) / 1000)) for label in labels])\n  plt.tight_layout()\n\n  fig.savefig(\n      FLAGS.plot_dir + "/number_of_labels.pdf",\n      dpi=600,\n      format="pdf",\n      bbox_inches="tight")\n\n  print("Proportion of agreement per label:")\n  print(\n      filtered_2[all_emotions_neutral].sum(axis=0).sort_values(ascending=False)\n      / len(data))\n\n\nif __name__ == "__main__":\n  app.run(main)\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n"""Classes for computing training losses given inputs and predictions."""\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport abc\n\nimport six\nimport tensorflow.compat.v1 as tf\n\nfrom 
depth_and_motion_learning import consistency_losses\nfrom depth_and_motion_learning import intrinsics_utils\nfrom depth_and_motion_learning import transform_depth_map\nfrom depth_and_motion_learning.losses import regularizers\nfrom depth_and_motion_learning.parameter_container import ParameterContainer\n\n\nclass LossAggregator(six.with_metaclass(abc.ABCMeta, object)):\n  """A base class for calculating losses from a set of tensors."""\n\n  def __init__(self, endpoints, weights_overrides=None, params_overrides=None):\n    """Creates an instance.\n\n    Args:\n      endpoints: A dictionary mapping strings to tf.Tensors, from which the loss\n        is to be computed.\n      weights_overrides: A dictionary containing overrides for\n        self._default_weights.\n      params_overrides: A dictionary containing overrides for\n        self._default_params.\n    """\n    self._weights = ParameterContainer.from_defaults_and_overrides(\n        self._default_weights, weights_overrides, is_strict=True)\n    self._params = ParameterContainer.from_defaults_and_overrides(\n        self._default_params, params_overrides, is_strict=True)\n    self._losses = {k: tf.convert_to_tensor(0.0) for k in self._default_weights}\n    self._endpoints = endpoints\n    self._output_endpoints = {}\n    self._calculate()\n\n  @abc.abstractmethod\n  def _calculate(self):\n    """Populate self._losses and self._output_endpoints.\n\n    To be implemented by subclasses.\n    """\n    pass\n\n  @property\n  def _default_weights(self):\n    """A dictionary that maps loss names (strings) to their weights (floats)."""\n    return {}\n\n  @property\n  def _default_params(self):\n    """A dictionary containing other parameters, if needed by sub-classes."""\n    return {}\n\n  @property\n  def losses(self):\n    return self._losses\n\n  @property\n  def output_endpoints(self):\n    return self._output_endpoints\n\n\nclass DepthMotionFieldLossAggregator(LossAggregator):\n  """A LossAggregator for depth maps and 3D motion fields."""\n\n  @property\n  def _default_weights(self):\n    return {\n        'rgb_consistency': 1.0,\n        'ssim': 3.0,\n        'depth_consistency': 0.0,\n        'depth_smoothing': 0.01,\n        'depth_supervision': 1.0,\n        'rotation_cycle_consistency': 1e-3,\n        'translation_cycle_consistency': 1e-2,\n        'depth_variance': 1e-6,\n        'motion_smoothing': 1e-3,\n        'motion_drift': 0.0\n    }\n\n  @property\n  def _default_params(self):\n    return {\n        'target_depth_stop_gradient': True,\n        'scale_normalization': False,\n        'num_scales': 1,\n    }\n\n  def _calculate(self):\n    # On TPU we strive to stack tensors together and perform ops once on the\n    # entire stack, to save time and HBM memory. We thus stack the batch-of-\n    # first-frames and the batch-of-second-frames, for both depth and RGB. The\n    # batch dimension of rgb_stack and gt_depth_stack is thus twice the\n    # original batch size.\n
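\n    # (Illustrative shape note: with batch size B and\n    # self._endpoints['rgb'] = [frames1, frames2], each of shape [B, H, W, 3],\n    # the tf.concat(..., axis=0) below yields rgb_stack_ of shape\n    # [2B, H, W, 3]; the [::-1] variant is the same stack with the two frame\n    # batches swapped.)\n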
\n    # Create stacks for features that need to be scaled into pyramids for\n    # multi-scale training.\n    rgb_stack_ = tf.concat(self._endpoints['rgb'], axis=0)\n    flipped_rgb_stack_ = tf.concat(self._endpoints['rgb'][::-1], axis=0)\n    predicted_depth_stack_ = tf.concat(\n        self._endpoints['predicted_depth'], axis=0)\n    flipped_predicted_depth_stack_ = tf.concat(\n        self._endpoints['predicted_depth'][::-1], axis=0)\n    residual_translation_ = tf.concat(\n        self._endpoints['residual_translation'], axis=0)\n    flipped_residual_translation_ = tf.concat(\n        self._endpoints['residual_translation'][::-1], axis=0)\n    intrinsics_mat_ = tf.concat(self._endpoints['intrinsics_mat'], axis=0)\n\n    # Create pyramids from each stack to support multi-scale training.\n    num_scales = self._params.num_scales\n    rgb_pyramid = _get_pyramid(rgb_stack_, num_scales=num_scales)\n    flipped_rgb_pyramid = _get_pyramid(\n        flipped_rgb_stack_, num_scales=num_scales)\n    predicted_depth_pyramid = _get_pyramid(\n        predicted_depth_stack_, num_scales=num_scales)\n    flipped_predicted_depth_pyramid = _get_pyramid(\n        flipped_predicted_depth_stack_, num_scales=num_scales)\n    residual_translation_pyramid = _get_pyramid(\n        residual_translation_, num_scales=num_scales)\n    flipped_residual_translation_pyramid = _get_pyramid(\n        flipped_residual_translation_, num_scales=num_scales)\n    intrinsics_mat_pyramid = _get_intrinsics_mat_pyramid(\n        intrinsics_mat_, num_scales=num_scales)\n    validity_mask_ = self._endpoints.get('validity_mask')\n    if validity_mask_ is not None:\n      validity_mask_ = tf.concat(validity_mask_, axis=0)\n      validity_mask_pyramid = _get_pyramid(\n          validity_mask_, num_scales, _min_pool2d)\n    else:\n      validity_mask_pyramid = [None] * num_scales\n\n    if 'groundtruth_depth' in self._endpoints:\n      gt_depth_stack_ = tf.concat(self._endpoints['groundtruth_depth'], axis=0)\n      gt_depth_pyramid = _get_pyramid(gt_depth_stack_, num_scales=num_scales)\n      if 'groundtruth_depth_weight' in self._endpoints:\n        gt_depth_weight_stack_ = tf.concat(\n            self._endpoints['groundtruth_depth_weight'], axis=0)\n      else:\n        gt_depth_weight_stack_ = tf.cast(\n            tf.greater(gt_depth_stack_, 0.2), tf.float32)\n      gt_depth_weight_pyramid = _get_pyramid(\n          gt_depth_weight_stack_, num_scales=num_scales)\n\n      if 'groundtruth_depth_filter' in self._endpoints:\n        depth_filter_ = tf.concat(\n            self._endpoints['groundtruth_depth_filter'], axis=0)\n        depth_filter_ = tf.cast(depth_filter_, tf.float32)\n        depth_filter_pyramid = _get_pyramid(\n            depth_filter_, num_scales=num_scales)\n\n    # Calculate losses at each scale. Iterate in reverse so that the final\n    # output values are set at scale 0.\n    for s in reversed(range(self._params.num_scales)):\n      # Weight applied to all losses at this scale.\n      scale_w = 1.0 / 2**s\n\n      rgb_stack = rgb_pyramid[s]\n      predicted_depth_stack = predicted_depth_pyramid[s]\n      flipped_predicted_depth_stack = flipped_predicted_depth_pyramid[s]\n\n      if 'groundtruth_depth' in self._endpoints:\n        gt_depth_stack = gt_depth_pyramid[s]\n        depth_error = tf.abs(gt_depth_stack - predicted_depth_stack)\n\n        # Weigh the spatial loss if a weight map is provided. Otherwise, revert\n        # to original behavior.\n
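        # (Illustrative aside: up to the scale weight scale_w, the supervision\n        # term below reduces to mean(|gt - pred| * w * f) / mean(f), with w\n        # the weight map and f the binary filter, so filtered-out pixels\n        # affect neither the numerator nor the normalizer.)\n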
        gt_depth_weight_stack = gt_depth_weight_pyramid[s]\n        depth_error = depth_error * gt_depth_weight_stack\n\n        # Optionally filter the depth map if a boolean depth filter is provided.\n        # We use a TPU-friendly equivalent of tf.boolean_mask.\n        depth_filter = tf.ones_like(depth_error, tf.float32)\n        if 'groundtruth_depth_filter' in self._endpoints:\n          depth_filter = depth_filter_pyramid[s]\n\n        self._losses['depth_supervision'] += scale_w * tf.reduce_mean(\n            depth_error * depth_filter) / tf.reduce_mean(depth_filter)\n\n      # In theory, the training losses should be agnostic to the global scale of\n      # the predicted depth. However in reality second order effects can lead to\n      # diverging modes\n      # (https://en.wikipedia.org/wiki/Von_Neumann_stability_analysis).\n      # For some reason this happens when training on TPU. Since the\n      # scale is immaterial anyway, we normalize it out, and the training\n      # stabilizes.\n      #\n      # Note that the depth supervision term, which is sensitive to the scale,\n      # was applied before this normalization. Therefore the scale of the depth\n      # is learned.\n      mean_depth = tf.reduce_mean(predicted_depth_stack)\n\n      # When training starts, the depth sometimes tends to collapse to a\n      # constant value, which seems to be a fixed point where the training can\n      # get stuck. To discourage this collapse, we penalize the reciprocal of\n      # the variance with a tiny weight. Note that the mean of\n      # predicted_depth / mean_depth is one, hence we subtract 1.0.\n      depth_var = tf.reduce_mean(\n          tf.square(predicted_depth_stack / mean_depth - 1.0))\n      self._losses['depth_variance'] = scale_w * 1.0 / depth_var\n\n      if self._params.scale_normalization:\n        predicted_depth_stack /= mean_depth\n        flipped_predicted_depth_stack /= mean_depth\n\n      disp = 1.0 / predicted_depth_stack\n\n      mean_disp = tf.reduce_mean(disp, axis=[1, 2, 3], keep_dims=True)\n      self._losses['depth_smoothing'] += (\n          scale_w *\n          regularizers.joint_bilateral_smoothing(disp / mean_disp, rgb_stack))\n      self._output_endpoints['disparity'] = disp\n\n      flipped_rgb_stack = flipped_rgb_pyramid[s]\n\n      background_translation = tf.concat(\n          self._endpoints['background_translation'], axis=0)\n      flipped_background_translation = tf.concat(\n          self._endpoints['background_translation'][::-1], axis=0)\n      residual_translation = residual_translation_pyramid[s]\n      flipped_residual_translation = flipped_residual_translation_pyramid[s]\n      if self._params.scale_normalization:\n        background_translation /= mean_depth\n        flipped_background_translation /= mean_depth\n        residual_translation /= mean_depth\n        flipped_residual_translation /= mean_depth\n      translation = residual_translation + background_translation\n      flipped_translation = (\n          flipped_residual_translation + flipped_background_translation)\n\n      rotation = tf.concat(self._endpoints['rotation'], axis=0)\n      flipped_rotation = tf.concat(self._endpoints['rotation'][::-1], axis=0)\n      intrinsics_mat = intrinsics_mat_pyramid[s]\n      intrinsics_mat_inv = intrinsics_utils.invert_intrinsics_matrix(\n          intrinsics_mat)\n      validity_mask = validity_mask_pyramid[s]\n\n      transformed_depth = transform_depth_map.using_motion_vector(\n          tf.squeeze(predicted_depth_stack, axis=-1), translation, rotation,\n          intrinsics_mat, intrinsics_mat_inv)\n      flipped_predicted_depth_stack = tf.squeeze(\n          flipped_predicted_depth_stack, axis=-1)\n      if self._params.target_depth_stop_gradient:\n        flipped_predicted_depth_stack = tf.stop_gradient(\n            flipped_predicted_depth_stack)\n
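\n      # (Illustrative note: each half of transformed_depth is one frame's\n      # depth map resampled into the other frame's camera using the predicted\n      # rotation and translation for that direction.)\n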
      # The first and second halves of the batch now contain Frame1's and\n      # Frame2's depths transformed onto Frame2 and Frame1 respectively. To\n      # demand consistency, we need to `flip` `predicted_depth` as well.\n      loss_endpoints = (\n          consistency_losses.rgbd_and_motion_consistency_loss(\n              transformed_depth,\n              rgb_stack,\n              flipped_predicted_depth_stack,\n              flipped_rgb_stack,\n              rotation,\n              translation,\n              flipped_rotation,\n              flipped_translation,\n              validity_mask=validity_mask))\n\n      normalized_trans = regularizers.normalize_motion_map(\n          residual_translation, translation)\n      self._losses['motion_smoothing'] += scale_w * regularizers.l1smoothness(\n          normalized_trans, self._weights.motion_drift == 0)\n      self._losses['motion_drift'] += scale_w * regularizers.sqrt_sparsity(\n          normalized_trans)\n      self._losses['depth_consistency'] += (\n          scale_w * loss_endpoints['depth_error'])\n      self._losses['rgb_consistency'] += scale_w * loss_endpoints['rgb_error']\n      self._losses['ssim'] += scale_w * 0.5 * loss_endpoints['ssim_error']\n\n      self._losses['rotation_cycle_consistency'] += (\n          scale_w * loss_endpoints['rotation_error'])\n      self._losses['translation_cycle_consistency'] += (\n          scale_w * loss_endpoints['translation_error'])\n\n      self._output_endpoints['depth_proximity_weight'] = loss_endpoints[\n          'depth_proximity_weight']\n      self._output_endpoints['trans'] = translation\n      self._output_endpoints['inv_trans'] = flipped_translation\n\n    for k, w in self._weights.as_dict().items():\n      # Multiply by 2 to match the scale of the old code.\n      self._losses[k] *= w * 2\n\n    if tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES):\n      self._losses[tf.GraphKeys.REGULARIZATION_LOSSES] = tf.add_n(\n          tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n\n\ndef _get_intrinsics_mat_pyramid(intrinsics_mat, num_scales):\n  """Returns multiple intrinsic matrices for different scales.\n\n  Args:\n    intrinsics_mat: <float32>[B, 3, 3] tensor containing the intrinsics matrix\n      at the original scale.\n    num_scales: integer indicating *total* number of matrices to return. If\n      `num_scales` is 1, the function just returns the input matrix in a list.\n\n  Returns:\n    List containing `num_scales` intrinsics matrices, each with shape\n    <float32>[B, 3, 3]. The first element in the list is the input\n    intrinsics matrix and the last element is the intrinsics matrix for the\n    coarsest scale.\n
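\n    (For instance, assuming the 2x downscaling per level implemented below,\n    a camera with fx = 100 at scale 0 has fx = 50 at scale 1 and fx = 25 at\n    scale 2; cx and cy shrink by the same factors.)\n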
  """\n  # intrinsics_mat: [B, 3, 3]\n  intrinsics_mat_pyramid = [intrinsics_mat]\n  # Scale the intrinsics accordingly for each scale.\n  for s in range(1, num_scales):\n    fx = intrinsics_mat[:, 0, 0] / 2**s\n    fy = intrinsics_mat[:, 1, 1] / 2**s\n    cx = intrinsics_mat[:, 0, 2] / 2**s\n    cy = intrinsics_mat[:, 1, 2] / 2**s\n    intrinsics_mat_pyramid.append(_make_intrinsics_matrix(fx, fy, cx, cy))\n  return intrinsics_mat_pyramid\n\n\ndef _make_intrinsics_matrix(fx, fy, cx, cy):\n  """Constructs a batch of intrinsics matrices given arguments.\n\n  Args:\n    fx: <float32>[B] tensor containing horizontal focal length.\n    fy: <float32>[B] tensor containing vertical focal length.\n    cx: <float32>[B] tensor containing horizontal principal offset.\n    cy: <float32>[B] tensor containing vertical principal offset.\n\n  Returns:\n    <float32>[B, 3, 3] tensor containing batch of intrinsics matrices.\n  """\n  # fx, fy, cx, cy: [B]\n  zeros = tf.zeros_like(fx)\n  ones = tf.ones_like(fx)\n  r1 = tf.stack([fx, zeros, cx], axis=-1)\n  r2 = tf.stack([zeros, fy, cy], axis=-1)\n  r3 = tf.stack([zeros, zeros, ones], axis=-1)\n  intrinsics = tf.stack([r1, r2, r3], axis=1)\n  return intrinsics\n\n\ndef _min_pool2d(input_, ksize, strides, padding):\n  return -tf.nn.max_pool_v2(-input_, ksize, strides, padding)\n\n\ndef _get_pyramid(img, num_scales, pooling_fn=tf.nn.avg_pool2d):\n  """Generates a pyramid from the input image/tensor at different scales.\n\n  This function behaves similarly to `tfg.image.pyramid.split()`. Instead of\n  using an image resize operation, it uses average pooling to give each\n  input pixel equal weight in constructing coarser scales.\n\n  Args:\n    img: [B, height, width, C] tensor, where B stands for batch size and C\n      stands for number of channels.\n    num_scales: integer indicating *total* number of scales to return. If\n      `num_scales` is 1, the function just returns the input image in a list.\n    pooling_fn: A callable with tf.nn.avg_pool2d's signature, to be used for\n      pooling `img` across scales.\n\n  Returns:\n    List containing `num_scales` tensors with shapes\n    [B, height / 2^s, width / 2^s, C] where s is in [0, num_scales - 1]. 
The\n first element in the list is the input image and the last element is the\n resized input corresponding to the coarsest scale.\n \"\"\"\n pyramid = [img]\n for _ in range(1, num_scales):\n # Scale image stack.\n last_img = pyramid[-1]\n scaled_img = pooling_fn(\n last_img, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID')\n pyramid.append(scaled_img)\n return pyramid\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for research.biology.chemgraph.mcts.molecules.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom six.moves import range\nimport tensorflow.compat.v1 as tf\n\nfrom mol_dqn.chemgraph.dqn import molecules\n\n\nclass MoleculesTest(tf.test.TestCase):\n\n def test_empty_init(self):\n mol = molecules.Molecule({'C', 'O'})\n mol.initialize()\n self.assertSetEqual(mol.get_valid_actions(), {'C', 'O'})\n\n def test_empty_action(self):\n mol = molecules.Molecule({'C', 'O'})\n mol.initialize()\n result = mol.step('C')\n self.assertEqual(result.state, 'C')\n self.assertEqual(result.reward, 0)\n self.assertEqual(result.terminated, False)\n\n def test_benzene_init(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1')\n mol.initialize()\n self.assertSetEqual(\n mol.get_valid_actions(),\n {'Oc1ccccc1', 'c1ccccc1', 'Cc1ccccc1', 'c1cc2cc-2c1', 'c1cc2ccc1-2'})\n\n def test_benzene_action(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1')\n mol.initialize()\n result = mol.step('Cc1ccccc1')\n self.assertEqual(result.state, 'Cc1ccccc1')\n self.assertEqual(result.reward, 0)\n self.assertEqual(result.terminated, False)\n\n def test_ethane_init(self):\n mol = molecules.Molecule({'C', 'O'}, 'CC')\n mol.initialize()\n self.assertSetEqual(\n mol.get_valid_actions(),\n {'CC', 'C=C', 'CCC', 'C#CC', 'CCO', 'CC=O', 'C', 'C=CC', 'C#C'})\n\n def test_cyclobutane_init(self):\n # We want to know that it is possible to form another\n # ring when there is one ring present.\n mol = molecules.Molecule({'C', 'O'}, 'C1CCC1')\n mol.initialize()\n self.assertSetEqual(\n mol.get_valid_actions(), {\n 'C1CCC1', 'C=C1CCC1', 'C1C2CC12', 'C1=CCC1', 'CCCC', 'O=C1CCC1',\n 'CC1CCC1', 'OC1CCC1', 'C1#CCC1', 'C1C2=C1C2'\n })\n\n def test_do_not_allow_removal(self):\n mol = molecules.Molecule({'C', 'O'}, 'CC', allow_removal=False)\n mol.initialize()\n self.assertSetEqual(\n mol.get_valid_actions(),\n {'CC', 'CCC', 'C#CC', 'CCO', 'CC=O', 'C=CC', 'C=C', 'C#C'})\n\n def test_do_not_allow_no_modification(self):\n mol = molecules.Molecule({'C', 'O'}, 'C#C', allow_no_modification=False)\n mol.initialize()\n actions_noallow_no_modification = mol.get_valid_actions()\n mol = molecules.Molecule({'C', 'O'}, 'C#C', allow_no_modification=True)\n mol.initialize()\n actions_allow_no_modification = mol.get_valid_actions()\n self.assertSetEqual(\n {'C#C'},\n actions_allow_no_modification - actions_noallow_no_modification)\n\n def test_do_not_allow_bonding_between_rings(self):\n 
atom_types = {'C'}\n start_smiles = 'CC12CC1C2'\n mol = molecules.Molecule(\n atom_types, start_smiles, allow_bonds_between_rings=True)\n mol.initialize()\n actions_true = mol.get_valid_actions()\n mol = molecules.Molecule(\n atom_types, start_smiles, allow_bonds_between_rings=False)\n mol.initialize()\n actions_false = mol.get_valid_actions()\n\n self.assertSetEqual({'CC12C3C1C32', 'CC12C3=C1C32'},\n actions_true - actions_false)\n\n def test_limited_ring_formation(self):\n atom_types = {'C'}\n start_smiles = 'CCCCC'\n mol = molecules.Molecule(\n atom_types, start_smiles, allowed_ring_sizes={3, 4, 5})\n mol.initialize()\n actions_allow_5_member_ring = mol.get_valid_actions()\n mol = molecules.Molecule(\n atom_types, start_smiles, allowed_ring_sizes={3, 4})\n mol.initialize()\n actions_do_not_allow_5_member_ring = mol.get_valid_actions()\n\n self.assertSetEqual(\n {'C1CCCC1', 'C1#CCCC1', 'C1=CCCC1'},\n actions_allow_5_member_ring - actions_do_not_allow_5_member_ring)\n\n def test_initialize(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1', record_path=True)\n mol.initialize()\n # Test if the molecule is correctly initialized.\n self.assertEqual(mol.state, 'c1ccccc1')\n self.assertEqual(mol.num_steps_taken, 0)\n self.assertListEqual(mol.get_path(), ['c1ccccc1'])\n # Take a step\n result = mol.step('Cc1ccccc1')\n self.assertEqual(result.state, 'Cc1ccccc1')\n self.assertEqual(result.reward, 0)\n self.assertListEqual(mol.get_path(), ['c1ccccc1', 'Cc1ccccc1'])\n # Test if the molecule is reset to its initial state.\n mol.initialize()\n self.assertEqual(mol.state, 'c1ccccc1')\n self.assertEqual(mol.num_steps_taken, 0)\n self.assertListEqual(mol.get_path(), ['c1ccccc1'])\n\n def test_state_transition(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1')\n mol.initialize()\n result = mol.step('Cc1ccccc1')\n self.assertEqual(result.state, 'Cc1ccccc1')\n self.assertEqual(result.reward, 0)\n self.assertEqual(result.terminated, False)\n self.assertEqual(mol.state, 'Cc1ccccc1')\n self.assertEqual(mol.num_steps_taken, 1)\n\n def test_invalid_actions(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1')\n mol.initialize()\n with self.assertRaisesRegexp(ValueError, 'Invalid action.'):\n mol.step('C')\n\n def test_episode_not_started(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1')\n with self.assertRaisesRegexp(ValueError, 'This episode is terminated.'):\n mol.step('Cc1ccccc1')\n\n def test_end_episode(self):\n mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1', max_steps=3)\n mol.initialize()\n for _ in range(3):\n action = mol.get_valid_actions().pop()\n result = mol.step(action)\n self.assertEqual(result.terminated, True)\n with self.assertRaisesRegexp(ValueError, 'This episode is terminated.'):\n mol.step(mol.get_valid_actions().pop())\n\n def test_goal_settings(self):\n mol = molecules.Molecule(\n {'C', 'O'}, 'c1ccccc1', target_fn=lambda x: x == 'Cc1ccccc1')\n mol.initialize()\n result = mol.step('Cc1ccccc1')\n self.assertEqual(result.state, 'Cc1ccccc1')\n self.assertEqual(result.reward, 0)\n self.assertEqual(result.terminated, True)\n with self.assertRaisesRegexp(ValueError, 'This episode is terminated.'):\n mol.step(mol.get_valid_actions().pop())\n\n def test_reward_settings(self):\n\n class TargetedMolecule(molecules.Molecule):\n\n def _reward(self):\n return int(self._state == 'Cc1ccccc1')\n\n mol = TargetedMolecule({'C', 'O'}, 'c1ccccc1')\n mol.initialize()\n result = mol.step('Cc1ccccc1')\n self.assertEqual(result.state, 'Cc1ccccc1')\n self.assertEqual(result.reward, 1)\n 
self.assertEqual(result.terminated, False)\n\n  def test_image_generation(self):\n    mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1', max_steps=3)\n    mol.initialize()\n    image = mol.visualize_state()\n    del image\n\n  def test_record(self):\n    mol = molecules.Molecule({'C', 'O'}, 'c1ccccc1', record_path=True)\n    mol.initialize()\n    mol.step('Cc1ccccc1')\n    mol.step('CCc1ccccc1')\n    mol.step('Cc1ccccc1')\n    mol.step('c1ccccc1')\n    self.assertListEqual(\n        mol.get_path(),\n        ['c1ccccc1', 'Cc1ccccc1', 'CCc1ccccc1', 'Cc1ccccc1', 'c1ccccc1'])\n\n  def test_more_than_three_possible_bonds(self):\n    mol = molecules.Molecule({'C', 'S'})\n    mol.initialize()\n    mol.step('C')\n\n\nif __name__ == '__main__':\n  tf.test.main()\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n"""Synthetic sequence problems.\n\nThese problems take a sequence of onehot encoded tokens, and predict another\nsequence of tokens.\n\nSee copy_sequence, and associative_sequence in ../datasets.py for a description\nof the problems used.\n"""\n\nimport functools\nfrom typing import Callable\nimport sonnet as snt\n\nfrom task_set import datasets\nfrom task_set import registry\nfrom task_set.tasks import base\nimport tensorflow.compat.v1 as tf\n\n\ndef sequence_to_sequence_rnn(\n    core_fn):\n  """An RNN based model for sequence to sequence prediction.\n\n  This module takes a batch of data containing:\n  * input: a [batch_size, seq_length, feature] onehot tensor.\n  * output: a [batch_size, seq_length, feature] onehot tensor.\n  * loss_mask: a [batch_size, seq_length] tensor.\n\n  The input sequence is passed through an RNN, then a linear layer mapping to\n  the prediction dimension specified by the output. A cross entropy loss is\n  then computed comparing the predicted output with the actual outputs. 
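(Per\n  flattened position i, this is ce[i] = -sum_c output[i, c] * log p[i, c],\n  with p the softmax of the predicted logits.)\n  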
A weighted\n average is then done using weights specified by the loss_mask.\n\n Args:\n core_fn: A fn that returns a sonnet RNNCore.\n\n Returns:\n A Callable that returns a snt.Module.\n \"\"\"\n\n def _build(batch):\n \"\"\"Build the sonnet module.\"\"\"\n rnn = core_fn()\n\n initial_state = rnn.initial_state(batch[\"input\"].shape[0])\n outputs, _ = tf.nn.dynamic_rnn(\n rnn,\n batch[\"input\"],\n initial_state=initial_state,\n dtype=tf.float32,\n time_major=False)\n\n pred_logits = snt.BatchApply(snt.Linear(batch[\"output\"].shape[2]))(outputs)\n\n flat_shape = [\n pred_logits.shape[0] * pred_logits.shape[1], pred_logits.shape[2]\n ]\n flat_pred_logits = tf.reshape(pred_logits, flat_shape)\n flat_actual_tokens = tf.reshape(batch[\"output\"], flat_shape)\n flat_mask = tf.reshape(batch[\"loss_mask\"], [flat_shape[0]])\n\n loss_vec = tf.nn.softmax_cross_entropy_with_logits_v2(\n labels=flat_actual_tokens, logits=flat_pred_logits)\n total_loss = tf.reduce_sum(flat_mask * loss_vec)\n mean_loss = total_loss / tf.reduce_sum(flat_mask)\n\n return mean_loss\n\n return lambda: snt.Module(_build)\n\n_rnn_mod_map = {\n \"LSTM\": snt.LSTM,\n \"GRU\": snt.GRU,\n \"VRNN\": snt.VanillaRNN,\n}\n\n# pylint: disable=bad-whitespace\n_cfgs = [\n (\"LSTM\", 128, 128, 5, 20,),\n (\"LSTM\", 128, 128, 20, 50,),\n (\"LSTM\", 256, 128, 40, 100,),\n\n (\"LSTM\", 128, 128, 10, 50,),\n (\"GRU\", 128, 128, 10, 50,),\n (\"VRNN\", 128, 128, 10, 50,),\n\n (\"LSTM\", 256, 128, 20, 50,),\n (\"GRU\", 256, 128, 20, 50,),\n (\"VRNN\", 256, 128, 20, 50,),\n]\n# pylint: enable=bad-whitespace\n\n\ndef _make_associative_name(c):\n return \"Associative_%s%d_BS%d_Pairs%d_Tokens%d\" % c\n\n\ndef associative_fn(c):\n base_model_fn = sequence_to_sequence_rnn(lambda: _rnn_mod_map[c[0]](c[1]))\n return base.DatasetModelTask(\n base_model_fn,\n datasets.associative_sequence(c[2], num_pairs=c[3], num_tokens=c[4]))\n\n\nfor _cfg in _cfgs:\n registry.task_registry.register_fixed(_make_associative_name(_cfg))(\n functools.partial(associative_fn, _cfg))\n\n\n# pylint: disable=bad-whitespace\n_cfgs = [\n (\"LSTM\", 128, 128, 5, 10,),\n (\"LSTM\", 128, 128, 20, 20,),\n (\"LSTM\", 128, 128, 50, 5,),\n\n (\"LSTM\", 128, 128, 20, 10,),\n (\"GRU\", 128, 128, 20, 10,),\n (\"VRNN\", 128, 128, 20, 10,),\n\n (\"LSTM\", 256, 128, 40, 50,),\n (\"GRU\", 256, 128, 40, 50,),\n (\"VRNN\", 256, 128, 40, 50,),\n]\n# pylint: enable=bad-whitespace\n\n\ndef _make_copy_name(c):\n return \"Copy_%s%d_BS%d_Length%d_Tokens%d\" % c\n\n\ndef copy_fn(c):\n base_model_fn = sequence_to_sequence_rnn(lambda: _rnn_mod_map[c[0]](c[1]))\n return base.DatasetModelTask(\n base_model_fn,\n datasets.copy_sequence(\n c[2], sequence_length=c[3], num_separator=1, num_tokens=c[4]))\n\n\nfor _cfg in _cfgs:\n registry.task_registry.register_fixed(_make_copy_name(_cfg))(\n functools.partial(copy_fn, _cfg))\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for supcon.losses.\"\"\"\nfrom absl.testing import 
parameterized\nimport numpy as np\nimport tensorflow.compat.v2 as tf\n\nfrom supcon import enums\nfrom supcon import losses\n\n\nclass LossesTest(tf.test.TestCase, parameterized.TestCase):\n\n def testKerasImplementation(self):\n features = np.random.uniform(0., 1., (12, 2, 20))\n labels = np.eye(12, 15, dtype=np.int32)\n loss = losses.ContrastiveLoss()(labels, features)\n self.assertEqual(loss.shape, ())\n self.assertFalse(np.isnan(loss.numpy()))\n\n def testKerasLossVsNonKerasLoss(self):\n features = np.random.uniform(0., 1., size=(12, 2, 20))\n labels = np.eye(12, 15, dtype=np.int32)\n loss_keras = losses.ContrastiveLoss()(labels, features)\n loss_direct = tf.reduce_mean(\n losses.contrastive_loss(features, labels=labels))\n self.assertFalse(np.isnan(loss_direct.numpy()))\n self.assertFalse(np.isnan(loss_keras.numpy()))\n self.assertEqual(loss_direct.numpy(), loss_keras.numpy())\n\n def testIncorrectFeaturesRank(self):\n features = np.zeros([1, 1])\n with self.assertRaisesRegex(ValueError, 'Invalid features rank'):\n losses.contrastive_loss(features)\n\n def testUnknownBatchSizeDimension(self):\n features = tf.keras.layers.Input(\n dtype=tf.float32, batch_size=None, shape=(2, 20))\n with self.assertRaisesRegex(ValueError, 'features has unknown batch_size'):\n losses.contrastive_loss(features)\n\n def testUnknownNumViewsDimension(self):\n features = tf.keras.layers.Input(\n dtype=tf.float32, batch_size=1, shape=(None, 20))\n with self.assertRaisesRegex(ValueError, 'features has unknown num_views'):\n losses.contrastive_loss(features)\n\n def testIncorrectLabelsShape(self):\n features = np.random.uniform(0., 1., size=(10, 3, 20))\n labels = np.random.randint(1, size=(5))\n with self.assertRaisesRegex(ValueError, 'Invalid labels shape'):\n losses.contrastive_loss(features, labels=labels)\n\n def testIncorrectLabelsRank(self):\n features = np.random.uniform(0., 1., size=(10, 3, 20))\n labels = np.random.randint(5, size=(4, 4))\n with self.assertRaisesRegex(ValueError, 'Invalid labels shape'):\n losses.contrastive_loss(features, labels=labels)\n\n def testUnknownContrastMode(self):\n features = np.random.uniform(size=(10, 3, 20))\n labels = np.eye(10, dtype=np.int32)\n with self.assertRaisesRegex(ValueError, 'Invalid contrast_mode'):\n losses.contrastive_loss(features, labels, contrast_mode='invalid')\n\n def testUnknownSummationLocation(self):\n features = np.random.uniform(size=(10, 3, 20))\n labels = np.eye(10, dtype=np.int32)\n with self.assertRaisesRegex(ValueError, 'Invalid summation_location'):\n losses.contrastive_loss(features, labels, summation_location='invalid')\n\n def testUnknownDenominatorMode(self):\n features = np.random.uniform(size=(10, 3, 20))\n labels = np.eye(10, dtype=np.int32)\n with self.assertRaisesRegex(ValueError, 'Invalid denominator_mode'):\n losses.contrastive_loss(features, labels, denominator_mode='invalid')\n\n def testDefaultBehaviourSameAsAllLabelsDifferent(self):\n features = np.random.uniform(size=(10, 3, 20))\n labels = np.eye(10, dtype=np.int64)\n loss = tf.reduce_mean(losses.contrastive_loss(features))\n loss_without_labels = tf.reduce_mean(\n losses.contrastive_loss(features, labels))\n self.assertFalse(np.isnan(loss.numpy()))\n self.assertFalse(np.isnan(loss_without_labels.numpy()))\n self.assertEqual(loss.numpy(), loss_without_labels.numpy())\n\n def testContrastModeOneVsAll(self):\n # shape (2, 2, 3)\n features = np.array([[[0, 0, 1], [0, 1, 0]], [[1., 0., 0.], [0., -1., 0.]]])\n loss_one = tf.reduce_mean(\n losses.contrastive_loss(\n features, 
contrast_mode=enums.LossContrastMode.ONE_VIEW))\n self.assertFalse(np.isnan(loss_one.numpy()))\n expected_loss = 1.098612 # np.log(3.)\n self.assertAlmostEqual(np.mean(loss_one.numpy()), expected_loss, places=6)\n loss_all = tf.reduce_mean(\n losses.contrastive_loss(\n features, contrast_mode=enums.LossContrastMode.ALL_VIEWS))\n self.assertFalse(np.isnan(loss_all.numpy()))\n self.assertNotAlmostEqual(\n np.mean(loss_all.numpy()), expected_loss, places=6)\n\n def testLossValue(self):\n sqrt2 = np.sqrt(2.)\n sqrt6 = np.sqrt(6.)\n features = np.array([[[0, 0, 1], [0, (2. * sqrt2) / 3., -1 / 3.]],\n [[sqrt6 / 3., -sqrt2 / 3., -1. / 3],\n [-sqrt6 / 3., -sqrt2 / 3., -1. / 3]]])\n loss = losses.contrastive_loss(features)\n self.assertFalse(np.isnan(loss.numpy()).any())\n expected_loss = 1.098612 # np.log(3.)\n self.assertAlmostEqual(np.mean(loss.numpy()), expected_loss, places=6)\n\n def testLossValueWithLabels(self):\n sqrt2 = np.sqrt(2.)\n sqrt6 = np.sqrt(6.)\n features = np.array([[[0, 0, 1], [0, (2. * sqrt2) / 3., -1 / 3.]],\n [[sqrt6 / 3., -sqrt2 / 3., -1. / 3],\n [-sqrt6 / 3., -sqrt2 / 3., -1. / 3]]])\n labels = np.eye(2, dtype=np.int32)\n loss = losses.contrastive_loss(features, labels=labels)\n self.assertFalse(np.isnan(loss.numpy()).any())\n expected_loss = 1.098612 # np.log(3.)\n self.assertAlmostEqual(np.mean(loss.numpy()), expected_loss, places=6)\n\n def testLossValueWithLabelsAndPositives(self):\n features = np.array([[[0, 0, 1], [0, 0, 1]], [[0, 1, 0], [0, 1, 0]],\n [[1, 0, 0], [1, 0, 0]]])\n labels = np.eye(3, dtype=np.int32)\n # Make the label of sample 1 and 2 the same (= label 0)\n labels[1] = labels[0]\n loss = losses.contrastive_loss(features, labels).numpy()\n self.assertFalse(np.isnan(loss).any())\n expected_loss = [\n 1.57149910, # (3. * np.log(np.e + 4) - 1) / 3.\n 1.57149910, # (3. * np.log(np.e + 4) - 1) / 3.\n 0.90483244, # np.log(np.e + 4) - 1\n ]\n self.assertAlmostEqual(loss[0], expected_loss[0], places=6)\n self.assertAlmostEqual(loss[1], expected_loss[1], places=6)\n self.assertAlmostEqual(loss[2], expected_loss[2], places=6)\n\n def testLossValueWithTemp(self):\n sqrt2 = np.sqrt(2.)\n sqrt6 = np.sqrt(6.)\n features = np.array([[[0, 0, 1], [0, (2. * sqrt2) / 3., -1 / 3.]],\n [[sqrt6 / 3., -sqrt2 / 3., -1. / 3],\n [-sqrt6 / 3., -sqrt2 / 3., -1. / 3]]])\n loss = losses.contrastive_loss(features, temperature=0.1)\n self.assertFalse(np.isnan(loss.numpy()).any())\n expected_loss = 0.1098612 # 0.1 * np.log(3.)\n self.assertAlmostEqual(np.mean(loss.numpy()), expected_loss, places=5)\n\n def testLossValueWithTempNoScaleByTemp(self):\n sqrt2 = np.sqrt(2.)\n sqrt6 = np.sqrt(6.)\n features = np.array([[[0, 0, 1], [0, (2. * sqrt2) / 3., -1 / 3.]],\n [[sqrt6 / 3., -sqrt2 / 3., -1. / 3],\n [-sqrt6 / 3., -sqrt2 / 3., -1. 
/ 3]]])\n    loss = losses.contrastive_loss(\n        features, temperature=0.1, scale_by_temperature=False)\n    self.assertFalse(np.isnan(loss.numpy()).any())\n    expected_loss = 1.098612  # np.log(3.)\n    self.assertAlmostEqual(np.mean(loss.numpy()), expected_loss, places=5)\n\n  @parameterized.named_parameters(('1x1 features', (10, 3, 1, 1, 64)),\n                                  ('3x3 features', (10, 3, 3, 3, 8)),\n                                  ('16x16 features', (10, 3, 16, 16, 4)),\n                                  ('rank-3 features', (10, 3, 16, 8)))\n  def testConvFeatures(self, features_shape):\n    features_shape = tf.TensorShape(features_shape)\n    features = tf.random.uniform(shape=features_shape)\n    # Normalize embeddings to ensure the Loss does not return NaN values\n    # for large feature sizes.\n    normalization_axes = list(range(2, features_shape.rank))\n    normalized_features = tf.nn.l2_normalize(features, axis=normalization_axes)\n    loss = tf.reduce_mean(losses.contrastive_loss(normalized_features))\n    self.assertFalse(np.isnan(loss.numpy()))\n\n  @parameterized.named_parameters(\n      # The following values have all been manually checked to be the correct\n      # outputs given the inputs in the test.\n      ('out_and_all', enums.LossSummationLocation.OUTSIDE,\n       enums.LossDenominatorMode.ALL, -1, [1.6755852, 1.7973773, 1.58471]),\n      ('out_and_one', enums.LossSummationLocation.OUTSIDE,\n       enums.LossDenominatorMode.ONE_POSITIVE, -1,\n       [1.1782627, 1.3435497, 1.58471]),\n      ('out_and_none', enums.LossSummationLocation.OUTSIDE,\n       enums.LossDenominatorMode.ONLY_NEGATIVES, -1,\n       [0.79652834, 1.0154991, 1.3529458]),\n      ('out_and_large_cap', enums.LossSummationLocation.OUTSIDE,\n       enums.LossDenominatorMode.ALL, 4, [1.6755852, 1.7973773, 1.58471]),\n      ('out_and_small_cap', enums.LossSummationLocation.OUTSIDE,\n       enums.LossDenominatorMode.ALL, 2, [1.1385298, 1.1824025, 1.1557417],\n       (0, 0, 0)),\n      ('out_and_zero_cap', enums.LossSummationLocation.OUTSIDE,\n       enums.LossDenominatorMode.ALL, 0, [1.3769293, 1.0785717, 1.58471]),\n      ('in_and_all', enums.LossSummationLocation.INSIDE,\n       enums.LossDenominatorMode.ALL, -1, [1.6356678, 1.7135872, 1.58471]),\n      ('in_and_one', enums.LossSummationLocation.INSIDE,\n       enums.LossDenominatorMode.ONE_POSITIVE, -1,\n       [1.1571673, 1.2941568, 1.58471]),\n      ('in_and_none', enums.LossSummationLocation.INSIDE,\n       enums.LossDenominatorMode.ONLY_NEGATIVES, -1,\n       [0.756611, 0.93170905, 1.3529458]),\n      ('in_and_large_cap', enums.LossSummationLocation.INSIDE,\n       enums.LossDenominatorMode.ALL, 4, [1.6356678, 1.7135872, 1.58471]),\n      ('in_and_small_cap', enums.LossSummationLocation.INSIDE,\n       enums.LossDenominatorMode.ALL, 2, [1.0986123, 1.0986123, 1.0986123],\n       (0, 0, 0)),\n      ('in_and_zero_cap', enums.LossSummationLocation.INSIDE,\n       enums.LossDenominatorMode.ALL, 0, [1.3769293, 1.0785717, 1.58471]))\n  def testLossForSummationLocationsAndDenominatorModes(self,\n                                                       summation_location,\n                                                       denominator_mode,\n                                                       positives_cap,\n                                                       expected_loss,\n                                                       labels=(0, 0, 1)):\n    features = np.array([\n        [[0.01, 0.02, 0.14], [0.38, 0.61, 0.50]],\n        [[0.86, 0.97, 0.33], [0.26, 0.68, 0.45]],\n        [[0.32, 0.64, 0.28], [0.45, 0.74, 0.73]],\n    ])\n    labels = tf.one_hot(labels, 2)\n    loss = losses.contrastive_loss(\n        features,\n        labels=labels,\n        summation_location=summation_location,\n        denominator_mode=denominator_mode,\n        positives_cap=positives_cap)\n    self.assertTupleEqual(loss.numpy().shape, (len(expected_loss),))\n    for index, (val1, val2) in enumerate(zip(loss.numpy(), expected_loss)):\n      self.assertAlmostEqual(\n          val1,\n          val2,\n          places=5,\n          msg=f'Lists not almost equal at index {index}: '\n          f'{loss.numpy()} != {expected_loss}')\n\n  def 
testLossForOneView(self):\n features = np.array([\n [[0.01, 0.02, 0.14]],\n [[0.86, 0.97, 0.33]],\n [[0.32, 0.64, 0.28]],\n ])\n labels = np.array([[0, 1, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]],\n dtype=np.int32)\n loss = losses.contrastive_loss(features, labels=labels, temperature=1.0)\n\n pos0 = np.exp(np.dot(features[0, 0, :], features[1, 0, :]))\n neg0 = np.exp(np.dot(features[0, 0, :], features[2, 0, :]))\n loss0 = -np.log(pos0 / (pos0 + neg0))\n pos1 = np.exp(np.dot(features[1, 0, :], features[0, 0, :]))\n neg1 = np.exp(np.dot(features[1, 0, :], features[2, 0, :]))\n loss1 = -np.log(pos1 / (pos1 + neg1))\n expected_loss = np.array([loss0, loss1, 0.0])\n\n self.assertTupleEqual(loss.numpy().shape, expected_loss.shape)\n for index, (val1, val2) in enumerate(zip(loss.numpy(), expected_loss)):\n self.assertAlmostEqual(\n val1,\n val2,\n places=5,\n msg=f'Lists not almost equal at index {index}: '\n f'{loss.numpy()} != {expected_loss}')\n\n def testLossOnTPU(self):\n # Calling tpu.replicate in Eager mode doesn't work. Wrapping in a graph\n # implicitly disables Eager mode within its scope.\n with tf.Graph().as_default():\n features = tf.constant([\n [[0.01, 0.02, 0.14], [0.38, 0.61, 0.50]],\n [[0.86, 0.97, 0.33], [0.26, 0.68, 0.45]],\n [[0.32, 0.64, 0.28], [0.45, 0.74, 0.73]],\n [[0.45, 0.62, 0.07], [0.13, 0.28, 0.91]],\n ])\n labels = tf.one_hot((0, 0, 1, 1), 2)\n\n tpu_result = tf.compat.v1.tpu.replicate(\n losses.contrastive_loss,\n [[features[:2], labels[:2]], [features[2:], labels[2:]]])\n # tpu_result should be a list of 2 lists, each containing a single float\n # Tensor with shape [2].\n self.assertLen(tpu_result, 2)\n self.assertLen(tpu_result[0], 1)\n self.assertLen(tpu_result[1], 1)\n self.assertEqual([2], tpu_result[0][0].shape.as_list())\n self.assertEqual([2], tpu_result[1][0].shape.as_list())\n tpu_loss = tf.reshape(tpu_result, [4])\n\n cpu_loss = losses.contrastive_loss(features, labels=labels)\n\n cpu_partial_loss_1 = losses.contrastive_loss(\n features[:2], labels=labels[:2])\n cpu_partial_loss_2 = losses.contrastive_loss(\n features[2:], labels=labels[2:])\n cpu_partial_loss = tf.concat([cpu_partial_loss_1, cpu_partial_loss_2],\n axis=0)\n\n with self.cached_session() as sess:\n sess.run(tf.compat.v1.tpu.initialize_system())\n\n tpu_loss, cpu_loss, cpu_partial_loss = sess.run(\n (tpu_loss, cpu_loss, cpu_partial_loss))\n print(tpu_loss)\n print(cpu_loss)\n # Numerical precision isn't so high on TPU.\n self.assertAllClose(tpu_loss, cpu_loss, atol=1e-2)\n # Verify that the TPU computation is different than independently\n # computing the two \"local batches\" on CPU, because of the internal\n # cross_replica_concat.\n self.assertNotAllClose(tpu_loss, cpu_partial_loss, atol=1e-2)\n\n\nif __name__ == '__main__':\n tf.test.main()\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Models for distillation.\n\n\"\"\"\nimport os\nfrom typing import Tuple\n\nfrom absl import logging\nimport tensorflow as 
tf\nimport tensorflow_hub as hub\nfrom non_semantic_speech_benchmark.data_prep import augmentation\nfrom non_semantic_speech_benchmark.distillation import frontend_lib\n\n\n\ndef _map_mobilenet_func(mnet_size):\n return {\n 'small': tf.keras.applications.MobileNetV3Small,\n 'large': tf.keras.applications.MobileNetV3Large,\n 'debug': _debug_net,\n }[mnet_size.lower()]\n\n\ndef _debug_net(pooling, *args, **kwargs):\n \"\"\"Small net for debugging.\"\"\"\n del args, kwargs\n final_shape = [-1, 1] if pooling else [-1, 1, 1, 1]\n layers = [\n tf.keras.layers.Lambda(lambda x: tf.reshape( # pylint: disable=g-long-lambda\n tf.reduce_mean(x, axis=[1, 2, 3]), final_shape)),\n ]\n return tf.keras.Sequential(layers)\n\n\ndef get_keras_model(model_type,\n output_dimension,\n truncate_output = False,\n frontend = True,\n tflite = False,\n spec_augment = False):\n \"\"\"Make a Keras student model.\"\"\"\n # For debugging, log hyperparameter values.\n logging.info('model name: %s', model_type)\n logging.info('truncate_output: %s', truncate_output)\n logging.info('output_dimension: %i', output_dimension)\n logging.info('frontend: %s', frontend)\n logging.info('tflite: %s', tflite)\n logging.info('spec_augment: %s', spec_augment)\n\n output_dict = {} # Dictionary of model outputs.\n\n # Construct model input and frontend.\n model_in, feats = frontend_keras(frontend, tflite)\n feats.shape.assert_is_compatible_with([None, None, None, 1])\n spec_augment_fn = augmentation.SpecAugment() if spec_augment else tf.identity\n feats = spec_augment_fn(feats)\n\n # Build network.\n logging.info('Features shape: %s', feats.shape)\n model_out = build_main_net(model_type, feats)\n logging.info('Model output shape: %s', model_out.shape)\n\n # The last fully-connected layer can sometimes be the single largest\n # layer in the entire network. It's also not always very valuable. We try\n # two methods of getting the right output dimension:\n # 1) A FC layer\n # 2) Taking the first `output_dimension` elements.\n need_final_layer = (output_dimension and\n model_out.shape[1] != output_dimension)\n\n # If we need to truncate, do it before we save the embedding. 
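(E.g.,\n  # hypothetically, a 1024-d model_out with output_dimension=512 keeps\n  # model_out[:, :512], while output_dimension=2048 right-pads with zeros via\n  # the tf.pad branch below.)\n  # 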
Otherwise,\n # the embedding will contain some garbage dimensions.\n if need_final_layer and truncate_output:\n if model_out.shape[1] < output_dimension:\n embeddings = tf.pad(\n model_out, [[0, 0], [0, output_dimension - model_out.shape[1]]])\n else:\n embeddings = model_out[:, :output_dimension]\n else:\n embeddings = model_out\n\n # Construct optional final layer, and create output dictionary.\n output_dict['embedding'] = embeddings\n\n target = embeddings\n if need_final_layer and not truncate_output:\n target = tf.keras.layers.Dense(\n output_dimension, name='embedding_to_target')(target)\n output_dict['embedding_to_target'] = target\n output_model = tf.keras.Model(inputs=[model_in], outputs=output_dict)\n\n return output_model\n\n\ndef frontend_keras(frontend, tflite):\n \"\"\"Returns model input and features.\"\"\"\n # TFLite use-cases usually use non-batched inference, and this also enables\n # hardware acceleration.\n num_batches = 1 if tflite else None\n frontend_args = frontend_lib.frontend_args_from_flags()\n feats_inner_dim = frontend_lib.get_frontend_output_shape()[0]\n if frontend:\n logging.info('frontend_args: %s', frontend_args)\n model_in = tf.keras.Input((None,),\n name='audio_samples',\n batch_size=num_batches)\n bs = tf.shape(model_in)[0]\n feats = frontend_lib.SamplesToFeats(tflite, frontend_args)(model_in)\n feats.shape.assert_is_compatible_with(\n [num_batches, feats_inner_dim, frontend_args['frame_width'],\n frontend_args['num_mel_bins']])\n feats = tf.reshape(\n feats, [bs, -1, frontend_args['num_mel_bins'], 1])\n else:\n model_in = tf.keras.Input(\n (feats_inner_dim * frontend_args['frame_width'],\n frontend_args['num_mel_bins'], 1),\n batch_size=num_batches,\n name='log_mel_spectrogram')\n feats = model_in\n\n # `model_in` can be wavs or spectral features, but `feats` must be a 4D\n # spectrogram.\n feats.shape.assert_is_compatible_with(\n [None, feats_inner_dim * frontend_args['frame_width'],\n frontend_args['num_mel_bins'], 1])\n\n return (model_in, feats)\n\n\ndef build_main_net(\n model_type,\n feats,\n):\n \"\"\"Constructs main network.\"\"\"\n if model_type.startswith('mobilenet_'):\n # Format is \"mobilenet_{size}_{alpha}_{avg_pool}\"\n _, mobilenet_size, alpha, avg_pool = model_type.split('_')\n alpha = float(alpha)\n avg_pool = bool(avg_pool)\n logging.info('mobilenet_size: %s', mobilenet_size)\n logging.info('alpha: %f', alpha)\n logging.info('avg_pool: %s', avg_pool)\n model = _map_mobilenet_func(mobilenet_size)(\n input_shape=feats.shape[1:],\n alpha=alpha,\n minimalistic=False,\n include_top=False,\n weights=None,\n pooling='avg' if avg_pool else None,\n dropout_rate=0.0)\n expected_output_shape = [None, None] if avg_pool else [None, 1, 1, None]\n elif model_type.startswith('efficientnet'):\n # pylint:disable=line-too-long\n model_fn, final_dim = {\n 'efficientnetb0': (tf.keras.applications.EfficientNetB0, 1280),\n 'efficientnetb1': (tf.keras.applications.EfficientNetB1, 1280),\n 'efficientnetb2': (tf.keras.applications.EfficientNetB2, 1408),\n 'efficientnetb3': (tf.keras.applications.EfficientNetB3, 1536),\n 'efficientnetb4': (tf.keras.applications.EfficientNetB4, 1792),\n 'efficientnetb5': (tf.keras.applications.EfficientNetB5, 2048),\n 'efficientnetb6': (tf.keras.applications.EfficientNetB6, 2304),\n 'efficientnetb7': (tf.keras.applications.EfficientNetB7, 2560),\n # V2\n 'efficientnetv2b0': (tf.keras.applications.efficientnet_v2.EfficientNetV2B0, 1280),\n 'efficientnetv2b1': (tf.keras.applications.efficientnet_v2.EfficientNetV2B1, 1280),\n 
'efficientnetv2b2': (tf.keras.applications.efficientnet_v2.EfficientNetV2B2, 1408),\n        'efficientnetv2b3': (tf.keras.applications.efficientnet_v2.EfficientNetV2B3, 1536),\n        'efficientnetv2bL': (tf.keras.applications.efficientnet_v2.EfficientNetV2L, 1280),\n        'efficientnetv2bM': (tf.keras.applications.efficientnet_v2.EfficientNetV2M, 1280),\n        'efficientnetv2bS': (tf.keras.applications.efficientnet_v2.EfficientNetV2S, 1280),\n    }[model_type]\n    # pylint:enable=line-too-long\n    model = model_fn(\n        include_top=False,\n        weights=None,  # could be pretrained from imagenet.\n        input_shape=feats.shape[1:],\n        pooling='avg',\n    )\n    expected_output_shape = [None, final_dim]\n  else:\n    raise ValueError(f'`model_type` not recognized: {model_type}')\n\n  # TODO(joelshor): Consider checking that there are trainable weights in\n  # `model`.\n  model_out = model(feats)\n  model_out.shape.assert_is_compatible_with(expected_output_shape)\n\n  return model_out\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n"""Functions to transform raw embeddings into more meaningful representations.\n"""\n\nimport numpy as np\nimport pandas as pd\nfrom sklearn import decomposition\nimport tensorflow.compat.v1 as tf\n\nfrom correct_batch_effects_wdn import metadata\n\n\ndef keep_rows_by_multi_index(df, multi_index_name, multi_index_value):\n  """Keeps rows with given multi-index values from a DataFrame.\n\n  Args:\n    df: DataFrame with samples as rows and multi-indices.\n    multi_index_name: String, the name of the multi-index.\n    multi_index_value: String or a list of strings, the values of the\n      multi-index.\n\n  Returns:\n    A subset of the DataFrame with only the specified rows.\n\n  Raises:\n    ValueError: The multi-index is not contained in the DataFrame.\n  """\n  if multi_index_name not in df.index.names:\n    raise ValueError(\n        'Multi-index %s is not contained in the DataFrame.' % multi_index_name)\n  if not isinstance(multi_index_value, list):\n    multi_index_value = [multi_index_value]\n  return df[df.index.get_level_values(\n      level=multi_index_name).isin(multi_index_value)]\n\n\ndef get_negative_controls(df):\n  """Get negative control samples from a data frame of samples.\n\n  Args:\n    df: DataFrame with samples as rows and metadata.TREATMENT_GROUP as an index.\n\n  Returns:\n    The input DataFrame filtered to negative control samples.\n  """\n  treatment_name = metadata.TREATMENT_GROUP\n  return keep_rows_by_multi_index(df, treatment_name, metadata.NEGATIVE_CONTROL)\n\n\ndef eig_symmetric(m):\n  """Get the eigenvalues and eigenvectors for a real, symmetric matrix.\n\n  Uses linalg.eigh, which is optimized for symmetric matrices. 
Eigenvalues\n are then sorted in descending order.\n\n Args:\n m: a real, symmetric matrix\n\n Returns:\n A tuple consisting of a vector of eigenvalues in descending order and\n a matrix of eigenvectors (eigenvectors are columns).\n \"\"\"\n lambda_m, q_m = np.linalg.eigh(m)\n idx = lambda_m.argsort()[::-1] # sort by eigenvalues, descending\n return lambda_m[idx], q_m[:, idx]\n\n\ndef factor_analysis(embedding_dataframe, fraction, n_components):\n \"\"\"Projects the embeddings by factor analysis using negative controls.\n\n It would be interesting to explore factor analysis because it is a natural way\n to extract important latent features from the data, and PCA is actually a\n special case of factor analysis. When the variances of the error term in\n factor analysis are identical and go towards zero, the posterior estimate of\n the latent variables becomes exactly PCA.\n\n TVN is essentially PCA without dimension reduction. Compared with TVN, the\n drawback of factor analysis is that it requires specifying the number of\n latent variables. As an ad-hoc approach, I would suggest specifying it as the\n number of unique treatments.\n\n Args:\n embedding_dataframe: Pandas dataframe of the embeddings with each row as a\n sample.\n fraction: Fraction of negative control samples used to estimate parameters\n in factor analysis.\n n_components: Number of latent variables. If -1, specify n_components as\n the number of unique treatments.\n\n Returns:\n A Pandas dataframe with a reduced number of dimensions.\n \"\"\"\n # specify the number of latent variables as the number of unique treatments,\n # excluding the negative control\n if n_components == -1:\n n_components = embedding_dataframe.reset_index()[[\n metadata.COMPOUND, metadata.CONCENTRATION\n ]].drop_duplicates().shape[0] - 1\n factor_analysis_object = decomposition.FactorAnalysis(\n n_components=n_components)\n factor_analysis_object.fit(\n get_negative_controls(embedding_dataframe).sample(frac=fraction,\n axis=0).values)\n return pd.DataFrame(\n data=factor_analysis_object.transform(embedding_dataframe.values),\n index=embedding_dataframe.index)\n\n\n
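# A minimal, self-contained sketch of the fit/transform flow inside
# `factor_analysis` above, on synthetic data. The real function fits on a
# random fraction of the negative-control rows selected via the metadata
# MultiIndex; a plain RangeIndex DataFrame stands in for the embeddings here.
import numpy as np
import pandas as pd
from sklearn import decomposition

rng = np.random.RandomState(0)
embeddings = pd.DataFrame(rng.normal(size=(100, 16)))
fa = decomposition.FactorAnalysis(n_components=4)
# Fit on a 50% sample of rows (the stand-in for negative controls)...
fa.fit(embeddings.sample(frac=0.5, axis=0).values)
# ...then project every sample, keeping the original index.
projected = pd.DataFrame(fa.transform(embeddings.values), index=embeddings.index)
assert projected.shape == (100, 4)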
def get_bootstrap_sample(df):\n \"\"\"Get a bootstrap sample of a DataFrame.\n\n To make different bootstrap samples comparable, we assume that the\n experimental condition, i.e., the layout of batches, plates and\n wells, stays the same. Otherwise the bootstrap samples would vary\n considerably. Thus, the bootstrapping is conducted for the embedding vectors\n within each well. Without making any distributional assumption, nonparametric\n bootstrapping is performed.\n\n Args:\n df: DataFrame with samples as rows. Should have a MultiIndex with levels\n batch, plate and well\n\n Returns:\n A bootstrap sample of the DataFrame.\n \"\"\"\n df_list = []\n levels = [metadata.BATCH, metadata.PLATE, metadata.WELL]\n if metadata.TIMEPOINT in df.index.names:\n levels.append(metadata.TIMEPOINT)\n if metadata.SEQUENCE in df.index.names:\n levels.append(metadata.SEQUENCE)\n for _, df_well in df.groupby(level=levels):\n index = np.random.choice(df_well.shape[0], size=df_well.shape[0])\n df_list.append(df_well.iloc[index, :])\n return pd.concat(df_list)\n\n\ndef normalize_df_by_coral(df,\n center,\n center_cov,\n level_to_normalize=metadata.BATCH,\n lambda_reg=1.0):\n \"\"\"Remove batch effects by variants of the CORAL method.\n\n This function is adapted from @geoffd's colab:\n https://drive.google.com/open?id=0B-umIy5UPrYIVWJYYnZaMzNRNGc.\n\n The main idea of the CORAL method is (1) finding an affine transformation on each\n batch (or plate) such that after transformation the covariances of the\n negative control on each batch (or plate) are the same as the overall\n covariance of the negative control before transformation; (2) applying each\n learned transformation to all compounds on each batch (or plate).\n\n Args:\n df: Pandas dataframe with complete multi-index of metadata and embeddings\n center: A boolean giving whether the embedding vectors are centered by the\n mean of those of the negative control on each batch (or plate)\n center_cov: A boolean giving whether the covariance is centered\n level_to_normalize: (optional) A string giving the level on which the CORAL\n method is applied\n lambda_reg: the weight of the regularization term (i.e., an identity matrix)\n added to the estimated covariance on each batch (or plate)\n\n Returns:\n A Pandas dataframe transformed by the CORAL method, which has the same\n multi-index as df.\n \"\"\"\n normalized = []\n for _, df_level in df.groupby(level=level_to_normalize):\n df_control = df_level.xs(\n metadata.NEGATIVE_CONTROL,\n level=metadata.TREATMENT_GROUP,\n drop_level=False)\n control_mean = np.mean(df_control, axis=0)\n if center_cov:\n control_cov = np.cov(df_control, rowvar=False)\n else:\n control_cov = (df_control.T.dot(df_control)) / df_control.shape[0]\n # regularize\n control_cov += lambda_reg * np.identity(control_cov.shape[1])\n lambda_cov, q_cov = eig_symmetric(control_cov)\n if center:\n df_level -= control_mean\n normalized_df = (df_level.dot(q_cov) / np.sqrt(lambda_cov)).dot(q_cov.T)\n normalized.append(normalized_df)\n return pd.concat(normalized)\n\n\ndef coral_without_mean_shift_batch(df):\n \"\"\"Apply a CORAL normalization without mean shift on batch level.\"\"\"\n return normalize_df_by_coral(df, center=False, center_cov=True)\n\n\ndef transform_df(df, rotate_mat, shift_vec):\n \"\"\"Transform a DataFrame by rotating and shifting.\n\n Denote each row of df by x. Mathematically, it transforms x to\n x * rotate_mat^T + shift_vec^T. 
The transpose is needed because x is a row\n vector.\n\n Args:\n df: DataFrame with samples as rows\n rotate_mat: 2-D NumPy array of size p-by-p\n shift_vec: 2-D NumPy array of size p-by-1\n\n Returns:\n A transformed DataFrame.\n \"\"\"\n return df.dot(rotate_mat.T) + np.squeeze(shift_vec)\n\n\ndef sum_of_square(a):\n \"\"\"Sum of squared elements of Tensor a.\"\"\"\n return tf.reduce_sum(tf.square(a))\n\n\ndef drop_unevaluated_comp(df):\n \"\"\"Drop unevaluated compounds from a dataframe.\"\"\"\n df = df[df.index.get_level_values(\n level=metadata.TREATMENT_GROUP) != metadata.NEGATIVE_CONTROL]\n df = df[df.index.get_level_values(level=metadata.MOA) != metadata.UNKNOWN]\n return df\n", "
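# A short sketch of the eigen-whitening at the heart of `normalize_df_by_coral`
# above, written out with plain NumPy on synthetic data: rotate into the
# eigenbasis of the (optionally regularized) covariance, rescale each direction
# to unit variance, and rotate back. With lambda_reg = 0, as here, the rows are
# whitened exactly; the regularizer only keeps the rescaling well conditioned.
import numpy as np

rng = np.random.RandomState(0)
x = rng.normal(size=(2000, 3)).dot(
    np.array([[2., 1., 0.], [0., 1., 0.], [0., 0., .5]]))
cov = np.cov(x, rowvar=False)  # lambda_reg * np.identity(3) would be added here
lam, q = np.linalg.eigh(cov)
idx = lam.argsort()[::-1]      # same descending sort as eig_symmetric
lam, q = lam[idx], q[:, idx]
x_norm = (x.dot(q) / np.sqrt(lam)).dot(q.T)  # rotate, rescale, rotate back
assert np.allclose(np.cov(x_norm, rowvar=False), np.eye(3), atol=1e-6)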
# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Trains and evaluates motion blur neural network.\n\nLearning to Synthesize Motion Blur\nhttp://timothybrooks.com/tech/motion-blur\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom absl import flags\nimport tensorflow.compat.v1 as tf\nfrom tensorflow.compat.v1 import estimator as tf_estimator\n\nfrom motion_blur.train import dataset\nfrom motion_blur.train import estimator\nfrom motion_blur.train import network\nfrom tensorflow.contrib import training as contrib_training\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_string(\n 'model_dir',\n None,\n 'Location at which to save model logs and checkpoints.')\n\nflags.DEFINE_string(\n 'train_pattern',\n None,\n 'Pattern for directory containing source JPG images for training.')\n\nflags.DEFINE_string(\n 'test_pattern',\n None,\n 'Pattern for directory containing source JPG images for testing.')\n\nflags.DEFINE_integer(\n 'image_size',\n 256,\n 'Width and height to crop training and testing frames. '\n 'Must be a multiple of 16.',\n lower_bound=16)\n\nflags.DEFINE_integer(\n 'batch_size',\n 16,\n 'Training batch size.',\n lower_bound=1)\n\nflags.DEFINE_float(\n 'learning_rate',\n 2e-5,\n 'Learning rate for Adam optimization.',\n lower_bound=0.0)\n\nflags.register_validator(\n 'image_size',\n lambda image_size: image_size % 16 == 0,\n message='\'image_size\' must be a multiple of 16.')\n\nflags.mark_flag_as_required('model_dir')\nflags.mark_flag_as_required('train_pattern')\nflags.mark_flag_as_required('test_pattern')\n\n\ndef main(_):\n inference_fn = network.inference\n hparams = contrib_training.HParams(learning_rate=FLAGS.learning_rate)\n model_fn = estimator.create_model_fn(inference_fn, hparams)\n config = tf_estimator.RunConfig(FLAGS.model_dir)\n # Use a distinct name: assigning to `tf_estimator` here would shadow the\n # module and make the `RunConfig` lookup above fail with UnboundLocalError.\n blur_estimator = tf_estimator.Estimator(model_fn=model_fn, config=config)\n\n train_dataset_fn = dataset.create_dataset_fn(\n FLAGS.train_pattern,\n height=FLAGS.image_size,\n width=FLAGS.image_size,\n batch_size=FLAGS.batch_size)\n\n eval_dataset_fn = dataset.create_dataset_fn(\n FLAGS.test_pattern,\n height=FLAGS.image_size,\n width=FLAGS.image_size,\n batch_size=FLAGS.batch_size)\n\n train_spec, eval_spec = estimator.create_train_and_eval_specs(\n train_dataset_fn, eval_dataset_fn)\n\n tf.logging.set_verbosity(tf.logging.INFO)\n tf_estimator.train_and_evaluate(blur_estimator, train_spec, eval_spec)\n\n\nif __name__ == '__main__':\n tf.app.run(main)\n", "
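# `estimator.create_train_and_eval_specs` above is project code whose internals
# are not shown here; this sketch only illustrates the generic tf.estimator
# pattern it presumably wraps. `train_input_fn` / `eval_input_fn` are
# hypothetical dataset callables like the ones built by `create_dataset_fn`.
from tensorflow.compat.v1 import estimator as tf_estimator

def make_specs(train_input_fn, eval_input_fn, max_steps=1000000):
  train_spec = tf_estimator.TrainSpec(input_fn=train_input_fn,
                                      max_steps=max_steps)
  eval_spec = tf_estimator.EvalSpec(input_fn=eval_input_fn)
  return train_spec, eval_spec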
# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"MLP VAE on images task family.\"\"\"\n\nimport numpy as np\nimport sonnet as snt\n\nfrom task_set import registry\nfrom task_set.tasks import base\nfrom task_set.tasks import generative_utils\nfrom task_set.tasks import utils\nimport tensorflow.compat.v1 as tf\n\n\n@registry.task_registry.register_sampler(\"mlp_vae_family\")\ndef sample_mlp_vae_family_cfg(seed):\n \"\"\"Samples a task config for an MLP VAE model on image datasets.\n\n These configs are nested python structures that provide enough information\n to create an instance of the problem.\n\n Args:\n seed: int Random seed to generate task from.\n\n Returns:\n A nested dictionary containing a configuration.\n \"\"\"\n rng = np.random.RandomState(seed)\n cfg = {}\n enc_n_layers = rng.choice([1, 2, 3, 4])\n cfg[\"enc_hidden_units\"] = [\n utils.sample_log_int(rng, 32, 128) for _ in range(enc_n_layers)\n ]\n\n dec_n_layers = rng.choice([1, 2, 3])\n cfg[\"dec_hidden_units\"] = [\n utils.sample_log_int(rng, 32, 128) for _ in range(dec_n_layers)\n ]\n\n cfg[\"activation\"] = utils.sample_activation(rng)\n cfg[\"w_init\"] = utils.sample_initializer(rng)\n cfg[\"dataset\"] = utils.sample_image_dataset(rng)\n return cfg\n\n\n@registry.task_registry.register_getter(\"mlp_vae_family\")\ndef get_mlp_vae_family(cfg):\n \"\"\"Gets a task for the given cfg.\n\n Args:\n cfg: config specifying the model generated by `sample_mlp_vae_family_cfg`.\n\n Returns:\n base.BaseTask for the given config.\n \"\"\"\n act_fn = utils.get_activation(cfg[\"activation\"])\n w_init = utils.get_initializer(cfg[\"w_init\"])\n init = {\"w\": w_init}\n\n datasets = utils.get_image_dataset(cfg[\"dataset\"])\n\n def _build(batch):\n \"\"\"Build the sonnet module.\"\"\"\n flat_img = snt.BatchFlatten()(batch[\"image\"])\n latent_size = cfg[\"enc_hidden_units\"][-1]\n\n def encoder_fn(net):\n hidden_units = cfg[\"enc_hidden_units\"][:-1] + [latent_size * 2]\n mod = snt.nets.MLP(hidden_units, activation=act_fn, initializers=init)\n outputs = mod(net)\n return generative_utils.LogStddevNormal(outputs)\n\n encoder = snt.Module(encoder_fn, name=\"encoder\")\n\n def decoder_fn(net):\n hidden_units = cfg[\"dec_hidden_units\"] + [flat_img.shape.as_list()[1] * 2]\n mod = snt.nets.MLP(hidden_units, activation=act_fn, initializers=init)\n net = mod(net)\n net = tf.clip_by_value(net, -10, 10)\n return generative_utils.QuantizedNormal(mu_log_sigma=net)\n\n decoder = snt.Module(decoder_fn, name=\"decoder\")\n zshape = tf.stack([tf.shape(flat_img)[0], 2 * latent_size])\n prior = generative_utils.LogStddevNormal(tf.zeros(shape=zshape))\n\n log_p_x, kl_term = generative_utils.log_prob_elbo_components(\n encoder, decoder, prior, flat_img)\n elbo = log_p_x - kl_term\n\n metrics = {\n \"kl_term\": tf.reduce_mean(kl_term),\n \"log_kl_term\": tf.log(tf.reduce_mean(kl_term)),\n \"log_p_x\": tf.reduce_mean(log_p_x),\n \"elbo\": tf.reduce_mean(elbo),\n \"log_neg_log_p_x\": tf.log(-tf.reduce_mean(elbo))\n }\n\n return base.LossAndAux(-tf.reduce_mean(elbo), metrics)\n\n return base.DatasetModelTask(lambda: snt.Module(_build), datasets)\n", "
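# The ELBO assembled in `get_mlp_vae_family` above is log p(x|z) minus a KL
# term. For a diagonal-Gaussian posterior (as produced by `LogStddevNormal`)
# against a standard-normal prior, the KL has the standard closed form,
# sketched here with NumPy; only the formula is shown, not the project's own
# `log_prob_elbo_components` implementation.
import numpy as np

def diag_gaussian_kl(mu, log_sigma):
  """KL(N(mu, sigma^2) || N(0, 1)), summed over the latent dimensions."""
  return 0.5 * np.sum(mu**2 + np.exp(2. * log_sigma) - 2. * log_sigma - 1.,
                      axis=-1)

mu, log_sigma = np.zeros((8, 32)), np.zeros((8, 32))
assert np.allclose(diag_gaussian_kl(mu, log_sigma), 0.)  # posterior == prior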
# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Learned optimizer search lists in pytorch!\"\"\"\nfrom typing import Callable\nfrom . import common\nimport numpy as np\nimport torch\n\nfrom torch.optim.optimizer import Optimizer\n\n\ndef get_cosine_learning_rate_fn(\n training_steps, learning_rate, min_learning_rate_mult,\n constant_fraction, warmup_fraction):\n \"\"\"Get a function that does cosine learning rate decay with warmup.\n\n The learning rate starts at zero, is \"warmed up\" linearly over\n `warmup_fraction * training_steps` iterations to achieve a final value of\n `learning_rate`. A constant learning rate of `learning_rate` is held up until\n `training_steps*constant_fraction` at which point a cosine decay is started\n to a final learning rate of `min_learning_rate_mult * learning_rate`.\n\n The cosine decay sets the learning rate using a monotonically decreasing\n section of the cosine function from 0 to pi/2. It has been proven to be useful\n in large language modeling (gpt, megatron-lm) and image classification.\n See https://arxiv.org/abs/1608.03983 for more information on the cosine decay.\n\n\n Args:\n training_steps: number of training steps the schedule should be run for.\n learning_rate: base learning rate. This is the learning rate used just after\n warmup and where the decay starts from.\n min_learning_rate_mult: a multiplicative factor to control how low the\n learning rate should be decayed to.\n constant_fraction: the fraction of training steps number of steps to take\n before starting the decay. This includes the time spent warming up the\n learning rate.\n warmup_fraction: the fraction of training steps to use for a learning rate\n warmup.\n\n Returns:\n A function that takes as input a training iteration and returns the learning\n rate from the specified schedule.\n \"\"\"\n\n def ff(x):\n return torch.tensor(x, dtype=torch.float32)\n\n def fn(global_step):\n \"\"\"Returns a learning rate given the current training iteration.\"\"\"\n\n float_training_steps = ff(training_steps)\n global_step = ff(global_step)\n\n # ensure we don't train longer than training steps\n global_step = torch.min(global_step, float_training_steps)\n\n constant_steps = float_training_steps * constant_fraction\n x = torch.max(ff(global_step), ff(constant_steps))\n\n min_learning_rate = min_learning_rate_mult * learning_rate\n\n if warmup_fraction:\n min_warmup_fraction = max(warmup_fraction, constant_fraction)\n warmup_steps = float_training_steps * min_warmup_fraction\n is_warmup = ff(ff(warmup_steps) > ff(global_step))\n warmup_lr = (global_step / warmup_steps) * learning_rate\n else:\n warmup_lr = learning_rate\n is_warmup = 0.0\n\n step = x - constant_steps\n\n constant_and_decay = (learning_rate - min_learning_rate) * (\n torch.cos(step * np.pi /\n (float_training_steps - constant_steps)) / 2.0 +\n 0.5) + min_learning_rate\n\n new_learning_rate = constant_and_decay * (1.0 - is_warmup) + is_warmup * (\n warmup_lr)\n return new_learning_rate\n\n return fn\n\n\nclass NadamWCosineDecay(Optimizer):\n \"\"\"Optimizer that implements Nadam / Adam / AdamW / NadamW type optimizers.\n\n This implements the default TF Optimizer API.\n \"\"\"\n\n def __init__(\n self,\n params,\n learning_rate=1e-3,\n beta1=0.9,\n beta2=0.999,\n epsilon=1e-8,\n adamw_weight_decay=0.0,\n l2_weight_decay=0.0,\n use_bias_correction=True,\n use_nesterov=False,\n constant_fraction=1.0,\n warmup_fraction=0.0,\n min_learning_rate_mult=1.0,\n training_steps=10000,\n ):\n \"\"\"Construct a new Nadam / Adam / AdamW / NadamW optimizer.\n\n Args:\n params: Model parameters.\n learning_rate: A Tensor or a floating point value. The base learning rate.\n beta1: A float value or a constant float tensor. The exponential decay\n rate for the 1st moment estimates.\n beta2: A float value or a constant float tensor. The exponential decay\n rate for the 2nd moment estimates.\n epsilon: A small constant for numerical stability. This epsilon is\n \"epsilon hat\" in the Kingma and Ba paper (in the formula just before\n Section 2.1), not the epsilon in Algorithm 1 of the paper.\n adamw_weight_decay: A floating point value. Weight decay similar to that\n in AdamW.\n l2_weight_decay: A floating point value. Weight decay similar to that of\n adding L2 loss.\n use_bias_correction: A boolean for whether or not to use bias correction.\n use_nesterov: A boolean for whether or not to use the NAdam algorithm.\n constant_fraction: the fraction of training steps number of steps to take\n before starting the decay. This includes the time spent warming up the\n learning rate.\n warmup_fraction: the fraction of training steps to use for a learning rate\n warmup.\n min_learning_rate_mult: a multiplicative factor to control how low the\n learning rate should be decayed to.\n training_steps: number of training steps the schedule should be run for.\n \"\"\"\n defaults = dict(\n lr=learning_rate,\n beta1=beta1,\n beta2=beta2,\n epsilon=epsilon,\n adamw_weight_decay=adamw_weight_decay,\n l2_weight_decay=l2_weight_decay,\n use_nesterov=use_nesterov,\n constant_fraction=constant_fraction,\n warmup_fraction=warmup_fraction,\n min_learning_rate_mult=min_learning_rate_mult,\n training_steps=training_steps,\n use_bias_correction=use_bias_correction)\n\n super(NadamWCosineDecay, self).__init__(params, defaults)\n\n @torch.no_grad()\n def step(self, closure=None):\n \"\"\"Performs a single optimization step.\n\n Arguments:\n closure (callable, optional): A closure that reevaluates the model and\n returns the loss.\n\n Returns:\n loss: tensor\n\n Raises:\n RuntimeError: if sparse gradients are used.\n \"\"\"\n loss = None\n if closure is not None:\n with torch.enable_grad():\n loss = closure()\n\n for group in self.param_groups:\n for p in group[\"params\"]:\n if p.grad is None:\n continue\n grad = p.grad\n\n if grad.is_sparse:\n raise RuntimeError(\"No SparseGrads supported at this time.\")\n\n state = self.state[p]\n\n # State initialization\n if len(state) == 0: # pylint: disable=g-explicit-length-test\n state[\"step\"] = 0\n # Exponential moving average of gradient values\n state[\"exp_avg\"] = torch.zeros_like(\n p, memory_format=torch.preserve_format)\n # Exponential moving average of squared gradient values\n state[\"exp_avg_sq\"] = torch.zeros_like(\n p, memory_format=torch.preserve_format)\n\n lr = get_cosine_learning_rate_fn(group[\"training_steps\"], group[\"lr\"],\n group[\"min_learning_rate_mult\"],\n group[\"constant_fraction\"],\n group[\"warmup_fraction\"])(\n state[\"step\"])\n\n grad = grad - p * group[\"l2_weight_decay\"]\n\n beta1, beta2 = group[\"beta1\"], group[\"beta2\"]\n\n exp_avg, exp_avg_sq = state[\"exp_avg\"], state[\"exp_avg_sq\"]\n exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)\n exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)\n\n state[\"step\"] += 1\n t = state[\"step\"]\n\n # correction\n if group[\"use_bias_correction\"]:\n lr_t = lr * np.sqrt(1.0 - beta2**t) / (1.0 - beta1**t)\n else:\n lr_t = lr\n\n if group[\"use_nesterov\"]:\n numerator = (beta1 * exp_avg + (1.0 - beta1) * grad)\n denom = torch.sqrt(exp_avg_sq) + group[\"epsilon\"]\n step = lr_t * numerator / denom\n else:\n denom = torch.sqrt(exp_avg_sq) + group[\"epsilon\"]\n step = lr_t * exp_avg / denom\n\n step = step + (lr_t * group[\"adamw_weight_decay\"] * p)\n\n p.add_(-step)\n\n return loss\n\n\ndef optimizer_for_idx(params, idx, training_steps):\n \"\"\"Get an Optimizer for the given configuration and training_steps.\"\"\"\n config = common.get_optimizer_config(idx)\n config[\"training_steps\"] = training_steps\n return NadamWCosineDecay(params, **config)\n", "
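# A usage sketch for the schedule and optimizer above (values here are
# arbitrary, chosen only for illustration). Note that, per the code, the
# effective warmup window runs until
# max(warmup_fraction, constant_fraction) * training_steps.
import torch

lr_fn = get_cosine_learning_rate_fn(
    training_steps=1000, learning_rate=1e-3, min_learning_rate_mult=0.1,
    constant_fraction=0.5, warmup_fraction=0.1)
assert float(lr_fn(0)) == 0.0                  # warmup starts from zero
assert abs(float(lr_fn(500)) - 1e-3) < 1e-6    # full lr once warmup ends
assert abs(float(lr_fn(1000)) - 1e-4) < 1e-6   # decays to 0.1 * base lr

p = torch.nn.Parameter(torch.zeros(4))
opt = NadamWCosineDecay([p], learning_rate=1e-3, training_steps=1000,
                        constant_fraction=0.5, warmup_fraction=0.1)
p.grad = torch.ones(4)
opt.step()  # step 0 applies a zero learning rate under warmup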
# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Simple Double DQN agent.\"\"\"\n# pylint: disable=line-too-long\n\nimport numpy as np\nimport tensorflow as tf\n\n\ndef mlp_policy(input_shape, action_shape):\n \"\"\"Returns a keras model of fully connected layers.\"\"\"\n return tf.keras.Sequential([\n tf.keras.layers.Dense(64, input_shape=(input_shape,), activation='relu'),\n tf.keras.layers.Dense(64, activation='relu'),\n tf.keras.layers.Dense(action_shape),\n ])\n\n\nclass DoubleDQN:\n \"\"\"A basic double dqn agent.\n\n Attributes:\n learning_rate: learning rate of the optimizer\n gamma: future discount of the agent\n online_model: model that contains the most recent weight\n target_model: model that contains a periodically updated copy of the weights\n optimizer: the optimizer used for training\n loss: the loss function\n global_step: how many simulation steps have been taken\n learn_step: how many optimization steps have been taken\n batch_size: size of the batch\n state_size: size of the observation\n action_size: size of the action\n saver: tensorflow weight saver\n manager: tensorflow checkpoint manager\n \"\"\"\n\n def __init__(self,\n learning_rate=0.001,\n discount=0.99,\n batch_size=64,\n state_size=4,\n action_size=2,\n use_huber_loss=False,\n state_process_fn=None,\n action_process_fn=None,\n action_post_process_fn=None,\n model_dir=None):\n \"\"\"Initialize the double dqn agent.\n\n Args:\n learning_rate: learning rate of the optimizer\n discount: future discount of the agent\n batch_size: size of the batch\n state_size: size of the observation\n action_size: size of the action\n use_huber_loss: whether to use huber loss or l2 loss\n state_process_fn: function that processes the state before computation\n action_process_fn: function that processes the action before computation\n action_post_process_fn: function that processes the action after computation\n model_dir: optional directory for saving weights\n \"\"\"\n\n # hyper parameters\n self.learning_rate = learning_rate\n self.gamma = discount\n\n self.online_model = mlp_policy(state_size, action_size)\n self.target_model = mlp_policy(state_size, action_size)\n self.online_model.build()\n self.target_model.build()\n self.optimizer = tf.keras.optimizers.Adam(learning_rate=self.learning_rate)\n self.loss = tf.keras.losses.Huber()\n\n self.global_step = tf.Variable(1, name='global_step')\n self.learn_step = tf.Variable(1, name='learn_step')\n\n self.batch_size = batch_size\n self.state_size = state_size\n self.action_size = action_size\n self._use_huber_loss = use_huber_loss\n\n self._state_process_fn = state_process_fn\n self._action_process_fn = action_process_fn\n self._action_post_process_fn = action_post_process_fn\n\n if model_dir:\n self.saver = tf.train.Checkpoint(\n optimizer=self.optimizer,\n online_model=self.online_model,\n target_model=self.target_model,\n step=self.global_step)\n self.manager = tf.train.CheckpointManager(\n self.saver, model_dir, max_to_keep=5, checkpoint_name='model')\n\n def update_target_network(self):\n \"\"\"Update the target network by copying the online network weights.\"\"\"\n self.target_model.set_weights(self.online_model.get_weights())\n\n def step(self, state, env, epsilon):\n \"\"\"Produce an action for a state.\"\"\"\n del env\n state = state[None, :]\n if self._state_process_fn:\n state = self._state_process_fn(state)\n if np.random.rand() <= epsilon:\n action = np.random.choice(self.action_size)\n else:\n state = tf.convert_to_tensor(state, dtype=tf.float32)\n q_value = self.online_model(state, training=False)[0]\n action = np.argmax(q_value)\n if self._action_post_process_fn:\n action = self._action_post_process_fn(action)\n return action\n\n def train(self, batch):\n 
\"\"\"Train the agent on a batch of transitions.\"\"\"\n states = batch['obs']\n actions = batch['action']\n rewards = batch['reward']\n next_states = batch['obs_next']\n dones = batch['done']\n\n if self._state_process_fn:\n states = self._state_process_fn(states)\n next_states = self._state_process_fn(next_states)\n\n if self._action_process_fn:\n actions = self._action_process_fn(actions)\n\n batch = {\n 'states':\n tf.convert_to_tensor(np.vstack(states), dtype=tf.float32),\n 'actions':\n tf.convert_to_tensor(actions, dtype=tf.int32),\n 'rewards':\n tf.convert_to_tensor(rewards, dtype=tf.float32),\n 'next_states':\n tf.convert_to_tensor(np.vstack(next_states), dtype=tf.float32),\n 'dones':\n tf.convert_to_tensor(dones, dtype=tf.float32)\n }\n loss = self._train(batch).numpy()\n return {'loss': loss}\n\n @tf.function\n def _train(self, batch):\n \"\"\"Update models with data in batch.\"\"\"\n dqn_variable = self.online_model.trainable_variables\n with tf.GradientTape() as tape:\n tape.watch(dqn_variable)\n error = self._loss(batch)\n dqn_grads = tape.gradient(error, dqn_variable)\n self.optimizer.apply_gradients(zip(dqn_grads, dqn_variable))\n return error\n\n def _loss(self, batch):\n \"\"\"Compute the td loss.\"\"\"\n states, next_states = batch['states'], batch['next_states']\n rewards, dones, actions = batch['rewards'], batch['dones'], batch['actions']\n target_q = self.target_model(next_states, training=True)\n online_q = self.online_model(next_states, training=True)\n next_action = tf.argmax(online_q, axis=1)\n target_value = tf.reduce_sum(\n tf.one_hot(next_action, self.action_size) * target_q, axis=1)\n\n target_value = (1 - dones) * self.gamma * target_value + rewards\n main_q = self.online_model(states)\n main_value = tf.reduce_sum(\n tf.one_hot(actions, self.action_size) * main_q, axis=1)\n if self._use_huber_loss:\n error = self.loss(target_value, main_value)\n else:\n error = tf.square(main_value - tf.stop_gradient(target_value)) * 0.5\n error = tf.reduce_mean(error)\n return error\n\n def init_networks(self):\n \"\"\"Initialize the weights.\"\"\"\n print('No need to initialize in eager mode.')\n\n def save_model(self, model_dir=None):\n \"\"\"Save current model weights with manager or to model_dir.\n\n Args:\n model_dir: optional directory for saving the model weights\n \"\"\"\n assert self.manager or model_dir, 'No manager and no model dir!'\n save_path = self.manager.save()\n print('Save model: step {} to {}'.format(int(self.global_step), save_path))\n\n def load_model(self, model_dir=None):\n \"\"\"Load current model weights with manager or from model_dir.\n\n Args:\n model_dir: optional directory for saving the model weights\n \"\"\"\n assert self.manager or model_dir, 'No manager and no model dir!'\n if not model_dir:\n model_dir = self.manager.latest_checkpoint\n else:\n model_dir = tf.train.latest_checkpoint(model_dir)\n self.saver.restore(model_dir)\n\n def increase_global_step(self):\n \"\"\"Increment gloabl step by 1.\"\"\"\n return self.global_step.assign_add(1).numpy()\n\n def get_global_step(self):\n \"\"\"Get the current value of global step in python integer.\"\"\"\n return int(self.global_step)\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, 
software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for data_lib.\"\"\"\n\nfrom absl.testing import absltest\nimport apache_beam as beam\nfrom apache_beam.testing import test_pipeline\nfrom apache_beam.testing import util\nimport numpy as np\nimport tensorflow as tf\n\nfrom assessment_plan_modeling.ap_parsing import ap_parsing_lib\nfrom assessment_plan_modeling.ap_parsing import augmentation_lib as aug_lib\nfrom assessment_plan_modeling.ap_parsing import data_lib\nfrom assessment_plan_modeling.ap_parsing import tokenizer_lib\nfrom assessment_plan_modeling.note_sectioning import note_section_lib\n\n\nclass APParsingDataLibTest(tf.test.TestCase):\n\n def test_get_converted_labels(self):\n ap_text = \"\\n\".join([\n \"50 yo m with hx of copd, dm2\", \"#. COPD ex: started on abx in ED.\",\n \" - continue abx.\"\n ])\n tokens = tokenizer_lib.tokenize(ap_text)\n\n labels = [\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=32,\n end_char=39), # span_text=\"COPD ex\"\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_DESCRIPTION,\n start_char=41,\n end_char=63), # span_text=\"started on abx in ED.\\n\"\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n start_char=67,\n end_char=80,\n action_item_type=ap_parsing_lib.ActionItemType.MEDICATIONS\n ), # span_text=\"continue abx.\"\n ]\n\n converted_labels = data_lib.generate_model_labels(labels, tokens)\n\n expected_fragment_labels = np.zeros(45)\n expected_fragment_labels[21] = 1 # B-PT COPD ex\n expected_fragment_labels[22:24] = 2 # I-PT COPD ex\n\n expected_fragment_labels[26] = 3 # B-PD started on abx in ED\n expected_fragment_labels[27:35] = 4 # I-PD started on abx in ED\n\n expected_fragment_labels[41] = 5 # B-AI continue abx\n expected_fragment_labels[42:44] = 6 # I-AI continue abx\n\n expected_ai_labels = np.zeros(45)\n expected_ai_labels[41:44] = 1 # continue abx - medications\n\n self.assertAllEqual(converted_labels[\"fragment_type\"],\n expected_fragment_labels)\n self.assertAllEqual(converted_labels[\"action_item_type\"],\n expected_ai_labels)\n\n def test_get_token_features(self):\n ap_text = \"50 yo m with hx of copd, dm2\\n#. 
COPD Ex\"\n\n # 0 1 23456 7 8\n vocab = [\" \", \"\\n\"] + list(\"-:.,#\") + [\"2\", \"50\"] + [\n \"abx\",\n \"continue\",\n \"copd\",\n \"dm\",\n \"ed\",\n \"ex\",\n \"hx\",\n \"in\",\n \"m\",\n \"of\",\n ]\n\n tokens = tokenizer_lib.tokenize(ap_text)\n\n token_features = data_lib.generate_token_features(tokens, vocab)\n\n expected_features = {\n # OOV is 1\n \"token_ids\": [\n 11, 3, 2, 3, 20, 3, 2, 3, 18, 3, 21, 3, 14, 8, 3, 15, 10, 4, 9, 7,\n 3, 14, 3, 17\n ],\n \"token_type\": [\n 3, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 2, 5, 1, 3, 5, 2, 2, 5, 1, 5,\n 1\n ],\n \"is_upper\": [\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,\n 0\n ],\n \"is_title\": [\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 1\n ]\n }\n\n for key in token_features:\n self.assertAllClose(token_features[key], expected_features[key], msg=key)\n\n def test_extract_ap_sections(self):\n section_markers = {\n \"hpi\": [\"history of present illness\"],\n \"a&p\": [\"assessment and plan\"],\n }\n note = data_lib.Note(\n note_id=1,\n text=\"hpi:\\n 50yof with hx of dm2.\\na&p:\\n # dm2:\\n-RISS\",\n subject_id=0,\n category=\"PHYSICIAN\")\n\n expected = [note_section_lib.Section(28, 46, [\"assessment and plan\"])]\n self.assertEqual(\n list(data_lib.extract_ap_sections(note.text, section_markers)),\n expected)\n\n # multi section\n note = data_lib.Note(\n note_id=1,\n text=\"hpi:\\n 50yof with hx of dm2.\\na&p: DM2\\na&p:\\n # dm2:\\n-RISS\",\n subject_id=0,\n category=\"PHYSICIAN\")\n\n expected = [\n note_section_lib.Section(28, 37, [\"assessment and plan\"]),\n note_section_lib.Section(37, 55, [\"assessment and plan\"]),\n ]\n self.assertEqual(\n list(data_lib.extract_ap_sections(note.text, section_markers)),\n expected)\n\n def test_process_rating_labels(self):\n rating_labels = [\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=0,\n end_char=50), # before\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=45,\n end_char=65), # partially contained\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=50,\n end_char=150), # exactly matches section\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=100,\n end_char=105), # contained\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=150,\n end_char=155), # after\n ]\n\n expected = [\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=0,\n end_char=100),\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=50,\n end_char=55)\n ]\n self.assertEqual(\n data_lib.process_rating_labels(rating_labels,\n note_section_lib.Section(50, 150, [])),\n expected)\n\n\nclass ProcessAPDataDoFnTest(absltest.TestCase):\n\n def test_usage(self):\n section_markers = {\n \"hpi\": [\"history of present illness\"],\n \"a&p\": [\"assessment and plan\"],\n }\n ap_texts = [\"a&p:\\n # dm2:\\n-RISS\", \"a&p:\\n # COPD:\\n-nebs\"]\n notes_with_ratings = [(\"0\", {\n \"notes\": [\n data_lib.Note(\n note_id=0,\n text=\"blablabla\\n\" + ap_texts[0],\n subject_id=0,\n category=\"PHYSICIAN\")\n ],\n \"ratings\": [[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=19,\n end_char=22),\n ap_parsing_lib.LabeledCharSpan(\n 
span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n action_item_type=ap_parsing_lib.ActionItemType.MEDICATIONS,\n start_char=24,\n end_char=28)\n ]],\n \"note_partition\": [\"val\"]\n })] + [(\"1\", {\n \"notes\": [\n data_lib.Note(\n note_id=1,\n text=\"blablabla\\n\" + ap_texts[1],\n subject_id=1,\n category=\"PHYSICIAN\")\n ],\n \"ratings\": [],\n \"note_partition\": []\n })]\n\n expected = [\n (\"0|10\",\n data_lib.APData(\n partition=data_lib.Partition.VAL,\n note_id=\"0\",\n subject_id=\"0\",\n ap_text=ap_texts[0],\n char_offset=10,\n tokens=tokenizer_lib.tokenize(ap_texts[0]),\n labeled_char_spans=[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=8,\n end_char=11),\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n action_item_type=ap_parsing_lib.ActionItemType.MEDICATIONS,\n start_char=14,\n end_char=18)\n ])),\n (\"1|10\",\n data_lib.APData(\n partition=data_lib.Partition.NONRATED,\n note_id=\"1\",\n subject_id=\"1\",\n ap_text=ap_texts[1],\n char_offset=10,\n tokens=tokenizer_lib.tokenize(ap_texts[1]),\n labeled_char_spans=[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=8,\n end_char=12),\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n start_char=15,\n end_char=19)\n ]))\n ]\n with test_pipeline.TestPipeline() as p:\n results = (\n p\n | beam.Create(notes_with_ratings)\n | beam.ParDo(\n data_lib.ProcessAPData(filter_inorganic_threshold=0),\n section_markers))\n util.assert_that(results, util.equal_to(expected))\n\n def test_multiratings(self):\n section_markers = {\n \"hpi\": [\"history of present illness\"],\n \"a&p\": [\"assessment and plan\"],\n }\n ap_text = \"a&p:\\n # dm2:\\n-RISS\"\n notes_with_ratings = [(\"0\", {\n \"notes\": [\n data_lib.Note(\n note_id=0,\n text=\"blablabla\\n\" + ap_text,\n subject_id=0,\n category=\"PHYSICIAN\")\n ],\n \"ratings\":\n [[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=19,\n end_char=22),\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n start_char=24,\n end_char=28)\n ],\n [\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=18,\n end_char=22),\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n start_char=25,\n end_char=28)\n ]],\n \"note_partition\": [\"test\", \"test\"]\n })]\n\n expected = [\n (\"0|10\",\n data_lib.APData(\n partition=data_lib.Partition.TEST,\n note_id=\"0\",\n subject_id=\"0\",\n ap_text=ap_text,\n char_offset=10,\n tokens=tokenizer_lib.tokenize(ap_text),\n labeled_char_spans=[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=8,\n end_char=11),\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n start_char=14,\n end_char=18)\n ]))\n ] * 2\n with test_pipeline.TestPipeline() as p:\n results = (\n p\n | beam.Create(notes_with_ratings)\n | beam.ParDo(\n data_lib.ProcessAPData(filter_inorganic_threshold=0),\n section_markers))\n util.assert_that(results, util.equal_to(expected))\n\n\nclass ApplyAugmentationsDoFnTest(absltest.TestCase):\n\n def test_usage(self):\n augmentation_config = aug_lib.AugmentationConfig(\n augmentation_sequences=[\n aug_lib.AugmentationSequence(\n name=\"test\",\n augmentation_sequence=[\n 
aug_lib.ChangeDelimAugmentation(\n fragment_types=[\n ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE\n ],\n delims=[\"\\n\"])\n ])\n ],\n augmentation_number_deterministic=1)\n\n ap_data = [\n (\n \"0|10\",\n data_lib.APData(\n note_id=0,\n subject_id=0,\n ap_text=\"a&p:\\n # dm2:\\n-RISS\",\n labeled_char_spans=[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=8,\n end_char=11), # span_text=\"dm2\",\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n action_item_type=ap_parsing_lib.ActionItemType\n .MEDICATIONS,\n start_char=14,\n end_char=18) # span_text=\"RISS\",\n ])),\n ]\n expected = [\n *ap_data,\n (\n \"0|10\",\n data_lib.APData(\n note_id=0,\n subject_id=0,\n ap_text=\"a&p\\ndm2:\\n- RISS\",\n tokens=tokenizer_lib.tokenize(\"a&p\\ndm2:\\n- RISS\"),\n labeled_char_spans=[\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.PROBLEM_TITLE,\n start_char=4,\n end_char=7), # span_text=\"dm2\",\n ap_parsing_lib.LabeledCharSpan(\n span_type=ap_parsing_lib.LabeledSpanType.ACTION_ITEM,\n action_item_type=ap_parsing_lib.ActionItemType\n .MEDICATIONS,\n start_char=11,\n end_char=15) # span_text=\"RISS\",\n ],\n augmentation_name=\"test\")),\n ]\n\n with test_pipeline.TestPipeline() as p:\n results = (\n p\n | beam.Create(ap_data)\n | beam.ParDo(data_lib.ApplyAugmentations(), augmentation_config))\n util.assert_that(results, util.equal_to(expected))\n\n\nif __name__ == \"__main__\":\n absltest.main()\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Custom components of vatt, e.g. 
BN_ReLU, etc..\"\"\"\n\nfrom absl import logging\n\nimport tensorflow as tf\n\n\nclass BNReLU(tf.keras.layers.Layer):\n \"\"\"Does BN + ReLU with cross replica option.\"\"\"\n\n def __init__(self,\n bn_config,\n use_xreplica_bn=True,\n use_relu=True,\n use_bn=True,\n name=\"bn_relu\"):\n super(BNReLU, self).__init__(name=name)\n self.use_relu = use_relu\n self.use_bn = use_bn\n assert use_bn or use_relu, \"Either relu or bn should be specified\"\n if use_bn:\n if use_xreplica_bn:\n logging.info(\"Using Cross Replica BatchNorm.\")\n self.bn = tf.keras.layers.experimental.SyncBatchNormalization(\n **bn_config)\n else:\n self.bn = tf.keras.layers.BatchNormalization(**bn_config)\n\n def call(self,\n inputs,\n is_training=False):\n if self.use_bn:\n inputs = self.bn(inputs, training=is_training)\n if self.use_relu:\n inputs = tf.nn.relu(inputs)\n\n return inputs\n\n\nclass NonLinearProj(tf.keras.layers.Layer):\n \"\"\"Non-linear projection head.\"\"\"\n\n def __init__(self,\n d_inner,\n d_embd,\n bn_config,\n use_xreplica_bn=True,\n use_inner_bn=True,\n use_bn_out=False,\n name=\"non_linear_proj\"):\n super(NonLinearProj, self).__init__(name=name)\n self._bn_config = bn_config\n self._use_xreplica_bn = use_xreplica_bn\n self._use_inner_bn = use_inner_bn\n self._use_bn_out = use_bn_out\n\n self.dense_inner = None\n if d_inner is not None:\n self.dense_inner = tf.keras.layers.Dense(\n d_inner, name=\"final_projection_inner\")\n self.bn_relu = BNReLU(\n bn_config=self._bn_config,\n use_xreplica_bn=self._use_xreplica_bn,\n use_relu=True,\n use_bn=self._use_inner_bn,\n name=\"final_projection_inner_bn_relu\")\n self.dense_final = tf.keras.layers.Dense(\n d_embd, use_bias=not self._use_bn_out, name=\"final_projection\")\n if self._use_bn_out:\n self.bn_out = BNReLU(\n bn_config=self._bn_config,\n use_xreplica_bn=self._use_xreplica_bn,\n use_relu=False,\n use_bn=True,\n name=\"final_projection_bn\")\n\n def call(self,\n inputs,\n is_training):\n if self.dense_inner is None:\n d_inner = inputs.shape[-1]\n self.dense_inner = tf.keras.layers.Dense(d_inner,\n name=\"final_projection_inner\")\n\n inputs = self.dense_inner(inputs)\n inputs = self.bn_relu(inputs, is_training)\n inputs = self.dense_final(inputs)\n if self._use_bn_out:\n inputs = self.bn_out(inputs, is_training)\n\n return inputs\n\n\nclass ReluDenseBN(tf.keras.layers.Layer):\n \"\"\"Relu + Dense + BN module.\"\"\"\n\n def __init__(self,\n d_model,\n pre_bn=False,\n bn_config=None,\n use_xreplica_bn=True,\n name=\"relue_dense_relu\"):\n super(ReluDenseBN, self).__init__(name=name)\n self.pre_bn = pre_bn\n if use_xreplica_bn:\n logging.info(\"Using Cross Replica BatchNorm in Relu-Dense-BN.\")\n bn_module = tf.keras.layers.experimental.SyncBatchNormalization\n else:\n bn_module = tf.keras.layers.BatchNormalization\n\n if bn_config is None:\n bn_config = {\"scale\": True}\n if use_xreplica_bn:\n bn_config.update({\"momentum\": 0.9})\n\n if self.pre_bn:\n self.pre_bn = bn_module(**bn_config)\n\n self.dense = tf.keras.layers.Dense(d_model,\n use_bias=False,\n name=\"linear_projection\")\n self.bn = bn_module(**bn_config)\n\n def call(self, inputs, training):\n if self.pre_bn:\n inputs = self.pre_bn(inputs, training)\n\n inputs = tf.nn.relu(inputs)\n inputs = self.dense(inputs)\n inputs = self.bn(inputs, training)\n\n return inputs\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may 
obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"The contrastive model.\"\"\"\n\nimport tensorflow.compat.v1 as tf\nfrom supcon import classification_head\nfrom supcon import enums\nfrom supcon import projection_head\nfrom supcon import resnet\n\n\nclass ContrastiveModel(tf.layers.Layer):\n \"\"\"A model suitable for contrastive training with different backbone networks.\n\n Attributes:\n architecture: An enums.EncoderArchitecture. The type of the architecture to\n use for the encoder.\n normalize_projection_head_input: Whether the encoder output that is the\n input to the projection head should be normalized.\n normalize_classification_head_input: Whether the encoder output that is the\n input to the classification head should be normalized.\n jointly_train_classification_head: Whether the classification head is\n trained simultaneously with the encoder. If false, a stop_gradient is\n added between the classification head and the encoder.\n encoder_kwargs: Keyword arguments that are passed on to the constructor of\n the encoder. The specific encoder implementation is determined by\n `architecture`.\n projection_head_kwargs: Keyword arguments that are passed on to the\n constructor of the projection head. These are the arguments to\n `projection_head.ProjectionHead`.\n classification_head_kwargs: Keyword arguments that are passed on to the\n constructor of the classification head. These are the arguments to\n `classification_head.ClassificationHead`.\n name: A name for this object.\n \"\"\"\n\n def __init__(self,\n architecture=enums.EncoderArchitecture.RESNET_V1,\n normalize_projection_head_input=True,\n normalize_classification_head_input=True,\n stop_gradient_before_projection_head=False,\n stop_gradient_before_classification_head=True,\n encoder_kwargs=None,\n projection_head_kwargs=None,\n classification_head_kwargs=None,\n name='ContrastiveModel',\n **kwargs):\n super(ContrastiveModel, self).__init__(name=name, **kwargs)\n\n self.normalize_projection_head_input = normalize_projection_head_input\n self.normalize_classification_head_input = (\n normalize_classification_head_input)\n self.stop_gradient_before_projection_head = (\n stop_gradient_before_projection_head)\n self.stop_gradient_before_classification_head = (\n stop_gradient_before_classification_head)\n\n encoder_fns = {\n enums.EncoderArchitecture.RESNET_V1: resnet.ResNetV1,\n enums.EncoderArchitecture.RESNEXT: resnet.ResNext,\n }\n if architecture not in encoder_fns:\n raise ValueError(f'Architecture should be one of {encoder_fns.keys()}, '\n f'found: {architecture}.')\n encoder_fn = encoder_fns[architecture]\n\n assert encoder_kwargs is not None\n projection_head_kwargs = projection_head_kwargs or {}\n classification_head_kwargs = classification_head_kwargs or {}\n\n self.encoder = encoder_fn(name='Encoder', **encoder_kwargs)\n self.projection_head = projection_head.ProjectionHead(\n **projection_head_kwargs)\n self.classification_head = classification_head.ClassificationHead(\n **classification_head_kwargs)\n\n def call(self, inputs, training):\n embedding = self.encoder(inputs, training)\n normalized_embedding = tf.nn.l2_normalize(embedding, axis=1)\n\n 
projection_input = (\n normalized_embedding\n if self.normalize_projection_head_input else embedding)\n if self.stop_gradient_before_projection_head:\n projection_input = tf.stop_gradient(projection_input)\n projection = self.projection_head(projection_input, training)\n\n classification_input = (\n normalized_embedding\n if self.normalize_classification_head_input else embedding)\n if self.stop_gradient_before_classification_head:\n classification_input = tf.stop_gradient(classification_input)\n classification = self.classification_head(classification_input, training)\n\n return embedding, normalized_embedding, projection, classification\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Dream Fields learn a 3D neural radiance field (NeRF) given a textual prompt.\"\"\"\n\nimport collections\nimport functools\nimport os\nimport time\nfrom typing import Optional\n\nfrom . import augment\nfrom . import helpers\nfrom . import log\nfrom . import scene\nfrom . import schedule\n\nfrom absl import logging\nfrom clu import metric_writers\nimport flax\nimport flax.linen as nn\nfrom flax.training import checkpoints\nimport jax\nfrom jax import random\nimport jax.numpy as np\nimport matplotlib.pyplot as plt\nimport ml_collections\nimport numpy as onp\nfrom scipy import stats\nimport tensorflow.io.gfile as gfile\nimport tqdm\n\n\nclass DreamField:\n \"\"\"Trainable Dream Field model.\"\"\"\n\n def __init__(self, config):\n self.config = config\n\n def run_train(self,\n experiment_dir,\n work_unit_dir,\n rng,\n yield_results=False):\n \"\"\"Train a Dream Field and save results to work_unit_dir.\"\"\"\n t_start = time.time()\n config = self.config\n\n logging.info('Local devices: %s', jax.local_devices())\n logging.info('All devices: %s', jax.devices())\n\n ## Load CLIP\n encode_image, encode_text, preprocess_image, tokenize_fn = (\n helpers.load_image_text_model(config.loss_model))\n\n ## Pick a prompt\n template = config.get('query_template', '{query}')\n query = template.format(query=config.query)\n z_clip = encode_text(tokenize_fn(query))\n\n ## Encode retrieval set\n if config.queries_r:\n if config.retrieve_models[0] == config.loss_model:\n # Reuse loss model.\n encode_image_r, preprocess_image_r = encode_image, preprocess_image\n encode_text_r, tokenize_fn_r = encode_text, tokenize_fn\n else:\n # Load new model.\n encode_image_r, encode_text_r, preprocess_image_r, tokenize_fn_r = (\n helpers.load_image_text_model(config.retrieve_models[0]))\n\n if config.query not in config.queries_r:\n config.queries_r.append(config.query)\n z_clip_r = encode_text_r(tokenize_fn_r(config.queries_r))\n true_idx_r = config.queries_r.index(config.query)\n assert true_idx_r >= 0 # Input query must be set of retrieval queries.\n\n del encode_text_r, tokenize_fn_r # Clean up retrieval text encoder.\n\n del encode_text, tokenize_fn # Clean up text encoder.\n\n ## Scene origin manually tracked\n scene_origin = scene.EMA(np.zeros(3, 
dtype=np.float64), decay=0.999)\n\n def train_step(state, rays, key, *multistep_constants):\n \"\"\"Perform a training iteration, optionally composed of multiple substeps.\n\n Using multiple substeps slightly reduces training time, but only one\n substep per training iteration is used in experiments.\n\n Args:\n state: Optimizer state.\n rays: Camera rays for rendering, shared across all substeps.\n key: PRNGKey for random number generation (e.g. for augmentations).\n *multistep_constants: Training constants that can vary across substeps.\n 7 arrays of constants of length config.substeps are expected:\n (1) lrs: learning rates\n (2) scs: scale factor for integrated positional encoding. Larger\n scales lead to a blurrier appearance. A constant sc=1 is the\n standard mip-NeRF IPE, and used by Dream Fields.\n (3) sns: standard deviation of pre-activation noise for NeRF\n density. Dream Fields use sn=0. density(x) = softplus(s(x) + eps),\n eps ~ N(0, sn^2)\n (4) mrs: norm of radiance mask, defining scene bounds.\n (5) betas: scale of beta prior loss. Dream Fields use beta=0.\n (6) acct: transmittance loss hyperparameter, defining the target\n average opacity. This is 1 - tau (target transmittance).\n (7) acclam: weight of transmittance loss.\n\n Returns:\n state: Updated optimizer state.\n last_augs: Augmented views of renderings from the last substep.\n mean_losses: Dictionary of losses averaged over replicas and substeps.\n scene_origin: Updated origin of the scene, based on the center of mass.\n \"\"\"\n # NOTE(jainajay): rays are shared across all substeps\n pmean = functools.partial(jax.lax.pmean, axis_name='batch')\n psum = functools.partial(jax.lax.psum, axis_name='batch')\n\n def loss_fn(params, key, sc, sn, mr, beta, acct, acclam):\n render_key, aug_key, key = random.split(key, 3)\n\n # Render from nerf\n (rgb_est_flat, _, acc_est_flat), aux = render_rays(\n rays=rays,\n variables=params,\n rng=render_key,\n config=config,\n sc=sc,\n sigma_noise_std=sn,\n mask_rad=mr,\n origin=scene_origin.value,\n train=True)\n rgb_est = scene.gather_and_reshape(rgb_est_flat, config.render_width, 3)\n acc_est = scene.gather_and_reshape(acc_est_flat, config.render_width, 1)\n # Make augmentations process specific\n aug_key = random.fold_in(aug_key, pid)\n # Perform augmentations and resize to clip_width\n augs = augment.augment_rendering(config, rgb_est, acc_est, aug_key)\n\n # Run through CLIP\n z_est = encode_image(preprocess_image(augs))\n clip_loss = -(z_est * z_clip).sum(-1).mean()\n total_loss = clip_loss\n\n transparency_loss = config.get('transparency_loss', None)\n acc_mean = np.mean(acc_est)\n aux['losses']['acc_mean'] = acc_mean\n if transparency_loss == 'neg_lam_transmittance_clipped':\n # Compute the Dream Fields transmittance loss for scene sparsity.\n trans_mean = 1 - acc_mean\n trans_mean_clipped = np.minimum(1 - acct, trans_mean)\n reg = acclam * trans_mean_clipped\n total_loss -= reg\n\n aux['losses']['trans_mean_clipped'] = trans_mean_clipped\n aux['losses']['acc_reg_additive'] = reg\n else:\n assert transparency_loss is None\n\n # Compute a sparsity loss by placing a bimodal beta prior on the\n # per-pixel transmittance. This prior was proposed by Lombardi et al\n # in \"Neural Volumes: Learning Dynamic Renderable Volumes from Images\"\n # and is used only in ablations.\n beta_loss = np.mean(\n np.log(np.maximum(1e-6, acc_est_flat)) +\n np.log(np.maximum(1e-6, 1. 
- acc_est_flat)))\n total_loss += beta_loss * beta\n\n # Compute a weighted mean of each replica's estimated scene origin,\n # since replicas get a different subset of rays\n total_sigma = psum(aux['scene_origin_sigma'])\n aux['scene_origin'] = psum(aux['scene_origin'] *\n aux['scene_origin_sigma'] / total_sigma)\n # Compute loss that pushes scene content to 0 origin. We set the loss\n # weight zero_origin_lam = 0 in experiments so the loss is just for\n # logging how far the origin has drifted.\n origin_loss = np.sum(np.square(aux['scene_origin']))\n if config.get('zero_origin_lam', 0.):\n total_loss += config.zero_origin_lam * origin_loss\n\n aux['losses'].update({\n 'clip_loss': clip_loss,\n 'beta_loss': beta_loss,\n 'origin_loss': origin_loss,\n 'loss': total_loss,\n })\n aux['augs'] = augs\n return total_loss, aux\n\n grad_fn = jax.value_and_grad(loss_fn, has_aux=True)\n\n # Scan over substeps\n def body_fn(state, step_constants):\n lr, step_constants = step_constants[0], step_constants[1:]\n grad_fn_key, _ = random.split(key, 2)\n (_, aux), grad = grad_fn(state.target, grad_fn_key, *step_constants)\n grad = pmean(grad) # all-reduce grad\n aux['losses'] = pmean(aux['losses'])\n aux['losses']['grad_norm'] = helpers.tree_norm(grad)\n state = state.apply_gradient(grad, learning_rate=lr)\n return state, aux\n\n assert len(multistep_constants) == 7\n multistep_constants = np.array(multistep_constants).T\n\n if config.substeps == 1:\n state, aux = body_fn(state, np.squeeze(multistep_constants))\n last_augs = aux['augs']\n else:\n state, aux = jax.lax.scan(body_fn, state, multistep_constants)\n # Augmentations from last substep.\n # Shape: [n_local_aug, clip_width, clip_width, 3]\n last_augs = aux['augs'][-1]\n\n # Average each type of loss over substeps\n mean_losses = jax.tree_map(np.mean, aux['losses'])\n return state, last_augs, mean_losses, aux['scene_origin']\n\n train_pstep = jax.pmap(\n train_step,\n axis_name='batch',\n in_axes=(0, 0, 0, None, None, None, None, None, None, None))\n\n onp.random.seed(config.seed)\n\n n_device = jax.local_device_count()\n pid = jax.process_index()\n logging.info('n_device %d', n_device)\n ## Modified NeRF architecture, with swish, softplus, skips.\n variables, render_rays = helpers.init_nerf_model(rng.advance(1), config)\n state = flax.optim.Adam(config.lr0, eps=config.adam_eps).create(variables)\n\n ## Try to restore a checkpoint.\n restore_dir = config.get('restore_dir', experiment_dir)\n restore_dir = os.path.join(restore_dir, os.path.basename(work_unit_dir))\n if checkpoints.latest_checkpoint(restore_dir):\n restored = checkpoints.restore_checkpoint(\n restore_dir,\n target={\n 'origin': np.zeros(3),\n 'state': state,\n 'vars': variables\n })\n scene_origin.value = onp.array(restored['origin'])\n state = restored['state']\n variables = restored['vars']\n logging.info('restored checkpoint from step %d', state.state.step)\n else:\n logging.info('did not find checkpoint in %s', restore_dir)\n\n ## Replicate state.\n step_init = state.state.step\n helpers.defragment()\n state = flax.jax_utils.replicate(state, jax.devices())\n helpers.defragment()\n\n ## pmap'd rendering for test time evaluation.\n kwargs_test = dict(rng=None, sigma_noise_std=0.)\n config_test = ml_collections.ConfigDict(config)\n config_test.update(config.test)\n config_test_hq = ml_collections.ConfigDict(config_test)\n config_test_hq.update(config.test_hq)\n\n @functools.partial(jax.pmap, in_axes=(0, None, None, None))\n def render_test_p(rays, variables, sc=1., mr=1.):\n return 
render_rays(\n rays=rays,\n variables=variables,\n sc=sc,\n mask_rad=mr,\n origin=scene_origin.value,\n config=config_test,\n **kwargs_test)[0]\n\n @functools.partial(jax.pmap, in_axes=(0, None, None, None))\n def render_test_hq_p(rays, variables, sc=1., mr=1.):\n return render_rays(\n rays=rays,\n variables=variables,\n config=config_test_hq,\n sc=sc,\n mask_rad=mr,\n origin=scene_origin.value,\n **kwargs_test)[0]\n\n def render_test(rays, variables, sc=1., mr=1., hq=False):\n sh = rays[0].shape\n rays = [x.reshape((jax.device_count(), -1) + x.shape[1:]) for x in rays]\n if hq:\n out = render_test_hq_p(rays, variables, sc, mr)\n else:\n out = render_test_p(rays, variables, sc, mr)\n out = [x.reshape(sh[:-1] + (-1,)) for x in out]\n return out\n\n def render_loop(rays, variables, sc=1., mr=1., chunk=2**13, hq=False):\n sh = list(rays[0].shape[:-1])\n rays = [x.reshape((-1,) + x.shape[-1:]) for x in rays]\n outs = [\n render_test([x[i:i + chunk]\n for x in rays], variables, sc, mr, hq=hq)\n for i in range(0, rays[0].shape[0], chunk)\n ]\n outs = [\n np.reshape(np.concatenate([z[i]\n for z in outs]), sh + [-1])\n for i in range(3)\n ]\n return outs\n\n ## Training loop\n t_total = 0.\n logging.info('Experiment dir %s', experiment_dir)\n logging.info('Work unit dir %s', work_unit_dir)\n gfile.makedirs(work_unit_dir)\n\n # Set up metric writer\n writer = metric_writers.create_default_writer(\n work_unit_dir, asynchronous=True, just_logging=jax.process_index() > 0)\n if jax.process_index() == 0:\n train_config = config.copy_and_resolve_references()\n log.write_config_json(train_config, work_unit_dir)\n\n # Scale intrinsics to different resolutions.\n hwf_clip_r = scene.scale_intrinsics(config.retrieve_widths[0])\n hwf_base = scene.scale_intrinsics(config.render_width)\n hwf_video = scene.scale_intrinsics(config.get('lq_video_width', 300.))\n hwf_video_hq = scene.scale_intrinsics(config.get('hq_video_width', 400.))\n\n # JIT compile ray generation\n @jax.jit\n def camera_ray_batch_base(p, focal_mult):\n return scene.camera_ray_batch(p, *hwf_base[:2], hwf_base[2] * focal_mult)\n\n @jax.jit\n def sample_pose_focal(key):\n return scene.sample_camera(key, config.th_range, config.phi_range,\n config.rad_range, config.focal_mult_range)\n\n shard_rays_jit = jax.jit(functools.partial(scene.shard_rays))\n\n def sample_iter_data(key, step):\n # Sample pose, focal length multiplier.\n pose, rad, focal_mult = sample_pose_focal(key)\n\n # Generate rays, shaped for pmap over devices.\n rays = camera_ray_batch_base(pose, focal_mult)\n rays_in = shard_rays_jit(rays)\n # Select rays for this process\n rays_in = jax.tree_map(lambda x: x[pid], rays_in)\n\n substeps = np.arange(start=step, stop=step + config.substeps, step=1)\n\n # mip-NeRF scale annealing.\n decays = config.mipnerf.decay_start * (\n 1 - substeps / config.mipnerf.decay_iters)\n scs = np.maximum(1., 2**decays)\n\n # Sigma noise annealing.\n sns = schedule.sigma_noise_std_fn(\n substeps, i_split=config.sn_i_split, sn0=config.sn0, sn1=config.sn1)\n\n # Scene bounds annealing.\n mrs = schedule.mask_rad_fn(\n substeps, i_split=config.mr_i_split, mr0=config.mr0, mr1=config.mr1)\n\n # Anneal target opacity (1 - transmittance).\n accts = schedule.anneal_exponentially(substeps, config.acc_target_i_split,\n config.acc_target0,\n config.acc_target1)\n # The area of an object on the image plane grows with the focal length\n # and shrinks with increasing camera radius. 
Scale target opacity\n # proportionally with the squared focal multiplier and inversely\n # proportionally with the squared camera radius. For consistency with\n # early experiments that did not use this scaling, we also scale by a\n # constant, 1 / (4^2 * 1.2).\n acct_scaling = focal_mult**2 / ((rad / 4.)**2) / 1.2\n accts = np.minimum(1., acct_scaling * accts)\n acclams = np.where(substeps < config.acc_lam_after, 0., config.acc_lam)\n\n # Beta prior encourages either 0 or 1 opacity for rays\n betas = np.where(substeps < config.beta_after, .0,\n config.get('beta_lam', .001))\n\n # Learning rate schedule.\n # NOTE: vectorized calculation of lrs doesn't work with multiple substeps\n lrs = schedule.lr_fn(\n substeps,\n i_split=config.lr_i_split,\n i_end=config.iters,\n lr0=config.lr0,\n lr1=config.lr1,\n lr2=config.lr2,\n cosine_decay=config.lr_cosine_decay)\n\n return substeps, rays_in, lrs, scs, sns, mrs, betas, accts, acclams\n\n pbar = tqdm.trange(\n step_init,\n config.iters + config.substeps,\n config.substeps,\n desc='training')\n for i in pbar:\n t = time.time()\n\n substeps, rays_in, lrs, scs, sns, mrs, betas, accts, acclams = (\n sample_iter_data(rng.advance(1), i))\n l = substeps[-1]\n\n keys_pstep = rng.split(n_device)\n # NOTE: loss is averaged across substeps.\n new_state, augs, mean_losses, new_scene_origin = train_pstep(\n state, rays_in, keys_pstep, lrs, scs, sns, mrs, betas, accts, acclams)\n\n # Reduce across devices\n mean_losses = jax.tree_map(np.mean, mean_losses)\n\n # Gradient skipping if nan.\n if (helpers.all_finite_tree(mean_losses) and\n helpers.all_finite_tree(new_state)):\n state = new_state\n else:\n logging.warn('Skipping update on step %d. non-finite loss or state', i)\n continue\n\n # Update scene origin.\n if config.get('ema_scene_origin', False):\n if helpers.all_finite(new_scene_origin):\n scene_origin.update(new_scene_origin[0])\n else:\n logging.warn(\n 'Skipping origin update on step %d. '\n 'non-finite origin. 
old: %s skipped update: %s', i,\n scene_origin.value, new_scene_origin)\n\n ## Yield results, for display in colab.\n augs = augs.reshape(-1, *augs.shape[2:]) # devices, n_localaug, HWC->BHWC\n if yield_results:\n yield mean_losses, augs, scene_origin.value\n else:\n yield None\n pbar.set_description(f'Loss: {mean_losses[\"loss\"]:.4f}')\n\n ## Logging.\n if i == 0:\n continue\n\n t_total += time.time() - t\n\n if i % config.log_scalars_every == 0:\n scalars = {f'losses/{key}': value for key, value in mean_losses.items()}\n scalars.update({\n 'schedule/mipnerf_scale': scs[-1],\n 'schedule/lr': lrs[-1],\n 'schedule/mask_rad': mrs[-1],\n 'schedule/sigma_noise_std': sns[-1],\n 'schedule/beta': betas[-1],\n 'schedule/acc_target': accts[-1],\n 'schedule/acc_lam': acclams[-1],\n 'origin/x': scene_origin.value[0],\n 'origin/y': scene_origin.value[1],\n 'origin/z': scene_origin.value[2],\n 'origin/norm': np.linalg.norm(scene_origin.value),\n })\n\n secs_per_iter = t_total / (l - step_init)\n iters_per_sec = (l - step_init) / t_total\n wall = time.time() - t_start\n scalars.update({\n 'system/wall': wall,\n 'system/secs_per_iter': secs_per_iter,\n 'system/iters_per_sec': iters_per_sec,\n })\n\n if i % config.render_every == 0:\n variables = helpers.state_to_variables(state)\n cam2world = scene.pose_spherical(30., -45., 4.)\n rays = scene.camera_ray_batch(cam2world, *hwf_clip_r)\n\n # Render with no scale manipulation.\n outs = render_loop(rays, variables, sc=1., mr=mrs[-1], hq=True)\n outs = [np.squeeze(x) for x in outs]\n step_images = {\n 'render/rgb': outs[0][None],\n 'render/depth': outs[1][None, Ellipsis, None],\n 'render/acc': outs[2][None, Ellipsis, None],\n }\n\n # Compute retrieval metric.\n if config.queries_r:\n z_est = encode_image_r(preprocess_image_r(outs[0][None]))\n cosine_sim = (z_est * z_clip_r).sum(-1) # 1d, num retrieval queries\n log_prob = nn.log_softmax(cosine_sim)\n prefix = f'val/{config.retrieve_models[0]}/retrieve_'\n scalars.update({\n f'{prefix}cosine_sim':\n cosine_sim[true_idx_r],\n f'{prefix}loss':\n -log_prob[true_idx_r],\n f'{prefix}acc':\n (np.argmax(cosine_sim) == true_idx_r).astype(float)\n })\n\n augs_tiled = log.make_image_grid(augs[:8])\n step_images['render/augmentations'] = augs_tiled\n\n fig = plt.figure()\n plt.imshow(1. 
/ np.maximum(config.near, outs[1]))\n plt.colorbar()\n plt.title('disparity')\n disparity = log.plot_to_image(fig)\n step_images['render/disparity'] = disparity\n\n writer.write_images(step=l, images=step_images)\n\n if config.render_lq_video and (i == config.iters or config.video_every and\n i % config.video_every == 0):\n\n def rays_theta(th):\n cam2world = scene.pose_spherical(th, -30., 4.)\n return scene.camera_ray_batch(cam2world, *hwf_video)\n\n th_range = np.linspace(\n 0, 360, config.get('lq_video_n_frames', 60), endpoint=False)\n variables = helpers.state_to_variables(state)\n frames_all = [\n render_loop(rays_theta(th), variables, scs[-1], mrs[-1], hq=False)\n for th in tqdm.tqdm(th_range, desc='render video')\n ]\n\n videos = [[np.squeeze(f[i]) for f in frames_all] for i in range(3)]\n for video, label in zip(videos, 'rgb depth acc'.split()):\n scale = (label == 'depth')\n log.log_video(\n None, video, 'frames', label, l, work_unit_dir, scale=scale)\n\n if i % config.log_scalars_every == 0:\n writer.write_scalars(step=l, scalars=scalars)\n\n if i % config.flush_every == 0:\n writer.flush()\n\n defrag_every = config.get('defragment_every', default=0)\n if defrag_every and i % defrag_every == 0:\n helpers.defragment()\n\n if config.get('checkpoint_every') and i % config.checkpoint_every == 0:\n saved_path = checkpoints.save_checkpoint(\n ckpt_dir=work_unit_dir,\n target={\n 'state': flax.jax_utils.unreplicate(state),\n 'vars': helpers.state_to_variables(state),\n 'origin': np.array(scene_origin.value),\n },\n step=l,\n keep=1,\n overwrite=True,\n keep_every_n_steps=config.get('keep_every_n_steps', None))\n logging.info('saved checkpoint to %s', saved_path)\n\n # Make a higher res, higher frame rate video.\n if config.render_hq_video and (config.get('hq_video_every', None) and\n i % config.hq_video_every == 0 or\n i == config.iters):\n\n my_rays = lambda c2w: scene.camera_ray_batch(c2w, *hwf_video_hq)\n th_range = np.linspace(\n 0, 360, config.get('hq_video_n_frames', 240), endpoint=False)\n poses = [scene.pose_spherical(th, -30., 4.) 
for th in th_range]\n variables = helpers.state_to_variables(state)\n frames_all = [\n render_loop(my_rays(pose), variables, 1., config.mr1, hq=True)\n for pose in tqdm.tqdm(poses, 'render hq video')\n ]\n\n videos = [\n [onp.array(np.squeeze(f[j])) for f in frames_all] for j in range(3)\n ]\n meta_path = os.path.join(work_unit_dir, 'meta_hq.npy')\n with gfile.GFile(meta_path, 'wb') as f:\n logging.info('saving metadata for rendered hq frames to %s',\n meta_path)\n onp.save(f, dict(poses=onp.array(poses), hwf=onp.array(hwf_video_hq)))\n for video, label in zip(videos, 'rgb depth acc'.split()):\n scale = (label == 'depth')\n log.log_video(\n None, video, 'frames_hq', label, i, work_unit_dir, scale=scale)\n\n writer.flush()\n writer.close()\n logging.info('%f sec elapsed total', time.time() - t_start)\n\n def render_from_checkpoint(self,\n work_unit_dir,\n widths,\n render_test_hq_p,\n step=None):\n \"\"\"Restore learned radiance field weights and scene origin.\"\"\"\n zero_outs = {\n width: [np.zeros((width, width, c)).squeeze() for c in [3, 1, 1, 3]\n ] for width in widths\n }\n latest_checkpoint = checkpoints.latest_checkpoint(work_unit_dir)\n if not latest_checkpoint:\n print(f'ERROR: no checkpoint found in {work_unit_dir}')\n return latest_checkpoint, zero_outs\n\n try:\n restored = checkpoints.restore_checkpoint(\n work_unit_dir, target=None, step=step)\n except ValueError as e:\n print(f'ERROR loading checkpoint from {work_unit_dir} at step {step}:', e)\n return latest_checkpoint, zero_outs\n variables = flax.core.frozen_dict.FrozenDict(restored['vars'])\n origin = restored['origin']\n if not np.all(np.isfinite(origin)):\n print('origin', origin, 'has nan value(s) for wu', work_unit_dir)\n\n # Render wrapper methods.\n def render_test(rays):\n sh = rays[0].shape\n rays = scene.padded_shard_rays(rays, multihost=False)\n out = render_test_hq_p(rays, variables, origin)\n out = [x.reshape((onp.prod(sh[:-1]), -1)) for x in out] # gather flat\n out = [x[:sh[0]] for x in out] # Unpad\n return out\n\n def render_loop(rays, chunk=2**16):\n sh = list(rays[0].shape[:-1])\n rays = [x.reshape((-1,) + x.shape[-1:]) for x in rays]\n outs = [\n render_test([x[i:i + chunk]\n for x in rays])\n for i in range(0, rays[0].shape[0], chunk)\n ]\n outs = [\n np.reshape(np.concatenate([z[i]\n for z in outs]), sh + [-1])\n for i in range(3)\n ]\n return outs\n\n # Render validation view.\n renders_by_width = {}\n for width in set(widths):\n logging.info('rendering at width %d', width)\n hwf_clip_r = scene.scale_intrinsics(width)\n cam2world = scene.pose_spherical(30., -45., 4.)\n rays = scene.camera_ray_batch(cam2world, *hwf_clip_r)\n outs = render_loop(rays)\n outs = [np.squeeze(x) for x in outs]\n renders_by_width[width] = outs\n\n return latest_checkpoint, renders_by_width\n\n def run_eval(self,\n experiment_dir,\n rng,\n step=None,\n work_units=None,\n model_names_r=None,\n widths_r=None):\n \"\"\"Evaluate models in experiment_dir for R-Precision.\"\"\"\n logging.info('Local devices: %s', jax.local_devices())\n logging.info('All devices: %s', jax.devices())\n\n config = log.load_config_json(os.path.join(experiment_dir, '1'))\n logging.info('Config: %s', config)\n\n # Load retrieval models.\n if not model_names_r:\n model_names_r = config.retrieve_models\n models_r = [\n helpers.load_image_text_model(name)\n for name in tqdm.tqdm(model_names_r, desc='loading retrieval models')\n ]\n if not widths_r:\n widths_r = config.retrieve_widths\n\n print('model_names_r', model_names_r)\n print('widths_r', 
widths_r)\n\n # Encode retrieval set text descriptions.\n z_clip_rs = [] # text encodings of queries with all retrieval models\n # shape: [n_models, n_queries, d_model for specific model]\n if config.queries_r:\n for _, encode_text, _, tokenize_fn in tqdm.tqdm(\n models_r, desc='embedding queries with retrieval models'):\n z_clip_r = encode_text(tokenize_fn(config.queries_r))\n z_clip_rs.append(z_clip_r)\n\n # JIT rendering.\n kwargs_test = dict(rng=None, sigma_noise_std=0.)\n config_test_hq = ml_collections.ConfigDict(config)\n config_test_hq.update(config.test_hq)\n _, render_rays = helpers.init_nerf_model(rng.advance(1), config)\n\n @functools.partial(jax.pmap, in_axes=(0, None, None))\n def render_test_hq_p(rays, variables, origin):\n return render_rays(\n rays=rays,\n variables=variables,\n config=config_test_hq,\n sc=1.,\n mask_rad=config_test_hq.mr1,\n origin=origin,\n **kwargs_test)[0]\n\n # Render\n if work_units is None:\n work_units = gfile.listdir(experiment_dir)\n work_units = [int(wu) for wu in work_units if wu.isnumeric()]\n work_units.sort()\n work_unit_queries = []\n work_unit_configs = []\n n_wu = len(work_units)\n # create resolution -> n_wu -> 4ximg mapping\n all_renders_by_width = collections.defaultdict(list)\n for work_unit in tqdm.tqdm(work_units, 'Rendering all work units'):\n # Load query used to generate this object\n work_unit_dir = os.path.join(experiment_dir, str(work_unit))\n wu_config = log.load_config_json(work_unit_dir)\n work_unit_configs.append(wu_config)\n work_unit_queries.append(wu_config.query) # not templated\n\n # Render the object\n _, renders = self.render_from_checkpoint(work_unit_dir, widths_r,\n render_test_hq_p, step)\n for width, render in renders.items():\n all_renders_by_width[width].append(render)\n\n print('all_renders_by_width keys', list(all_renders_by_width.keys()))\n\n def aggregate(raw):\n raw = onp.array(raw).astype(onp.float)\n return {\n 'mean': onp.mean(raw),\n 'sem': stats.sem(raw),\n 'raw': raw,\n }\n\n metrics = {\n 'renders_by_width': jax.tree_map(onp.array, dict(all_renders_by_width)),\n 'work_unit_configs': work_unit_configs,\n 'work_unit_queries': work_unit_queries,\n }\n\n ## Embed images with all retrieval models\n pbar = tqdm.tqdm(\n zip(model_names_r, widths_r, z_clip_rs, models_r),\n desc='Embedding renderings',\n total=len(model_names_r))\n for model_name, width, z_text, (encode_image, _, preprocess, _) in pbar:\n renders = all_renders_by_width[width]\n rgbs = np.array([rgb for rgb, _, _, _ in renders])\n print('about to encode rgbs with shape', rgbs.shape)\n print(' model_name', model_name)\n print(' width', width)\n z_est = encode_image(preprocess(rgbs))\n\n assert z_est.shape[0] == n_wu\n assert z_text.shape[0] == len(config.queries_r)\n cosine_sim = (z_est[:, None] * z_text[None]).sum(-1) # [n_wu, queries_r]\n idx_true = np.array(\n [config.queries_r.index(query) for query in work_unit_queries])\n cosine_sim_true = np.take_along_axis(\n cosine_sim, idx_true[:, None], axis=1).squeeze(1)\n log_prob = nn.log_softmax(cosine_sim, axis=1) # normalize over captions\n log_likelihood = np.take_along_axis(\n log_prob, idx_true[:, None], axis=1).squeeze(1)\n correct = np.argmax(cosine_sim, axis=1) == idx_true\n metrics[model_name] = {\n 'val/retrieve_cosine_sim': aggregate(cosine_sim_true),\n 'val/retrieve_loss': aggregate(-log_likelihood),\n 'val/retrieve_acc': aggregate(correct),\n }\n\n metrics_path = os.path.join(experiment_dir, 'metrics.npy')\n with gfile.GFile(metrics_path, 'wb') as f:\n logging.info('Writing metrics 
to %s', metrics_path)\n onp.save(f, metrics)\n\n for k, v in metrics.items():\n if k not in ('renders_by_width', 'work_unit_configs'):\n logging.info('Metric %s: %s', k, v)\n\n return metrics\n\n\ndef run_train(*, config, experiment_dir, work_unit_dir,\n rng):\n for _ in DreamField(config).run_train(\n experiment_dir=experiment_dir,\n work_unit_dir=work_unit_dir,\n rng=rng,\n yield_results=False):\n pass\n\n\ndef run_eval(*,\n experiment_dir,\n rng,\n step = None,\n model_names_r=None,\n widths_r=None):\n return DreamField(None).run_eval(\n experiment_dir=experiment_dir,\n rng=rng,\n step=step,\n model_names_r=model_names_r,\n widths_r=widths_r)\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Collection of model functions implementing different multihop variants.\"\"\"\n\nfrom language.labs.drkit import model_fns as model_utils\nfrom language.labs.drkit import search_utils\nimport tensorflow.compat.v1 as tf\n\nfrom tensorflow.contrib import layers as contrib_layers\n\nDEFAULT_VALUE = -10000.\n\n\ndef follow_mention(batch_entities,\n relation_st_qry,\n relation_en_qry,\n entity_word_ids,\n entity_word_masks,\n ent2ment_ind,\n ent2ment_val,\n ment2ent_map,\n word_emb_table,\n word_weights,\n mips_search_fn,\n tf_db,\n hidden_size,\n mips_config,\n qa_config,\n is_training,\n ensure_index=None):\n \"\"\"Sparse implementation of the relation follow operation.\n\n Args:\n batch_entities: [batch_size, num_entities] SparseTensor of incoming entities\n and their scores.\n relation_st_qry: [batch_size, dim] Tensor representing start query vectors\n for dense retrieval.\n relation_en_qry: [batch_size, dim] Tensor representing end query vectors\n for dense retrieval.\n entity_word_ids: [num_entities, max_entity_len] Tensor holding word ids of\n each entity.\n entity_word_masks: [num_entities, max_entity_len] Tensor with masks into\n word ids above.\n ent2ment_ind: [num_entities, num_mentions] RaggedTensor mapping entities to\n mention indices which co-occur with them.\n ent2ment_val: [num_entities, num_mentions] RaggedTensor mapping entities to\n mention scores which co-occur with them.\n ment2ent_map: [num_mentions] Tensor mapping mentions to their entities.\n word_emb_table: [vocab_size, dim] Tensor of word embeddings. (?)\n word_weights: [vocab_size, 1] Tensor of word weights. (?)\n mips_search_fn: Function which accepts a dense query vector and returns the\n top-k indices closest to it (from the tf_db).\n tf_db: [num_mentions, 2 * dim] Tensor of mention representations.\n hidden_size: Scalar dimension of word embeddings.\n mips_config: MIPSConfig object.\n qa_config: QAConfig object.\n is_training: Boolean.\n ensure_index: [batch_size] Tensor of mention ids. Only needed if\n `is_training` is True. (? 
each example only one ensure entity?)\n\n Returns:\n ret_mentions_ids: [batch_size, k] Tensor of retrieved mention ids.\n ret_mentions_scs: [batch_size, k] Tensor of retrieved mention scores.\n ret_entities_ids: [batch_size, k] Tensor of retrieved entity ids.\n \"\"\"\n if qa_config.entity_score_threshold is not None:\n # Remove the entities which have scores lower than the threshold.\n mask = tf.greater(batch_entities.values, qa_config.entity_score_threshold)\n batch_entities = tf.sparse.retain(batch_entities, mask)\n batch_size = batch_entities.dense_shape[0] # number of the batches\n batch_ind = batch_entities.indices[:, 0] # the list of the batch ids\n entity_ind = batch_entities.indices[:, 1] # the list of the entity ids\n entity_scs = batch_entities.values # the list of the scores of each entity\n\n # Obtain BOW embeddings for the given set of entities.\n # [NNZ, dim] NNZ (number of non-zero entries) = len(entity_ind)\n batch_entity_emb = model_utils.entity_emb(entity_ind, entity_word_ids,\n entity_word_masks, word_emb_table,\n word_weights)\n batch_entity_emb = batch_entity_emb * tf.expand_dims(entity_scs, axis=1)\n # [batch_size, dim]\n uniq_batch_ind, uniq_idx = tf.unique(batch_ind)\n agg_emb = tf.unsorted_segment_sum(batch_entity_emb, uniq_idx,\n tf.shape(uniq_batch_ind)[0])\n batch_bow_emb = tf.scatter_nd(\n tf.expand_dims(uniq_batch_ind, 1), agg_emb,\n tf.stack([batch_size, hidden_size], axis=0))\n batch_bow_emb.set_shape([None, hidden_size])\n if qa_config.projection_dim is not None:\n with tf.variable_scope(\"projection\"):\n batch_bow_emb = contrib_layers.fully_connected(\n batch_bow_emb,\n qa_config.projection_dim,\n activation_fn=tf.nn.tanh,\n reuse=tf.AUTO_REUSE,\n scope=\"bow_projection\")\n # Each instance in a batch has only one vector as embedding.\n\n # Ragged sparse search.\n # (num_batch x num_entities) * (num_entities x num_mentions)\n # [batch_size x num_mentions] sparse\n sp_mention_vec = model_utils.sparse_ragged_mul(\n batch_entities,\n ent2ment_ind,\n ent2ment_val,\n batch_size,\n mips_config.num_mentions,\n qa_config.sparse_reduce_fn, # max or sum\n threshold=qa_config.entity_score_threshold,\n fix_values_to_one=qa_config.fix_sparse_to_one)\n if is_training and qa_config.ensure_answer_sparse:\n ensure_indices = tf.stack([tf.range(batch_size), ensure_index], axis=-1)\n sp_ensure_vec = tf.SparseTensor(\n tf.cast(ensure_indices, tf.int64),\n tf.ones([batch_size]),\n dense_shape=[batch_size, mips_config.num_mentions])\n sp_mention_vec = tf.sparse.add(sp_mention_vec, sp_ensure_vec)\n sp_mention_vec = tf.SparseTensor(\n indices=sp_mention_vec.indices,\n values=tf.minimum(1., sp_mention_vec.values),\n dense_shape=sp_mention_vec.dense_shape)\n\n # Dense scam search.\n # [batch_size, 2 * dim]\n # Construct query embeddings (dual encoder: [subject; relation]).\n scam_qrys = tf.concat(\n [batch_bow_emb + relation_st_qry, batch_bow_emb + relation_en_qry],\n axis=1)\n with tf.device(\"/cpu:0\"):\n # [batch_size, num_neighbors]\n _, ret_mention_ids = mips_search_fn(scam_qrys)\n if is_training and qa_config.ensure_answer_dense:\n ret_mention_ids = model_utils.ensure_values_in_mat(\n ret_mention_ids, ensure_index, tf.int32)\n # [batch_size, num_neighbors, 2 * dim]\n ret_mention_emb = tf.gather(tf_db, ret_mention_ids)\n\n if qa_config.l2_normalize_db:\n ret_mention_emb = tf.nn.l2_normalize(ret_mention_emb, axis=2)\n # [batch_size, 1, num_neighbors]\n ret_mention_scs = tf.matmul(\n tf.expand_dims(scam_qrys, 1), ret_mention_emb, transpose_b=True)\n # [batch_size, 
num_neighbors]\n ret_mention_scs = tf.squeeze(ret_mention_scs, 1)\n # [batch_size, num_mentions] sparse\n dense_mention_vec = model_utils.convert_search_to_vector(\n ret_mention_scs, ret_mention_ids, tf.cast(batch_size, tf.int32),\n mips_config.num_neighbors, mips_config.num_mentions)\n\n # Combine sparse and dense search.\n if (is_training and qa_config.train_with_sparse) or (\n (not is_training) and qa_config.predict_with_sparse):\n # [batch_size, num_mentions] sparse\n if qa_config.sparse_strategy == \"dense_first\":\n ret_mention_vec = model_utils.sp_sp_matmul(dense_mention_vec,\n sp_mention_vec)\n elif qa_config.sparse_strategy == \"sparse_first\":\n with tf.device(\"/cpu:0\"):\n ret_mention_vec = model_utils.rescore_sparse(sp_mention_vec, tf_db,\n scam_qrys)\n else:\n raise ValueError(\"Unrecognized sparse_strategy %s\" %\n qa_config.sparse_strategy)\n else:\n # [batch_size, num_mentions] sparse\n ret_mention_vec = dense_mention_vec\n\n # Get entity scores and ids.\n # [batch_size, num_entities] sparse\n entity_indices = tf.cast(\n tf.gather(ment2ent_map, ret_mention_vec.indices[:, 1]), tf.int64)\n ret_entity_vec = tf.SparseTensor(\n indices=tf.concat(\n [ret_mention_vec.indices[:, 0:1],\n tf.expand_dims(entity_indices, 1)],\n axis=1),\n values=ret_mention_vec.values,\n dense_shape=[batch_size, qa_config.num_entities])\n\n return ret_entity_vec, ret_mention_vec, dense_mention_vec, sp_mention_vec\n\n\ndef maxscale_spare_tensor(sp_tensor):\n \"\"\"Scales the sparse tensor with its maximum per row.\"\"\"\n sp_tensor_maxmiums = tf.sparse.reduce_max(sp_tensor, 1) # batch_size\n gather_sp_tensor_maxmiums = tf.gather(sp_tensor_maxmiums,\n sp_tensor.indices[:, 0:1])\n gather_sp_tensor_maxmiums = tf.reshape(gather_sp_tensor_maxmiums,\n tf.shape(sp_tensor.values))\n scaled_val = sp_tensor.values / gather_sp_tensor_maxmiums\n scaled_sp_tensor = tf.SparseTensor(sp_tensor.indices, scaled_val,\n sp_tensor.dense_shape)\n return scaled_sp_tensor\n\n\ndef follow_fact(\n batch_facts,\n relation_st_qry,\n relation_en_qry,\n fact2fact_ind,\n fact2fact_val,\n fact2ent_ind,\n fact2ent_val,\n fact_mips_search_fn,\n tf_fact_db,\n fact_mips_config,\n qa_config,\n is_training,\n hop_id=0,\n is_printing=True,\n):\n \"\"\"Sparse implementation of the relation follow operation.\n\n Args:\n batch_facts: [batch_size, num_facts] SparseTensor of incoming facts and\n their scores.\n relation_st_qry: [batch_size, dim] Tensor representing start query vectors\n for dense retrieval.\n relation_en_qry: [batch_size, dim] Tensor representing end query vectors\n for dense retrieval.\n fact2fact_ind: [num_facts, num_facts] RaggedTensor mapping facts to fact\n indices which co-occur with them.\n fact2fact_val: [num_facts, num_facts] RaggedTensor mapping facts to fact\n scores which co-occur with them.\n fact2ent_ind: [num_facts, num_entities] RaggedTensor mapping facts to entity\n indices which co-occur with them.\n fact2ent_val: [num_facts, num_entities] RaggedTensor mapping facts to entity\n scores which co-occur with them.\n fact_mips_search_fn: Function which accepts a dense query vector and returns\n the top-k indices closest to it (from the tf_fact_db).\n tf_fact_db: [num_facts, 2 * dim] Tensor of fact representations.\n fact_mips_config: MIPSConfig object.\n qa_config: QAConfig object.\n is_training: Boolean.\n hop_id: int, the current hop id.\n is_printing: whether to print results for debugging.\n\n Returns:\n ret_entities: [batch_size, num_entities] Tensor of retrieved entities.\n ret_facts: [batch_size, num_facts] 
Tensor of retrieved facts.\n dense_fact_vec: [batch_size, num_facts] Tensor of retrieved facts (dense).\n sp_fact_vec: [batch_size, num_facts] Tensor of retrieved facts (sparse).\n \"\"\"\n num_facts = fact_mips_config.num_facts\n batch_size = batch_facts.dense_shape[0] # number of examples in a batch\n example_ind = batch_facts.indices[:, 0] # the list of the example ids\n fact_ind = batch_facts.indices[:, 1] # the list of the fact ids\n fact_scs = batch_facts.values # the list of the scores of each fact\n uniq_original_example_ind, uniq_local_example_idx = tf.unique(example_ind)\n # uniq_original_example_ind: local to original example id\n # uniq_local_example_idx: a list of local example id\n # tf.shape(uniq_original_example_ind)[0] = num_examples\n if qa_config.fact_score_threshold is not None:\n # Remove the facts which have scores lower than the threshold.\n mask = tf.greater(batch_facts.values, qa_config.fact_score_threshold)\n batch_facts = tf.sparse.retain(batch_facts, mask)\n # Sparse: Ragged sparse search from the current facts to the next facts.\n # (num_batch x num_facts) X (num_facts x num_facts)\n # [batch_size x num_facts] sparse\n if hop_id > 0:\n sp_fact_vec = model_utils.sparse_ragged_mul(\n batch_facts,\n fact2fact_ind,\n fact2fact_val,\n batch_size,\n num_facts,\n \"sum\", # Note: check this.\n threshold=None,\n fix_values_to_one=True)\n # Note: find a better way for this.\n mask = tf.greater(sp_fact_vec.values, 3) # 1/0.2 = 5\n sp_fact_vec = tf.sparse.retain(sp_fact_vec, mask)\n else:\n # For the first hop, we use the initial facts themselves,\n # because the sparse retrieval is already done from the question.\n sp_fact_vec = batch_facts\n\n # Note: Remove the previous hop's facts\n # Note: Limit the number of fact followers.\n\n # Dense: Aggregate the facts in each batch as a single fact embedding vector.\n fact_embs = tf.gather(tf_fact_db, fact_ind) # len(fact_ind) X 2dim\n # Note: check, does mean make sense?\n # sum if it was softmaxed\n # mean..\n del fact_scs # Not used for now.\n # fact_embs = fact_embs * tf.expand_dims(fact_scs, axis=1) #batch_fact.values\n ### Start of debugging w/ tf.Print ###\n if is_printing:\n fact_embs = tf.compat.v1.Print(\n input_=fact_embs,\n data=[tf.shape(batch_facts.indices)[0], batch_facts.indices],\n message=\"\\n\\n###\\n batch_facts.indices and total #facts at hop %d \\n\" %\n hop_id,\n first_n=10,\n summarize=50)\n fact_embs = tf.compat.v1.Print(\n input_=fact_embs,\n data=[\n batch_facts.values,\n ],\n message=\"batch_facts.values at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=25)\n fact_embs = tf.compat.v1.Print(\n input_=fact_embs,\n data=[tf.shape(sp_fact_vec.indices)[0], sp_fact_vec.indices],\n message=\"\\n Sparse Fact Results @ hop %d \\n\" % hop_id +\n \" sp_fact_vec.indices at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=50)\n fact_embs = tf.compat.v1.Print(\n input_=fact_embs,\n data=[\n sp_fact_vec.values,\n ],\n message=\"sp_fact_vec.values at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=25)\n ### End of debugging w/ tf.Print ###\n\n agg_emb = tf.math.unsorted_segment_mean(\n fact_embs, uniq_local_example_idx,\n tf.shape(uniq_original_example_ind)[0])\n batch_fact_emb = tf.scatter_nd(\n tf.expand_dims(uniq_original_example_ind, 1), agg_emb,\n tf.stack([batch_size, 2 * qa_config.projection_dim], axis=0))\n # Each instance in a batch has only one vector as the overall fact emb.\n batch_fact_emb.set_shape([None, 2 * qa_config.projection_dim])\n\n # Note: Normalize the embeddings if they are not from 
SoftMax.\n # batch_fact_emb = tf.nn.l2_normalize(batch_fact_emb, axis=1)\n\n # Dense scam search.\n # [batch_size, 2 * dim]\n # Note: reform query embeddings.\n scam_qrys = batch_fact_emb + tf.concat([relation_st_qry, relation_en_qry],\n axis=1)\n with tf.device(\"/cpu:0\"):\n # [batch_size, num_neighbors]\n _, ret_fact_ids = fact_mips_search_fn(scam_qrys)\n # [batch_size, num_neighbors, 2 * dim]\n ret_fact_emb = tf.gather(tf_fact_db, ret_fact_ids)\n\n if qa_config.l2_normalize_db:\n ret_fact_emb = tf.nn.l2_normalize(ret_fact_emb, axis=2)\n # [batch_size, 1, num_neighbors]\n # The score of a fact is its inner product with qry.\n ret_fact_scs = tf.matmul(\n tf.expand_dims(scam_qrys, 1), ret_fact_emb, transpose_b=True)\n # [batch_size, num_neighbors]\n ret_fact_scs = tf.squeeze(ret_fact_scs, 1)\n # [batch_size, num_facts] sparse\n dense_fact_vec = model_utils.convert_search_to_vector(\n ret_fact_scs, ret_fact_ids, tf.cast(batch_size, tf.int32),\n fact_mips_config.num_neighbors, fact_mips_config.num_facts)\n\n # Combine sparse and dense search.\n if (is_training and qa_config.train_with_sparse) or (\n (not is_training) and qa_config.predict_with_sparse):\n # [batch_size, num_mentions] sparse\n if qa_config.sparse_strategy == \"dense_first\":\n ret_fact_vec = model_utils.sp_sp_matmul(dense_fact_vec, sp_fact_vec)\n elif qa_config.sparse_strategy == \"sparse_first\":\n with tf.device(\"/cpu:0\"):\n ret_fact_vec = model_utils.rescore_sparse(sp_fact_vec, tf_fact_db,\n scam_qrys)\n else:\n raise ValueError(\"Unrecognized sparse_strategy %s\" %\n qa_config.sparse_strategy)\n else:\n # [batch_size, num_facts] sparse\n ret_fact_vec = dense_fact_vec\n\n # # Scaling facts with SoftMax.\n ret_fact_vec = tf.sparse.reorder(ret_fact_vec)\n # max_ip_scores = tf.reduce_max(ret_fact_vec.values)\n # min_ip_scores = tf.reduce_min(ret_fact_vec.values)\n # range_ip_scores = max_ip_scores - min_ip_scores\n # scaled_values = (ret_fact_vec.values - min_ip_scores) / range_ip_scores\n scaled_facts = tf.SparseTensor(\n indices=ret_fact_vec.indices,\n values=ret_fact_vec.values / tf.reduce_max(ret_fact_vec.values),\n dense_shape=ret_fact_vec.dense_shape)\n # ret_fact_vec_sf = tf.sparse.softmax(scaled_facts)\n ret_fact_vec_sf = scaled_facts\n\n # Remove the facts which have scores lower than the threshold.\n mask = tf.greater(ret_fact_vec_sf.values, 0.5) # Must be larger than max/2\n ret_fact_vec_sf_fitered = tf.sparse.retain(ret_fact_vec_sf, mask)\n\n # Note: add a soft way to score (all) the entities based on the facts.\n # Note: maybe use the pre-computed (tf-idf) similarity score here. 
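A hedged reading of the sparse_ragged_mul call below: it acts like a\n # batched sparse matrix product that propagates each fact's score to the\n # entities it mentions, roughly ent[b, e] = sum_f fact[b, f] * fact2ent[f, e].\n # 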
e2e\n # Retrieve entities before Fact-SoftMaxing\n ret_entities_nosc = model_utils.sparse_ragged_mul(\n ret_fact_vec_sf, # Use the non-filtered scores of the retrieved facts.\n fact2ent_ind,\n fact2ent_val,\n batch_size,\n qa_config.num_entities,\n \"sum\",\n threshold=qa_config.fact_score_threshold,\n fix_values_to_one=True)\n\n ret_entities = tf.SparseTensor(\n indices=ret_entities_nosc.indices,\n values=ret_entities_nosc.values / tf.reduce_max(ret_entities_nosc.values),\n dense_shape=ret_entities_nosc.dense_shape)\n\n ### Start of debugging w/ tf.Print ###\n if is_printing:\n tmp_vals = ret_entities.values\n\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[tf.shape(ret_fact_vec.indices)[0], ret_fact_vec.indices],\n message=\"\\n\\n-rescored- ret_fact_vec.indices at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=51)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n ret_fact_vec.values,\n ],\n message=\"-rescored- ret_fact_vec.values at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=25)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n ret_fact_vec_sf.values,\n ],\n message=\"ret_fact_vec_sf.values at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=25)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(ret_fact_vec_sf_fitered.values),\n ret_fact_vec_sf_fitered.values,\n ],\n message=\"ret_fact_vec_sf_fitered.values at hop %d \\n\" % hop_id,\n first_n=10,\n summarize=25)\n ret_entities = tf.SparseTensor(\n indices=ret_entities.indices,\n values=tmp_vals,\n dense_shape=ret_entities.dense_shape)\n ### End of debugging w/ tf.Print ###\n\n return ret_entities, ret_fact_vec_sf_fitered, None, None\n\n\ndef multi_hop_fact(qry_input_ids,\n qry_input_mask,\n qry_entity_ids,\n entity_ids,\n entity_mask,\n ent2fact_ind,\n ent2fact_val,\n fact2ent_ind,\n fact2ent_val,\n fact2fact_ind,\n fact2fact_val,\n is_training,\n use_one_hot_embeddings,\n bert_config,\n qa_config,\n fact_mips_config,\n num_hops,\n exclude_set=None,\n is_printing=True):\n \"\"\"Multi-hops of propagation from input to output facts.\n\n Args:\n qry_input_ids:\n qry_input_mask:\n qry_entity_ids:\n entity_ids: (entity_word_ids) [num_entities, max_entity_len] Tensor holding\n word ids of each entity.\n entity_mask: (entity_word_masks) [num_entities, max_entity_len] Tensor with\n masks into word ids above.\n ent2fact_ind:\n ent2fact_val:\n fact2ent_ind:\n fact2ent_val:\n fact2fact_ind:\n fact2fact_val:\n is_training:\n use_one_hot_embeddings:\n bert_config:\n qa_config:\n fact_mips_config:\n num_hops:\n exclude_set:\n is_printing:\n\n Returns:\n layer_entities:\n layer_facts:\n layer_dense:\n layer_sp:\n batch_entities_nosc:\n qry_seq_emb:\n \"\"\"\n del entity_ids, entity_mask, exclude_set # Not used for now.\n # MIPS search for facts. 
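The searcher returned below maps a batch of dense queries to its\n # top-scoring rows; an assumed sketch of the interface (shapes taken from\n # the docstrings above):\n # _scores, ids = fact_mips_search_fn(qrys) # qrys: [batch_size, 2 * dim]\n # # ids: [batch_size, num_neighbors]\n # 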
Build fact feature Database\n with tf.device(\"/cpu:0\"):\n tf_fact_db, fact_mips_search_fn = search_utils.create_mips_searcher(\n fact_mips_config.ckpt_var_name,\n # [fact_mips_config.num_facts, fact_mips_config.emb_size],\n fact_mips_config.ckpt_path,\n fact_mips_config.num_neighbors,\n local_var_name=\"scam_init_barrier_fact\")\n\n # for question BOW embedding\n with tf.variable_scope(\"qry/bow\"):\n # trainable word weights over the BERT vocab for all query embeddings.\n word_weights = tf.get_variable(\n \"word_weights\", [bert_config.vocab_size, 1],\n dtype=tf.float32,\n initializer=tf.ones_initializer())\n qry_seq_emb, word_emb_table = model_utils.shared_qry_encoder_v2(\n qry_input_ids, qry_input_mask, is_training, use_one_hot_embeddings,\n bert_config, qa_config)\n\n del word_weights, word_emb_table # Not used for now.\n\n batch_size = tf.shape(qry_input_ids)[0]\n # Get question entities w/o scores.\n batch_qry_entities = tf.SparseTensor(\n indices=tf.concat([\n qry_entity_ids.indices[:, 0:1],\n tf.cast(tf.expand_dims(qry_entity_ids.values, 1), tf.int64)\n ],\n axis=1),\n values=tf.ones_like(qry_entity_ids.values, dtype=tf.float32),\n dense_shape=[batch_size, qa_config.num_entities])\n # Prepare initial facts.\n initial_facts = model_utils.sparse_ragged_mul(\n batch_qry_entities,\n ent2fact_ind,\n ent2fact_val,\n batch_size,\n fact_mips_config.num_facts,\n \"sum\", # max or sum\n threshold=None,\n fix_values_to_one=True)\n\n # Note: set a hyper parameter in qa.config\n # Note: can we do top k here for sparse tensor?\n # Limit the number of init facts such that we won't have too many facts.\n\n # mask = tf.greater(initial_facts.values, 1) # >= 2 qry concepts\n # initial_facts = tf.sparse.retain(initial_facts, mask)\n\n scaled_initial_facts = maxscale_spare_tensor(initial_facts)\n mask_thresold = tf.greater(scaled_initial_facts.values, 0.25)\n final_initial_facts = tf.sparse.retain(scaled_initial_facts, mask_thresold)\n\n if is_printing:\n tmp_vals = final_initial_facts.values\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(initial_facts.indices),\n initial_facts.values,\n ],\n message=\"-\" * 100 + \"\\n\\n ## Initial Facts (at hop 0):\\n\"\n \"shape(initial_facts), initial_facts.values,\",\n first_n=10,\n summarize=52)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(scaled_initial_facts.indices),\n scaled_initial_facts.values,\n ],\n message=\"shape(scaled_initial_facts), scaled_initial_facts.values,\",\n first_n=10,\n summarize=52)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(final_initial_facts.indices),\n final_initial_facts.values,\n ],\n message=\"shape(final_initial_facts), final_initial_facts.values,\",\n first_n=10,\n summarize=52)\n\n final_initial_facts = tf.SparseTensor(final_initial_facts.indices, tmp_vals,\n final_initial_facts.dense_shape)\n layer_facts, layer_entities = [], []\n layer_dense, layer_sp = [], []\n batch_facts = final_initial_facts\n for hop in range(num_hops):\n with tf.name_scope(\"hop_%d\" % hop):\n # The question start/end embeddings for each hop.\n qry_start_emb, qry_end_emb = model_utils.layer_qry_encoder(\n qry_seq_emb,\n qry_input_ids,\n qry_input_mask,\n is_training,\n bert_config,\n qa_config,\n suffix=\"_%d\" % hop,\n project_dim=qa_config.projection_dim) # project=True\n ret_entities, ret_facts, _, _ = follow_fact(\n batch_facts, qry_start_emb, qry_end_emb, fact2fact_ind, fact2fact_val,\n fact2ent_ind, fact2ent_val, fact_mips_search_fn, tf_fact_db,\n fact_mips_config, 
qa_config, is_training, hop, is_printing)\n batch_facts = ret_facts # Update to next hop.\n # Update results.\n layer_facts.append(ret_facts)\n layer_entities.append(ret_entities)\n\n tf.logging.info(\"len layer_facts: %d\", len(layer_facts))\n tf.logging.info(\"len layer_entities: %d\", len(layer_entities))\n return (layer_entities, layer_facts, layer_dense, layer_sp,\n batch_qry_entities, initial_facts, qry_seq_emb)\n\n\ndef multi_hop_mention(qry_input_ids,\n qry_input_mask,\n qry_entity_ids,\n entity_ids,\n entity_mask,\n ent2ment_ind,\n ent2ment_val,\n ment2ent_map,\n is_training,\n use_one_hot_embeddings,\n bert_config,\n qa_config,\n mips_config,\n num_hops,\n exclude_set=None,\n bridge_mentions=None,\n answer_mentions=None): # answer mentions?\n \"\"\"Multi-hops of propagation from input to output entities.\n\n Args:\n qry_input_ids:\n qry_input_mask:\n qry_entity_ids:\n entity_ids: (entity_word_ids) [num_entities, max_entity_len] Tensor holding\n word ids of each entity.\n entity_mask: (entity_word_masks) [num_entities, max_entity_len] Tensor with\n masks into word ids above.\n ent2ment_ind:\n ent2ment_val:\n ment2ent_map:\n is_training:\n use_one_hot_embeddings:\n bert_config:\n qa_config:\n mips_config:\n num_hops:\n exclude_set:\n bridge_mentions:\n answer_mentions:\n\n Returns:\n layer_entities:\n layer_mentions:\n layer_dense:\n layer_sp:\n batch_entities_nosc:\n qry_seq_emb:\n \"\"\"\n # for question BOW embedding\n with tf.variable_scope(\"qry/bow\"):\n # Note: trainable word weights over the BERT vocab for query\n word_weights = tf.get_variable(\n \"word_weights\", [bert_config.vocab_size, 1],\n dtype=tf.float32,\n initializer=tf.ones_initializer())\n # Note: we can use the [CLS] token here?\n qry_seq_emb, word_emb_table = model_utils.shared_qry_encoder_v2(\n qry_input_ids, qry_input_mask, is_training, use_one_hot_embeddings,\n bert_config, qa_config)\n\n batch_size = tf.shape(qry_input_ids)[0]\n # Multiple entities per question. We need to re-score.\n with tf.name_scope(\"entity_linking\"):\n batch_entity_emb = model_utils.entity_emb(\n tf.cast(qry_entity_ids.values, tf.int64), entity_ids, entity_mask,\n word_emb_table, word_weights) # question entity embeddings.\n # Embed query into start and end vectors for dense retrieval for a hop.\n qry_el_emb, _ = model_utils.layer_qry_encoder( # question embeddings\n qry_seq_emb,\n qry_input_ids,\n qry_input_mask,\n is_training,\n bert_config,\n qa_config,\n suffix=\"_el\",\n project=False)\n batch_qry_el_emb = tf.gather(qry_el_emb, qry_entity_ids.indices[:, 0])\n batch_entity_el_scs = tf.reduce_sum(batch_qry_el_emb * batch_entity_emb, -1)\n batch_entities_nosc = tf.SparseTensor(\n # Note: double check this.\n indices=tf.concat([\n qry_entity_ids.indices[:, 0:1],\n tf.cast(tf.expand_dims(qry_entity_ids.values, 1), tf.int64)\n ],\n axis=1),\n values=batch_entity_el_scs,\n dense_shape=[batch_size, qa_config.num_entities])\n batch_entities = tf.sparse.softmax(tf.sparse.reorder(batch_entities_nosc))\n\n ensure_mentions = bridge_mentions # Note: check \"supporting facts\"\n\n with tf.device(\"/cpu:0\"):\n # MIPS search for mentions. 
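This index is analogous to the fact index above: tf_db holds one\n # [2 * dim] row per mention (concatenated start/end encodings), and\n # mips_search_fn returns the ids of the num_neighbors closest rows per query.\n # 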
Mention Feature Database\n tf_db, mips_search_fn = search_utils.create_mips_searcher(\n mips_config.ckpt_var_name,\n # [mips_config.num_mentions, mips_config.emb_size],\n mips_config.ckpt_path,\n mips_config.num_neighbors,\n local_var_name=\"scam_init_barrier\")\n layer_mentions, layer_entities = [], []\n layer_dense, layer_sp = [], []\n for hop in range(num_hops):\n with tf.name_scope(\"hop_%d\" % hop):\n # Note: the question start/end embeddings for each hop?\n qry_start_emb, qry_end_emb = model_utils.layer_qry_encoder(\n qry_seq_emb,\n qry_input_ids,\n qry_input_mask,\n is_training,\n bert_config,\n qa_config,\n suffix=\"_%d\" % hop) # project=True\n\n (ret_entities, ret_mentions,\n dense_mention_vec, sp_mention_vec) = follow_mention(\n batch_entities, qry_start_emb, qry_end_emb, entity_ids, entity_mask,\n ent2ment_ind, ent2ment_val, ment2ent_map, word_emb_table,\n word_weights, mips_search_fn, tf_db, bert_config.hidden_size,\n mips_config, qa_config, is_training, ensure_mentions)\n # Note: check this. Shouldn't for wrong choices.\n if exclude_set:\n # batch_ind = tf.expand_dims(tf.range(batch_size), 1)\n exclude_indices = tf.concat([\n tf.cast(exclude_set.indices[:, 0:1], tf.int64),\n tf.cast(tf.expand_dims(exclude_set.values, 1), tf.int64)\n ],\n axis=1)\n ret_entities = model_utils.remove_from_sparse(ret_entities,\n exclude_indices)\n ret_entities = tf.sparse.reorder(ret_entities)\n scaled_entities = tf.SparseTensor(\n indices=ret_entities.indices,\n values=ret_entities.values / qa_config.softmax_temperature,\n dense_shape=ret_entities.dense_shape)\n batch_entities = tf.sparse.softmax(scaled_entities) # entities updated.\n\n ### Start of debugging w/ tf.Print ###\n tmp_vals = batch_entities.values\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n ret_entities.indices,\n ],\n message=\"ret_entities.indices at hop %d \\n\" % hop,\n first_n=10,\n summarize=50)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n ret_entities.values,\n ],\n message=\"ret_entities.values at hop %d \\n\" % hop,\n first_n=10,\n summarize=25)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n batch_entities.indices,\n ],\n message=\"scaled_entities.indices at hop %d \\n\" % hop,\n first_n=10,\n summarize=50)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n batch_entities.values,\n ],\n message=\"scaled_entities.values at hop %d \\n\" % hop,\n first_n=10,\n summarize=25)\n batch_entities = tf.SparseTensor(\n indices=batch_entities.indices,\n values=tmp_vals,\n dense_shape=batch_entities.dense_shape)\n ### End of debugging w/ tf.Print ###\n\n ensure_mentions = answer_mentions # Note: seems not helpful now?\n layer_mentions.append(ret_mentions)\n layer_entities.append(ret_entities) # Note that this is not sfed.\n layer_dense.append(dense_mention_vec)\n layer_sp.append(sp_mention_vec)\n\n return (layer_entities, layer_mentions, layer_dense, layer_sp,\n batch_entities_nosc, qry_seq_emb)\n\n\ndef create_drfact_model(bert_config,\n qa_config,\n fact_mips_config,\n is_training,\n features,\n ent2fact_ind,\n ent2fact_val,\n fact2ent_ind,\n fact2ent_val,\n fact2fact_ind,\n fact2fact_val,\n entity_ids,\n entity_mask,\n use_one_hot_embeddings,\n summary_obj,\n num_hops=2,\n num_preds=100):\n \"\"\"Creates a classification model wrapper of the DrFact model.\"\"\"\n qas_ids = features[\"qas_ids\"] # question ids\n qry_input_ids = features[\"qry_input_ids\"] # question text token ids\n qry_input_mask = features[\"qry_input_mask\"] # question text masks (for bert)\n 
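# A hedged shape summary (assumed, not stated in this file): qry_input_ids\n # and qry_input_mask are int Tensors of shape [batch_size, max_query_len];\n # \"qry_entity_id\" parses to a SparseTensor of linked entity ids per question.\n 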
batch_size = tf.shape(qry_input_ids)[0]\n qry_entity_ids = features[\"qry_entity_id\"] # VarLenFeature\n tf.logging.info(\"type(qry_entity_ids): %s\", type(qry_entity_ids))\n\n answer_entities = None\n exclude_set_ids = None\n if is_training:\n answer_entities = features[\"answer_entities\"]\n tf.logging.info(\"type(answer_entities): %s\", type(answer_entities))\n tf.logging.info(\"type(answer_entities.indices): %s\",\n type(answer_entities.indices))\n tf.logging.info(\"answer_entities.indices.shape: %s\",\n answer_entities.indices.shape)\n answer_index = tf.SparseTensor(\n indices=tf.concat([\n answer_entities.indices[:, 0:1],\n tf.cast(tf.expand_dims(answer_entities.values, 1), tf.int64)\n ],\n axis=1),\n values=tf.ones_like(answer_entities.values, dtype=tf.float32),\n dense_shape=[batch_size, qa_config.num_entities])\n # Make sparse version of exclude concepts.\n num_ents = qa_config.num_entities\n\n (layer_entities, layer_facts, _, _, qry_ents, _,\n qry_seq_emb) = multi_hop_fact(\n qry_input_ids,\n qry_input_mask,\n qry_entity_ids,\n entity_ids,\n entity_mask,\n ent2fact_ind,\n ent2fact_val,\n fact2ent_ind,\n fact2ent_val,\n fact2fact_ind,\n fact2fact_val,\n is_training,\n use_one_hot_embeddings,\n bert_config,\n qa_config,\n fact_mips_config,\n num_hops=num_hops,\n exclude_set=exclude_set_ids,\n )\n\n # Compute weights for each layer.\n with tf.name_scope(\"classifier\"):\n qry_emb, _ = model_utils.layer_qry_encoder(\n qry_seq_emb,\n qry_input_ids,\n qry_input_mask,\n is_training,\n bert_config,\n qa_config,\n suffix=\"_cl\",\n project_dim=qa_config.projection_dim)\n # Ideally, higher weights on k-th layer for a k-hop question\n output_weights = tf.get_variable(\n \"cl_weights\", [qa_config.projection_dim,\n len(layer_entities)],\n initializer=tf.truncated_normal_initializer(stddev=0.02))\n output_bias = tf.get_variable(\n \"cl_bias\", [len(layer_entities)], initializer=tf.zeros_initializer())\n logits = tf.matmul(qry_emb, output_weights)\n logits = tf.nn.bias_add(logits, output_bias)\n probabilities = tf.nn.softmax(logits, axis=-1)\n\n # if is_training:\n # nrows = qa_config.train_batch_size\n # else:\n # nrows = qa_config.predict_batch_size\n\n # def _to_ragged(sp_tensor):\n # r_ind = tf.RaggedTensor.from_value_rowids(\n # value_rowids=sp_tensor.indices[:, 0],\n # values=sp_tensor.indices[:, 1],\n # nrows=nrows)\n # r_val = tf.RaggedTensor.from_value_rowids(\n # value_rowids=sp_tensor.indices[:, 0],\n # values=sp_tensor.values,\n # nrows=nrows)\n # return r_ind, r_val\n\n def _layer_softmax(entities, hop_id=0):\n uniq_entity_ids, uniq_entity_scs = model_utils.aggregate_sparse_indices(\n entities.indices, entities.values, entities.dense_shape,\n qa_config.entity_score_aggregation_fn)\n # uniq_entity_scs /= 2.0 # Note: softmax_temperature\n logits = tf.SparseTensor(uniq_entity_ids, uniq_entity_scs,\n entities.dense_shape)\n logits_sf = tf.sparse.softmax(tf.sparse.reorder(logits))\n ### Start Debugging w/ Print ###\n tmp_vals = logits_sf.values\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(logits.indices)[0],\n logits.indices,\n ],\n message=\"\\n # Layer Entity SoftMax %d \\n logits.indices\" % hop_id,\n first_n=10,\n summarize=27)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(logits.values)[0],\n logits.values,\n ],\n message=\"\\n logits.values\",\n first_n=10,\n summarize=25)\n tmp_vals = tf.compat.v1.Print(\n input_=tmp_vals,\n data=[\n tf.shape(logits_sf.values)[0],\n logits_sf.values,\n ],\n message=\"\\n logits_sf.values # End 
of Entity SoftMax #\\n\",\n first_n=10,\n summarize=25)\n logits_sf = tf.SparseTensor(logits_sf.indices, tmp_vals,\n logits_sf.dense_shape)\n ### End Debugging w/ Print ###\n return logits_sf\n\n predictions = {\"qas_ids\": qas_ids}\n\n layer_entities_weighted = []\n for i, layer_entity in enumerate(layer_entities):\n # ent_ind, ent_val = _to_ragged(layer_entity)\n # probabilities is the predicted weights of the layer\n layer_entity_sf = _layer_softmax(layer_entity, hop_id=i)\n layer_entities_weighted.append(\n model_utils.batch_multiply(layer_entity_sf, probabilities[:, i]))\n layer_entity_sf_dense = tf.sparse.to_dense(\n layer_entity_sf, default_value=DEFAULT_VALUE, validate_indices=False)\n layer_entity_sf_val, layer_entity_sf_ind = tf.nn.top_k(\n layer_entity_sf_dense, k=100, sorted=True)\n predictions.update({\n \"layer_%d_ent\" % i: layer_entity_sf_ind,\n \"layer_%d_scs\" % i: layer_entity_sf_val,\n })\n\n probs = layer_entities_weighted[0]\n tf.logging.info(\"layer_entities_weighted: %d\", len(layer_entities_weighted))\n for i in range(1, len(layer_entities_weighted)):\n probs = tf.sparse.add(probs, layer_entities_weighted[i])\n probs_dense = tf.sparse.to_dense(\n probs, default_value=DEFAULT_VALUE, validate_indices=False)\n answer_preds = tf.argmax(probs_dense, axis=1)\n top_ent_vals, top_ent_idx = tf.nn.top_k(probs_dense, k=num_preds, sorted=True)\n\n for hop_id, current_facts in enumerate(layer_facts):\n current_facts_dense = tf.sparse.to_dense(\n current_facts, default_value=DEFAULT_VALUE, validate_indices=False)\n current_fact_vals, current_facts_idx = tf.nn.top_k(\n current_facts_dense, k=100, sorted=True)\n predictions.update({\n \"layer_%d_fact_ids\" % hop_id: current_facts_idx,\n \"layer_%d_fact_scs\" % hop_id: current_fact_vals\n })\n\n qry_ents_dense = tf.sparse.to_dense(\n qry_ents, default_value=DEFAULT_VALUE, validate_indices=False)\n qry_ent_vals, qry_ent_idx = tf.nn.top_k(qry_ents_dense, k=100, sorted=True)\n predictions.update({\"qry_ents\": qry_ent_idx, \"qry_ent_scores\": qry_ent_vals})\n\n total_loss = None\n if is_training:\n # Note: check if this loss function is suitable for multiple answers\n sp_loss = model_utils.compute_loss_from_sptensors(probs, answer_index)\n total_loss = tf.reduce_sum(sp_loss.values) / tf.cast(batch_size, tf.float32)\n\n # Note: convert probs&ans_index to dense and compute_loss()\n # dense_answer_index = tf.sparse.to_dense(\n # answer_index, default_value=DEFAULT_VALUE, validate_indices=False)\n # dense_loss = compute_loss(probs_dense, dense_answer_index)\n\n if summary_obj is not None: # Note: Where is this?\n num_answers_ret = tf.shape(sp_loss.values)[0]\n for i in range(len(layer_entities)):\n num_ents = tf.cast(tf.shape(layer_entities[i].indices)[0],\n tf.float32) / tf.cast(batch_size, tf.float32)\n summary_obj.scalar(\"train/layer_weight_%d\" % i,\n tf.reduce_mean(probabilities[:, i], keepdims=True))\n summary_obj.scalar(\"train/num_entities_%d\" % i,\n tf.expand_dims(num_ents, 0))\n summary_obj.scalar(\"train/total_loss\", tf.expand_dims(total_loss, 0))\n summary_obj.scalar(\"train/ans_in_ret\", tf.expand_dims(num_answers_ret, 0))\n summary_obj.scalar(\"train/total_prob_mass\",\n tf.reduce_sum(probs.values, keepdims=True))\n\n # Update the entity-related prediction information.\n predictions.update({\n \"layer_probs\": probabilities,\n \"top_vals\": top_ent_vals,\n \"top_idx\": top_ent_idx,\n \"predictions\": answer_preds,\n })\n\n return total_loss, predictions\n\n\ndef create_drkit_model(bert_config,\n qa_config,\n 
mips_config,\n is_training,\n features,\n ent2ment_ind,\n ent2ment_val,\n ment2ent_map,\n entity_ids,\n entity_mask,\n use_one_hot_embeddings,\n summary_obj,\n num_hops=2,\n num_preds=100,\n is_excluding=False):\n \"\"\"Creates a classification model.\"\"\"\n qas_ids = features[\"qas_ids\"]\n qry_input_ids = features[\"qry_input_ids\"]\n qry_input_mask = features[\"qry_input_mask\"]\n batch_size = tf.shape(qry_input_ids)[0]\n qry_entity_ids = features[\"qry_entity_id\"]\n tf.logging.info(\"type(qry_entity_ids): %s\", type(qry_entity_ids))\n\n answer_entities = None\n exclude_set_ids = None\n if is_training:\n answer_entities = features[\"answer_entities\"]\n answer_index = tf.SparseTensor(\n indices=tf.concat([\n answer_entities.indices[:, 0:1],\n tf.cast(tf.expand_dims(answer_entities.values, 1), tf.int64)\n ],\n axis=1),\n values=tf.ones_like(answer_entities.values, dtype=tf.float32),\n dense_shape=[batch_size, qa_config.num_entities])\n # Make sparse version of exclude concepts.\n num_ents = qa_config.num_entities\n # Only when it is training.\n if is_excluding:\n exclude_set_ids = features[\"exclude_set\"]\n tf.logging.info(\"type(exclude_set_ids): %s\", type(exclude_set_ids))\n\n layer_entities, layer_mentions, _, _, el, qry_seq_emb = multi_hop_mention(\n qry_input_ids,\n qry_input_mask,\n qry_entity_ids,\n entity_ids,\n entity_mask,\n ent2ment_ind,\n ent2ment_val,\n ment2ent_map,\n is_training,\n use_one_hot_embeddings,\n bert_config,\n qa_config,\n mips_config,\n num_hops=num_hops,\n exclude_set=exclude_set_ids)\n # The first layer is the query concepts.\n layer_entities = [el] + layer_entities\n\n # Compute weights for each layer.\n with tf.name_scope(\"classifier\"):\n qry_emb, _ = model_utils.layer_qry_encoder(\n qry_seq_emb,\n qry_input_ids,\n qry_input_mask,\n is_training,\n bert_config,\n qa_config,\n suffix=\"_cl\")\n # Ideally, higher weights on k-th layer for a k-hop question\n # Note: can we make answer-aware hop weighting?\n output_weights = tf.get_variable(\n \"cl_weights\", [qa_config.projection_dim,\n len(layer_entities)],\n initializer=tf.truncated_normal_initializer(stddev=0.02))\n output_bias = tf.get_variable(\n \"cl_bias\", [len(layer_entities)], initializer=tf.zeros_initializer())\n logits = tf.matmul(qry_emb, output_weights)\n logits = tf.nn.bias_add(logits, output_bias)\n probabilities = tf.nn.softmax(logits, axis=-1)\n\n if is_training:\n nrows = qa_config.train_batch_size\n else:\n nrows = qa_config.predict_batch_size\n\n def _to_ragged(sp_tensor):\n r_ind = tf.RaggedTensor.from_value_rowids(\n value_rowids=sp_tensor.indices[:, 0],\n values=sp_tensor.indices[:, 1],\n nrows=nrows)\n r_val = tf.RaggedTensor.from_value_rowids(\n value_rowids=sp_tensor.indices[:, 0],\n values=sp_tensor.values,\n nrows=nrows)\n return r_ind, r_val\n\n def _layer_softmax(entities):\n uniq_entity_ids, uniq_entity_scs = model_utils.aggregate_sparse_indices(\n entities.indices, entities.values, entities.dense_shape,\n qa_config.entity_score_aggregation_fn)\n uniq_entity_scs /= qa_config.softmax_temperature\n logits = tf.SparseTensor(uniq_entity_ids, uniq_entity_scs,\n entities.dense_shape)\n return tf.sparse.softmax(tf.sparse.reorder(logits))\n\n predictions = {\"qas_ids\": qas_ids}\n\n layer_preds = []\n for i, layer_mention in enumerate(layer_mentions):\n layer_preds.append(\n tf.argmax(\n tf.sparse.to_dense(\n layer_mention,\n default_value=DEFAULT_VALUE,\n validate_indices=False),\n axis=1))\n men_ind, men_val = _to_ragged(layer_mention)\n predictions.update({\n \"layer_%d_men\" % 
i: men_ind.to_tensor(default_value=-1),\n \"layer_%d_mscs\" % i: men_val.to_tensor(default_value=-1),\n })\n\n layer_entities_weighted = []\n for i, layer_entity in enumerate(layer_entities):\n ent_ind, ent_val = _to_ragged(layer_entity)\n predictions.update({\n \"layer_%d_ent\" % i: ent_ind.to_tensor(default_value=-1),\n \"layer_%d_scs\" % i: ent_val.to_tensor(default_value=-1),\n })\n layer_entities_weighted.append(\n model_utils.batch_multiply(\n _layer_softmax(layer_entity), probabilities[:, i]))\n\n probs = tf.sparse.add(layer_entities_weighted[0], layer_entities_weighted[1])\n for i in range(2, len(layer_entities_weighted)):\n probs = tf.sparse.add(probs, layer_entities_weighted[i])\n\n probs_dense = tf.sparse.to_dense(\n probs, default_value=DEFAULT_VALUE, validate_indices=False)\n answer_preds = tf.argmax(probs_dense, axis=1)\n top_vals, top_idx = tf.nn.top_k(probs_dense, k=num_preds, sorted=True)\n\n total_loss = None\n if is_training:\n # Note: check if this loss function is suitable for multiple answers\n # Note: convert probs&ans_index to dense and compute_loss()\n sp_loss = model_utils.compute_loss_from_sptensors(probs, answer_index)\n total_loss = tf.reduce_sum(sp_loss.values) / tf.cast(batch_size, tf.float32)\n num_answers_ret = tf.shape(sp_loss.values)[0]\n if summary_obj is not None:\n for i in range(len(layer_entities)):\n num_ents = tf.cast(tf.shape(layer_entities[i].indices)[0],\n tf.float32) / tf.cast(batch_size, tf.float32)\n summary_obj.scalar(\"train/layer_weight_%d\" % i,\n tf.reduce_mean(probabilities[:, i], keepdims=True))\n summary_obj.scalar(\"train/num_entities_%d\" % i,\n tf.expand_dims(num_ents, 0))\n summary_obj.scalar(\"train/total_loss\", tf.expand_dims(total_loss, 0))\n summary_obj.scalar(\"train/ans_in_ret\", tf.expand_dims(num_answers_ret, 0))\n summary_obj.scalar(\"train/total_prob_mass\",\n tf.reduce_sum(probs.values, keepdims=True))\n\n predictions.update({\n \"layer_probs\": probabilities,\n \"top_vals\": top_vals,\n \"top_idx\": top_idx,\n \"predictions\": answer_preds,\n \"layer_predictions\": tf.stack(layer_preds, axis=1),\n })\n\n return total_loss, predictions\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Input functions used in dual encoder SMITH model.\"\"\"\n\n\nfrom absl import flags\nfrom tensorflow.compat.v1 import estimator as tf_estimator\nimport tensorflow.compat.v1 as tf # tf\nfrom smith import constants\nFLAGS = flags.FLAGS\n\n\ndef input_fn_builder(input_files,\n is_training,\n drop_remainder,\n max_seq_length=32,\n max_predictions_per_seq=5,\n num_cpu_threads=4,\n batch_size=16,\n is_prediction=False):\n \"\"\"Creates an `input_fn` closure to be passed to TPUEstimator.\"\"\"\n\n def input_fn(params): # pylint: disable=unused-argument\n \"\"\"The actual input function.\"\"\"\n name_to_features = {\n \"input_ids_1\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"input_mask_1\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"input_ids_2\": 
tf.FixedLenFeature([max_seq_length], tf.int64),\n \"input_mask_2\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"documents_match_labels\": tf.FixedLenFeature([1], tf.float32, 0)\n }\n if (FLAGS.train_mode == constants.TRAIN_MODE_PRETRAIN or\n FLAGS.train_mode == constants.TRAIN_MODE_JOINT_TRAIN):\n # Add some features related to word masked LM losses.\n name_to_features[\"masked_lm_positions_1\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_ids_1\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_weights_1\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.float32)\n name_to_features[\"masked_lm_positions_2\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_ids_2\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_weights_2\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.float32)\n\n # For training, we want a lot of parallel reading and shuffling.\n # For eval, we want no shuffling and parallel reading doesn't matter.\n if is_training:\n file_list = tf.data.Dataset.list_files(tf.constant(input_files))\n file_list = file_list.shuffle(buffer_size=len(input_files))\n # `cycle_length` is the number of parallel files that get read.\n cycle_length = min(num_cpu_threads, len(input_files))\n # `sloppy` mode means that the interleaving is not exact. This adds\n # even more randomness to the training pipeline.\n d = file_list.apply(\n tf.data.experimental.parallel_interleave(\n tf.data.TFRecordDataset,\n sloppy=is_training,\n cycle_length=cycle_length))\n d = d.repeat()\n d = d.shuffle(buffer_size=100)\n\n else:\n d = tf.data.TFRecordDataset(tf.constant(input_files))\n # In contrast to TPU training/evaluation, the input_fn for prediction\n # should raise an end-of-input exception (OutOfRangeError or\n # StopIteration), which serves as the stopping signal to TPUEstimator.\n # Thus during model prediction, the data can not be repeated forever.\n # Refer to\n # https://www.tensorflow.org/api_docs/python/tf/compat/v1/estimator/tpu/TPUEstimator#predict\n if not is_prediction:\n # Since we evaluate for a fixed number of steps we don't want to\n # encounter out-of-range exceptions.\n d = d.repeat()\n\n # We must `drop_remainder` on training because the TPU requires fixed\n # size dimensions. 
For eval, we assume we are evaluating on the CPU or GPU\n # and we *don't* want to drop the remainder, otherwise we won't cover\n # every sample.\n d = d.apply(\n tf.data.experimental.map_and_batch(\n lambda record: _decode_record(record, name_to_features),\n batch_size=batch_size,\n num_parallel_batches=num_cpu_threads,\n drop_remainder=drop_remainder))\n return d\n\n return input_fn\n\n\ndef _decode_record(record, name_to_features):\n \"\"\"Decodes a record to a TensorFlow example.\"\"\"\n example = tf.parse_single_example(record, name_to_features)\n # tf.Example only supports tf.int64, but the TPU only supports tf.int32.\n # So cast all int64 to int32.\n example[\"input_ids_1\"] = tf.cast(example[\"input_ids_1\"], tf.int32)\n example[\"input_ids_2\"] = tf.cast(example[\"input_ids_2\"], tf.int32)\n example[\"documents_match_labels\"] = tf.cast(example[\"documents_match_labels\"],\n tf.float32)\n example[\"input_mask_1\"] = tf.cast(example[\"input_mask_1\"], tf.int32)\n example[\"input_mask_2\"] = tf.cast(example[\"input_mask_2\"], tf.int32)\n if (FLAGS.train_mode == constants.TRAIN_MODE_PRETRAIN or\n FLAGS.train_mode == constants.TRAIN_MODE_JOINT_TRAIN):\n example[\"masked_lm_ids_1\"] = tf.cast(example[\"masked_lm_ids_1\"], tf.int32)\n example[\"masked_lm_ids_2\"] = tf.cast(example[\"masked_lm_ids_2\"], tf.int32)\n example[\"masked_lm_weights_1\"] = tf.cast(example[\"masked_lm_weights_1\"],\n tf.float32)\n example[\"masked_lm_weights_2\"] = tf.cast(example[\"masked_lm_weights_2\"],\n tf.float32)\n example[\"masked_lm_positions_1\"] = tf.cast(example[\"masked_lm_positions_1\"],\n tf.int32)\n example[\"masked_lm_positions_2\"] = tf.cast(example[\"masked_lm_positions_2\"],\n tf.int32)\n return example\n\n\ndef make_serving_input_example_fn(max_seq_length=32, max_predictions_per_seq=5):\n \"\"\"Returns an Estimator input_fn for serving the model.\n\n Args:\n max_seq_length: The max input sequence length.\n max_predictions_per_seq: The max number of masked words per sequence.\n\n Returns:\n An Estimator input_fn for serving the model.\n \"\"\"\n\n def _serving_input_fn():\n \"\"\"An input_fn that expects a serialized tf.Example.\"\"\"\n\n serialized_example = tf.placeholder(\n dtype=tf.string, shape=[None], name=\"examples\")\n receiver_tensors = {\"examples\": serialized_example}\n name_to_features = {\n \"input_ids_1\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"input_mask_1\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"input_ids_2\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"input_mask_2\": tf.FixedLenFeature([max_seq_length], tf.int64),\n \"documents_match_labels\": tf.FixedLenFeature([1], tf.float32, 0)\n }\n if (FLAGS.train_mode == constants.TRAIN_MODE_PRETRAIN or\n FLAGS.train_mode == constants.TRAIN_MODE_JOINT_TRAIN):\n # This is to support model export during model pretraining or\n # joint-training process.\n name_to_features[\"masked_lm_positions_1\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_ids_1\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_weights_1\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.float32)\n name_to_features[\"masked_lm_positions_2\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_ids_2\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.int64)\n name_to_features[\"masked_lm_weights_2\"] = tf.FixedLenFeature(\n [max_predictions_per_seq], tf.float32)\n\n parsed_features = 
tf.parse_example(serialized_example, name_to_features)\n # As tf.Example only supports tf.int64, but the TPU only supports\n # tf.int32, we need to cast all int64 to int32.\n parsed_features[\"input_ids_1\"] = tf.cast(parsed_features[\"input_ids_1\"],\n tf.int32)\n parsed_features[\"input_ids_2\"] = tf.cast(parsed_features[\"input_ids_2\"],\n tf.int32)\n parsed_features[\"documents_match_labels\"] = tf.cast(\n parsed_features[\"documents_match_labels\"], tf.float32)\n parsed_features[\"input_mask_1\"] = tf.cast(parsed_features[\"input_mask_1\"],\n tf.int32)\n parsed_features[\"input_mask_2\"] = tf.cast(parsed_features[\"input_mask_2\"],\n tf.int32)\n if (FLAGS.train_mode == constants.TRAIN_MODE_PRETRAIN or\n FLAGS.train_mode == constants.TRAIN_MODE_JOINT_TRAIN):\n parsed_features[\"masked_lm_ids_1\"] = tf.cast(\n parsed_features[\"masked_lm_ids_1\"], tf.int32)\n parsed_features[\"masked_lm_ids_2\"] = tf.cast(\n parsed_features[\"masked_lm_ids_2\"], tf.int32)\n parsed_features[\"masked_lm_weights_1\"] = tf.cast(\n parsed_features[\"masked_lm_weights_1\"], tf.float32)\n parsed_features[\"masked_lm_weights_2\"] = tf.cast(\n parsed_features[\"masked_lm_weights_2\"], tf.float32)\n parsed_features[\"masked_lm_positions_1\"] = tf.cast(\n parsed_features[\"masked_lm_positions_1\"], tf.int32)\n parsed_features[\"masked_lm_positions_2\"] = tf.cast(\n parsed_features[\"masked_lm_positions_2\"], tf.int32)\n return tf_estimator.export.ServingInputReceiver(\n features=parsed_features, receiver_tensors=receiver_tensors)\n\n return _serving_input_fn\n", "# coding=utf-8\n# Copyright 2022 The Google Research Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Common utility functions for cost model.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport base64\nfrom typing import Any, Optional, Text, TypeVar\n\nimport numpy as np\nimport tensorflow.compat.v1 as tf\n\nfrom tunas import cost_model_data\nfrom tunas import schema\n\n\ndef get_mask(oneof):\n if len(oneof.choices) > 1 and oneof.mask is None:\n raise ValueError('OneOf must have a mask if it has more than one choice: {}'\n .format(oneof))\n return oneof.mask\n\n\n_T = TypeVar('_T')\n\n\ndef kron(x, y):\n \"\"\"TF version of tensor product between two vectors (similar to np.kron).\"\"\"\n return tf.reshape(tf.expand_dims(x, 1) * tf.expand_dims(y, 0), [-1])\n\n\ndef estimate_cost(features, ssd):\n \"\"\"Generate a TensorFlow subgraph to estimate the cost of an architecture.\n\n Args:\n features: A 1D float tensor containing features for a single network\n architecture.\n ssd: The name of the search space definition to use for the cost model.\n\n Returns:\n A scalar float tensor containing the estimated cost for the specified\n network architecture\n \"\"\"\n kernel_data = cost_model_data.KERNEL_DATA[ssd]\n kernel_data = base64.decodebytes(kernel_data)\n kernel = np.frombuffer(kernel_data, cost_model_data.SERIALIZATION_DTYPE)\n kernel = kernel.reshape([-1, 
1]).astype(np.float32)\n\n bias_data = cost_model_data.BIAS_DATA[ssd]\n bias_data = base64.decodebytes(bias_data)\n bias = np.frombuffer(bias_data, cost_model_data.SERIALIZATION_DTYPE)\n bias = bias.reshape([1]).astype(np.float32)\n\n with tf.name_scope('estimate_cost'):\n batch_features = tf.expand_dims(features, axis=0)\n batch_prediction = tf.linalg.matmul(batch_features, kernel)\n batch_prediction = tf.nn.bias_add(batch_prediction, bias)\n return tf.squeeze(batch_prediction, axis=[0, 1])\n" ]
[ [ "tensorflow.constant", "tensorflow.gather_nd", "tensorflow.zeros", "tensorflow.reduce_mean", "tensorflow.shape", "tensorflow.minimum", "tensorflow.cast", "tensorflow.exp", "tensorflow.eye", "tensorflow.math.log", "tensorflow.expand_dims", "tensorflow.keras.optimizers.Adam", "tensorflow.argmax", "numpy.zeros", "tensorflow.reduce_logsumexp", "tensorflow.GradientTape" ], [ "numpy.maximum", "numpy.minimum", "numpy.mean", "numpy.array", "numpy.zeros", "numpy.random.randint" ], [ "matplotlib.pyplot.legend", "pandas.concat", "matplotlib.pyplot.tight_layout", "pandas.Series", "numpy.diag_indices", "numpy.triu_indices_from", "matplotlib.pyplot.subplots", "matplotlib.pyplot.draw", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlim", "numpy.zeros_like", "scipy.spatial.distance.pdist", "scipy.cluster.hierarchy.dendrogram", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.figure" ], [ "tensorflow.compat.v1.ones_like", "tensorflow.compat.v1.stack", "tensorflow.compat.v1.square", "tensorflow.compat.v1.concat", "tensorflow.compat.v1.nn.max_pool_v2", "tensorflow.compat.v1.reduce_mean", "tensorflow.compat.v1.abs", "tensorflow.compat.v1.convert_to_tensor", "tensorflow.compat.v1.get_collection", "tensorflow.compat.v1.stop_gradient", "tensorflow.compat.v1.zeros_like", "tensorflow.compat.v1.cast", "tensorflow.compat.v1.squeeze", "tensorflow.compat.v1.greater" ], [ "tensorflow.compat.v1.test.main" ], [ "tensorflow.compat.v1.nn.softmax_cross_entropy_with_logits_v2", "tensorflow.compat.v1.reshape", "tensorflow.compat.v1.reduce_sum", "tensorflow.compat.v1.nn.dynamic_rnn" ], [ "numpy.dot", "numpy.sqrt", "tensorflow.compat.v2.compat.v1.tpu.initialize_system", "tensorflow.compat.v2.TensorShape", "numpy.random.randint", "numpy.eye", "tensorflow.compat.v2.compat.v1.tpu.replicate", "tensorflow.compat.v2.reshape", "tensorflow.compat.v2.nn.l2_normalize", "tensorflow.compat.v2.Graph", "tensorflow.compat.v2.random.uniform", "numpy.zeros", "numpy.log", "tensorflow.compat.v2.test.main", "numpy.isnan", "tensorflow.compat.v2.one_hot", "tensorflow.compat.v2.constant", "numpy.array", "tensorflow.compat.v2.concat", "numpy.random.uniform", "tensorflow.compat.v2.keras.layers.Input" ], [ "tensorflow.keras.Input", "tensorflow.shape", "tensorflow.reduce_mean", "tensorflow.keras.layers.Dense", "tensorflow.reshape", "tensorflow.keras.Sequential", "tensorflow.keras.Model", "tensorflow.pad" ], [ "pandas.concat", "tensorflow.compat.v1.square", "numpy.sqrt", "numpy.random.choice", "numpy.squeeze", "sklearn.decomposition.FactorAnalysis", "numpy.linalg.eigh", "numpy.mean", "numpy.cov", "numpy.identity" ], [ "tensorflow.compat.v1.app.run", "tensorflow.contrib.training.HParams", "tensorflow.compat.v1.logging.set_verbosity" ], [ "tensorflow.compat.v1.reduce_mean", "tensorflow.compat.v1.zeros", "tensorflow.compat.v1.clip_by_value", "tensorflow.compat.v1.shape", "numpy.random.RandomState" ], [ "torch.enable_grad", "numpy.sqrt", "torch.sqrt", "torch.min", "torch.zeros_like", "torch.tensor", "torch.no_grad", "torch.cos" ], [ "tensorflow.convert_to_tensor", "tensorflow.train.CheckpointManager", "tensorflow.train.latest_checkpoint", "tensorflow.Variable", "numpy.random.choice", "tensorflow.reduce_mean", "tensorflow.keras.layers.Dense", "tensorflow.train.Checkpoint", "tensorflow.stop_gradient", "tensorflow.keras.losses.Huber", "tensorflow.keras.optimizers.Adam", "numpy.argmax", "numpy.random.rand", "tensorflow.one_hot", "tensorflow.argmax", "numpy.vstack", "tensorflow.GradientTape" ], [ "numpy.zeros" ], [ "tensorflow.keras.layers.Dense", 
"tensorflow.keras.layers.BatchNormalization", "tensorflow.nn.relu", "tensorflow.keras.layers.experimental.SyncBatchNormalization" ], [ "tensorflow.compat.v1.nn.l2_normalize", "tensorflow.compat.v1.stop_gradient" ], [ "numpy.random.seed", "matplotlib.pyplot.title", "tensorflow.io.gfile.GFile", "tensorflow.io.gfile.makedirs", "numpy.save", "matplotlib.pyplot.colorbar", "numpy.mean", "tensorflow.io.gfile.listdir", "numpy.prod", "scipy.stats.sem", "numpy.array", "matplotlib.pyplot.figure" ], [ "tensorflow.compat.v1.concat", "tensorflow.compat.v1.unique", "tensorflow.compat.v1.zeros_initializer", "tensorflow.compat.v1.compat.v1.Print", "tensorflow.compat.v1.shape", "tensorflow.compat.v1.RaggedTensor.from_value_rowids", "tensorflow.compat.v1.ones", "tensorflow.compat.v1.truncated_normal_initializer", "tensorflow.compat.v1.nn.top_k", "tensorflow.compat.v1.reduce_sum", "tensorflow.compat.v1.ones_initializer", "tensorflow.compat.v1.sparse.add", "tensorflow.compat.v1.variable_scope", "tensorflow.compat.v1.name_scope", "tensorflow.compat.v1.ones_like", "tensorflow.compat.v1.nn.softmax", "tensorflow.compat.v1.sparse.to_dense", "tensorflow.compat.v1.reduce_mean", "tensorflow.compat.v1.sparse.softmax", "tensorflow.compat.v1.sparse.retain", "tensorflow.compat.v1.SparseTensor", "tensorflow.compat.v1.minimum", "tensorflow.compat.v1.nn.l2_normalize", "tensorflow.compat.v1.cast", "tensorflow.compat.v1.reduce_max", "tensorflow.compat.v1.sparse.reduce_max", "tensorflow.compat.v1.stack", "tensorflow.compat.v1.device", "tensorflow.compat.v1.expand_dims", "tensorflow.compat.v1.argmax", "tensorflow.compat.v1.gather", "tensorflow.contrib.layers.fully_connected", "tensorflow.compat.v1.sparse.reorder", "tensorflow.compat.v1.matmul", "tensorflow.compat.v1.logging.info", "tensorflow.compat.v1.range", "tensorflow.compat.v1.nn.bias_add", "tensorflow.compat.v1.squeeze", "tensorflow.compat.v1.greater" ], [ "tensorflow.compat.v1.estimator.export.ServingInputReceiver", "tensorflow.compat.v1.data.experimental.parallel_interleave", "tensorflow.compat.v1.FixedLenFeature", "tensorflow.compat.v1.placeholder", "tensorflow.compat.v1.parse_single_example", "tensorflow.compat.v1.cast", "tensorflow.compat.v1.constant", "tensorflow.compat.v1.parse_example" ], [ "tensorflow.compat.v1.expand_dims", "tensorflow.compat.v1.linalg.matmul", "numpy.frombuffer", "tensorflow.compat.v1.nn.bias_add", "tensorflow.compat.v1.squeeze", "tensorflow.compat.v1.name_scope" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "1.3", "0.19", "1.1", "1.5", "0.24", "0.20", "1.0", "0.25", "1.2" ], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "1.3", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "0.16", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
bburan/cochlear
[ "1e7ea32730a794b9f6936440a32e4a82c4bf73e7" ]
[ "cochlear/noise_exposure.py" ]
[ "from __future__ import division\n\nimport logging\nlog = logging.getLogger(__name__)\n\nimport numpy as np\nfrom scipy import signal\n\nfrom traits.api import Instance, Float, Property, Int\nfrom traitsui.api import (View, Item, ToolBar, Action, ActionGroup, VGroup,\n HSplit, MenuBar, Menu, HGroup)\nfrom chaco.api import Plot, ArrayPlotData\nfrom enable.api import Component, ComponentEditor\nfrom pyface.api import ImageResource\n\nfrom experiment import (AbstractParadigm, Expression, AbstractData,\n AbstractController, AbstractExperiment, icon_dir)\nfrom experiment.channel import FileChannel\nfrom experiment.coroutine import blocked, rms\n\nfrom neurogen.block_definitions import (BandlimitedNoise, Cos2Envelope)\nfrom neurogen.calibration import InterpCalibration\nfrom neurogen.calibration.util import (psd, psd_freq, tone_power_conv_nf)\nfrom neurogen.util import db, dbtopa\n\nfrom cochlear.nidaqmx import (DAQmxDefaults, DAQmxChannel,\n ContinuousDAQmxPlayer, DAQmxAttenControl,\n ContinuousDAQmxSource)\n\nDAC_FS = 100e3\nADC_FS = 100e3\n\nclass NoiseExposureData(AbstractData):\n\n noise_channel = Instance('experiment.channel.Channel')\n\n def _noise_channel_default(self):\n return FileChannel(node=self.store_node, name='mic_input',\n expected_duration=60*60*2, dtype=np.float32)\n\n\nclass NoiseExposureParadigm(AbstractParadigm):\n\n kw = dict(context=True, log=True)\n center_frequency = \\\n Expression(6e3, label='Center frequency (Hz)', dtype=np.float, **kw)\n bandwidth = Expression(4e3, label='Bandwidth (Hz)', dtype=np.float, **kw)\n rs = Expression(85, label='Min. atten. in stop band (dB)',\n dtype=np.float, **kw)\n rp = Expression(0.3, label='Max. ripple in pass band (dB)',\n dtype=np.float, **kw)\n order = Expression(7, label='Filter order', dtype=np.float, **kw)\n\n level = Expression(100, label='Level (dB SPL)', dtype=np.float, **kw)\n seed = Expression(1, label='Noise seed', dtype=np.int, **kw)\n duration = Expression(60, label='Exposure duration (sec)',\n dtype=np.float, **kw)\n rise_time = Expression(0, label='Noise rise time (sec)',\n dtype=np.float, **kw)\n\n mic_sens = Float(2.7, label='Mic. sens. (mV/Pa)', dtype=np.float, **kw)\n mic_sens_dbv = Property(depends_on='mic_sens', dtype=np.float,\n label='Mic. sens. dB(V/Pa)', **kw)\n speaker_sens = Float(86.89, label='Speaker sens. (mV/Pa)', dtype=np.float,\n **kw)\n speaker_sens_dbv = Property(depends_on='speaker_sens', dtype=np.float,\n label='Speaker sens. 
dB(V/Pa)', **kw)\n\n def _get_mic_sens_dbv(self):\n return db(self.mic_sens*1e-3)\n\n def _get_speaker_sens_dbv(self):\n return db(self.speaker_sens*1e-3)\n\n traits_view = View(\n VGroup(\n VGroup(\n VGroup(\n 'center_frequency',\n 'bandwidth',\n 'rp',\n 'rs',\n 'order',\n label='Filter settings',\n show_border=True\n ),\n 'level',\n 'seed',\n 'duration',\n 'rise_time',\n label='Stimulus',\n show_border=True\n ),\n HGroup(\n VGroup('mic_sens', 'speaker_sens'),\n VGroup('mic_sens_dbv', 'speaker_sens_dbv', style='readonly'),\n label='Hardware settings',\n show_border=True\n ),\n )\n )\n\n\nclass NoiseExposureController(AbstractController, DAQmxDefaults):\n\n mic_cal = Instance('neurogen.calibration.InterpCalibration')\n poll_rate = 1\n\n def setup_experiment(self, info=None):\n # Set up the speaker output\n token = BandlimitedNoise(name='noise') >> Cos2Envelope(name='envelope')\n channel = DAQmxChannel(calibration=InterpCalibration.as_attenuation(),\n token=token, voltage_min=-10, voltage_max=10)\n iface_dac = ContinuousDAQmxPlayer(fs=DAC_FS, done_callback=self.stop)\n iface_dac.add_channel(channel, name='primary')\n\n # Set up the mic input\n adc_pipeline = blocked(int(ADC_FS*self.poll_rate), -1, self)\n iface_adc = ContinuousDAQmxSource(fs=ADC_FS, pipeline=adc_pipeline,\n callback_samples=25e3,\n input_line='/Dev1/ai1')\n\n # Save the results\n self.channel = channel\n self.iface_adc = iface_adc\n self.iface_dac = iface_dac\n self.token = token\n super(NoiseExposureController, self).setup_experiment(info)\n\n def send(self, data):\n self.model.update_plots(ADC_FS, data)\n self.model.data.noise_channel.send(data)\n\n def start_experiment(self, info=None):\n self.refresh_context(evaluate=True)\n self.iface_adc.start()\n self.iface_dac.play_continuous()\n self.log_trial()\n\n def stop_experiment(self, info=None):\n self.iface_adc.stop()\n self.iface_dac.stop()\n\n def set_duration(self, value):\n self.iface_dac.set_value('primary.envelope.duration', value)\n self.iface_dac.duration = value\n self.model.overall_rms_plot.index_range.high_setting = value\n\n def set_ramp_duration(self, value):\n self.iface_dac.set_value('primary.envelope.rise_time', value)\n self.iface_dac.duration = value\n\n def set_center_frequency(self, value):\n self.iface_dac.set_value('primary.noise.fc', value)\n\n def set_bandwidth(self, value):\n self.iface_dac.set_value('primary.noise.bandwidth', value)\n\n def set_level(self, value):\n self.iface_dac.set_value('primary.noise.level', value)\n\n def set_seed(self, value):\n self.iface_dac.set_value('primary.noise.seed', value)\n\n def set_rise_time(self, value):\n self.iface_dac.set_value('primary.envelope.rise_time', value)\n\n def set_order(self, value):\n self.iface_dac.set_value('primary.noise.order', value)\n\n def set_rs(self, value):\n self.iface_dac.set_value('primary.noise.rs', value)\n\n def set_rp(self, value):\n self.iface_dac.set_value('primary.noise.rp', value)\n\n def set_speaker_sens_dbv(self, value):\n self.channel.calibration = InterpCalibration([0, 100e3], [value, value])\n\n def set_mic_sens(self, value):\n level = self.get_current_value('level')\n max_value = dbtopa(level)*value*1e-3\n max_value_decade = 10**np.ceil(np.log10(max_value*2))*10\n self.iface_adc.expected_range = max_value_decade\n\n\nclass NoiseExposureExperiment(AbstractExperiment):\n\n paradigm = Instance(NoiseExposureParadigm, ())\n data = Instance(AbstractData, ())\n\n rms_data = Instance(ArrayPlotData)\n recent_rms_plot = Instance(Component)\n overall_rms_plot = 
Instance(Component)\n fft_plot = Instance(Component)\n\n current_time = Float(0)\n current_update = Int(0)\n\n current_spl = Float(np.nan, label='Current inst. output (dB SPL)')\n current_spl_average = Float(np.nan, label='Average of last min. (dB SPL)')\n overall_spl_average = Float(np.nan, label='Average output (dB SPL)')\n\n _coefs = None\n _zf = None\n\n def update_plots(self, fs, data):\n self.current_update += 1\n data = signal.detrend(data.ravel())\n\n # Plot RMS\n if self._coefs is None:\n self._coefs = signal.iirfilter(2, (400.0/(fs/2), 40e3/(fs/2)))\n b, a = self._coefs\n self._zf = signal.lfiltic(b, a, data[:len(a)-1], data[:len(b)-1])\n b, a = self._coefs\n\n data, self._zf = signal.lfilter(b, a, data, zi=self._zf)\n rms = np.mean(data**2)**0.5\n db_rms = db(rms)-self.paradigm.mic_sens_dbv-db(20e-6)\n self.append_data(time=self.current_time, rms=db_rms)\n self.current_time += len(data)/fs\n\n self.current_spl = db_rms\n self.current_spl_average = self.rms_data.get_data('rms')[-60:].mean()\n self.overall_spl_average = self.rms_data.get_data('rms').mean()\n\n w_frequency = psd_freq(data, fs)\n w_psd = psd(data, fs, 'hamming')\n w_psd_db = db(w_psd)-self.paradigm.mic_sens_dbv-db(20e-6)\n self.rms_data.update_data(frequency=w_frequency, psd=w_psd_db)\n\n def _rms_data_default(self):\n return ArrayPlotData(time=[], rms=[], frequency=[], psd=[])\n\n def append_data(self, **kwargs):\n for k, v in kwargs.items():\n kwargs[k] = np.append(self.rms_data.get_data(k), v)\n self.rms_data.update_data(**kwargs)\n\n def _overall_rms_plot_default(self):\n plot = Plot(self.rms_data)\n plot.index_range.low_setting = 0\n plot.plot(('time', 'rms'))\n return plot\n\n def _recent_rms_plot_default(self):\n plot = Plot(self.rms_data)\n plot.index_range.high_setting = 'auto'\n plot.index_range.low_setting = 'track'\n plot.index_range.tracking_amount = 30\n plot.value_range.high_setting = 'auto'\n plot.value_range.low_setting = 'track'\n plot.value_range.tracking_amount = 5\n plot.plot(('time', 'rms'))\n return plot\n\n def _fft_plot_default(self):\n plot = Plot(self.rms_data)\n plot.index_range.low_setting = 1e3\n plot.index_range.high_setting = 20e3\n plot.value_range.low_setting = 10\n plot.value_range.high_setting = 80\n plot.plot(('frequency', 'psd'))\n plot.index_scale = 'log'\n return plot\n\n traits_view = View(\n HSplit(\n VGroup(\n VGroup(\n Item('paradigm', style='custom', show_label=False,\n width=200),\n show_border=True,\n label='Settings',\n enabled_when=\"handler.state!='running'\",\n ),\n VGroup(\n 'current_spl',\n 'current_spl_average',\n 'overall_spl_average',\n style='readonly',\n show_border=True,\n label='Output',\n ),\n ),\n VGroup(\n HGroup(\n Item('overall_rms_plot',\n editor=ComponentEditor(width=200, height=200)),\n Item('recent_rms_plot',\n editor=ComponentEditor(width=200, height=200)),\n show_labels=False,\n ),\n Item('fft_plot', show_label=False,\n editor=ComponentEditor(width=200, height=200)),\n ),\n show_labels=False,\n ),\n resizable=True,\n toolbar=ToolBar(\n Action(name='Start', action='start',\n image=ImageResource('1rightarrow', icon_dir),\n enabled_when='handler.state==\"uninitialized\"'),\n Action(name='Stop', action='stop',\n image=ImageResource('stop', icon_dir),\n enabled_when='handler.state==\"running\"'),\n ),\n width=0.5,\n height=0.5,\n id='lbhb.NoiseExposureExperiment',\n )\n\n\ndef configure_logging(filename):\n time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'\n simple_format = '%(name)s - %(message)s'\n\n logging_config = {\n 
'version': 1,\n 'formatters': {\n 'time': {'format': time_format},\n 'simple': {'format': simple_format},\n },\n 'handlers': {\n # This is what gets printed out to the console\n 'console': {\n 'class': 'logging.StreamHandler',\n 'formatter': 'simple',\n 'level': 'DEBUG',\n },\n # This is what gets saved to the file\n 'file': {\n 'class': 'logging.FileHandler',\n 'formatter': 'time',\n 'filename': filename,\n 'level': 'DEBUG',\n }\n },\n 'loggers': {\n '__main__': {'level': 'ERROR'},\n 'cochlear': {'level': 'ERROR'},\n 'cochlear.nidaqmx': {'level': 'ERROR'},\n 'neurogen.block_definitions': {'level': 'DEBUG'},\n },\n 'root': {\n 'handlers': ['console', 'file'],\n },\n }\n logging.config.dictConfig(logging_config)\n\n\nif __name__ == '__main__':\n import logging.config\n import PyDAQmx as pyni\n import warnings\n import tables\n pyni.DAQmxResetDevice('Dev1')\n configure_logging('temp.log')\n log.debug('====================== MAIN =======================')\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n with tables.open_file('temp.hdf5', 'w') as fh:\n data = NoiseExposureData(store_node=fh.root)\n controller = NoiseExposureController()\n NoiseExposureExperiment(data=data) \\\n .configure_traits(handler=controller)\n" ]
[ [ "scipy.signal.lfilter", "numpy.log10", "numpy.mean", "scipy.signal.iirfilter" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "1.3", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "0.16", "1.8" ], "tensorflow": [] } ]
visinf/deblur-devil
[ "53cc4c72a4ddb9dcede5ee52dc53000c39ff5dab", "53cc4c72a4ddb9dcede5ee52dc53000c39ff5dab" ]
[ "contrib/cmap.py", "losses/classification_losses.py" ]
[ "# Author: Jochen Gast <[email protected]>\n\nimport numpy as np\nimport torch\nfrom matplotlib import cm\nfrom torch import nn\n\n# ----------------------------------------------------------------------------------------\n# See https://matplotlib.org/examples/color/colormaps_reference.html\n#\n# Typical choices are: 'gray', jet', 'viridis', 'hot'\n# ----------------------------------------------------------------------------------------\n\nCOLORMAPS = [\n\n # Perceptually Uniform Sequential\n 'viridis', 'plasma', 'inferno', 'magma',\n\n # Sequential\n 'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',\n 'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',\n 'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn',\n\n # Sequential (2)\n 'binary', 'gist_yarg', 'gist_gray', 'gray', 'bone', 'pink',\n 'spring', 'summer', 'autumn', 'winter', 'cool', 'Wistia',\n 'hot', 'afmhot', 'gist_heat', 'copper',\n\n # Diverging\n 'PiYG', 'PRGn', 'BrBG', 'PuOr', 'RdGy', 'RdBu',\n 'RdYlBu', 'RdYlGn', 'Spectral', 'coolwarm', 'bwr', 'seismic',\n\n # Qualitative,\n 'Pastel1', 'Pastel2', 'Paired', 'Accent',\n 'Dark2', 'Set1', 'Set2', 'Set3',\n 'tab10', 'tab20', 'tab20b', 'tab20c',\n\n # Miscellaneous\n 'flag', 'prism', 'ocean', 'gist_earth', 'terrain', 'gist_stern',\n 'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg', 'hsv',\n 'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar'\n\n]\n\n\nclass ColorMap(nn.Module):\n #\n # Note: uint8 inputs are never normalized.\n # float inputs are normalized if normalize_floats=True\n #\n def __init__(self, cmap='jet', normalize_floats=True, output_dtype=torch.uint8):\n super().__init__()\n if cmap not in COLORMAPS:\n raise ValueError('Unknown colormap!')\n self.normalize_floats = normalize_floats\n self.cmap = torch.from_numpy(self.get_cmap_as_float_array(cmap)).view(-1, 3)\n if output_dtype == torch.uint8:\n self.cmap = (255 * self.cmap).byte()\n\n @staticmethod\n def get_cmap_as_float_array(cmap_name):\n raw_cmap = cm.get_cmap(cmap_name, 256)\n cmap_array = raw_cmap(np.arange(256))[:, 0:3] # remove alpha channels\n return cmap_array\n\n @staticmethod\n def min2d(tensor):\n b, c, h, w = tensor.size()\n return tensor.view(b, c, h * w).min(dim=2, keepdim=True)[0].unsqueeze(dim=3)\n\n @staticmethod\n def max2d(tensor):\n b, c, h, w = tensor.size()\n return tensor.view(b, c, h * w).max(dim=2, keepdim=True)[0].unsqueeze(dim=3)\n\n def forward(self, value):\n b, c, h, w = value.size()\n assert c == 1, 'ColorMap expects second dimension of size 1L'\n if not isinstance(value, torch.ByteTensor):\n if self.normalize_floats:\n cmin = self.min2d(value)\n cmax = self.max2d(value)\n normalized = (value - cmin) / torch.max(cmax - cmin, torch.ones_like(value) * 1e-5)\n normalized = (normalized * 255).long()\n else:\n normalized = (value * 255).long()\n else:\n normalized = value.long()\n self.cmap = self.cmap.to(value.device)\n z = torch.index_select(self.cmap, dim=0, index=normalized.view(-1))\n return z.transpose(0, 1).contiguous().view(b, 3, h, w)\n", "# Author: Jochen Gast <[email protected]>\r\n\r\nimport torch\r\nimport torch.nn as nn\r\n\r\nfrom losses import factory\r\n\r\n\r\nclass ClassificationLoss(nn.Module):\r\n def __init__(self, args, topk=(1, 2, 3), reduction='mean'):\r\n super().__init__()\r\n self.args = args\r\n self.cross_entropy = torch.nn.CrossEntropyLoss(reduction=reduction)\r\n self.topk = topk\r\n\r\n @staticmethod\r\n def accuracy(output, target, topk=(1,)):\r\n maxk = max(topk)\r\n batch_size = target.size(0)\r\n _, pred = output.topk(maxk, 1, 
True, True)\r\n        pred = pred.t()\r\n        correct = pred.eq(target.view(1, -1))\r\n        res = []\r\n        for k in topk:\r\n            correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)\r\n            res.append(correct_k.mul_(100.0 / batch_size))\r\n        return res\r\n\r\n    def forward(self, output_dict, target_dict):\r\n        output = output_dict[\"output1\"]\r\n        target = target_dict[\"target1\"]\r\n        # compute actual losses\r\n        cross_entropy = self.cross_entropy(output, target)\r\n        # create dictionary for losses\r\n        loss_dict = {\r\n            \"xe\": cross_entropy,\r\n        }\r\n        acc_k = ClassificationLoss.accuracy(output, target, topk=self.topk)\r\n        for acc, k in zip(acc_k, self.topk):\r\n            loss_dict[\"top%i\" % k] = acc\r\n        return loss_dict\r\n\r\n\r\nfactory.register(\"ClassificationLoss\", ClassificationLoss)\r\n" ]
[ [ "numpy.arange", "torch.ones_like", "matplotlib.cm.get_cmap" ], [ "torch.nn.CrossEntropyLoss" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
alshedivat/federated
[ "fe9f44a504bc51b603a3ab9a181148da0aa9612f", "fe9f44a504bc51b603a3ab9a181148da0aa9612f", "fe9f44a504bc51b603a3ab9a181148da0aa9612f", "fe9f44a504bc51b603a3ab9a181148da0aa9612f" ]
[ "optimization/main/federated_trainer.py", "gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py", "triehh/triehh_tf_test.py", "fedopt_guide/stackoverflow_transformer/centralized_main.py" ]
[ "# Copyright 2020, Google LLC.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Runs federated training on various tasks using a generalized form of FedAvg.\n\nSpecifically, we create (according to flags) an iterative processes that allows\nfor client and server learning rate schedules, as well as various client and\nserver optimization methods. For more details on the learning rate scheduling\nand optimization methods, see `shared/optimizer_utils.py`. For details on the\niterative process, see `shared/fed_avg_schedule.py`.\n\"\"\"\n\nimport collections\nimport os.path\nfrom typing import Callable\n\nfrom absl import app\nfrom absl import flags\nimport tensorflow as tf\nimport tensorflow_federated as tff\n\nfrom optimization.cifar100 import federated_cifar100\nfrom optimization.emnist import federated_emnist\nfrom optimization.emnist_ae import federated_emnist_ae\nfrom optimization.shakespeare import federated_shakespeare\nfrom optimization.shared import fed_avg_schedule\nfrom optimization.shared import optimizer_utils\nfrom optimization.shared import training_specs\nfrom optimization.stackoverflow import federated_stackoverflow\nfrom optimization.stackoverflow_lr import federated_stackoverflow_lr\nfrom utils import training_loop\nfrom utils import utils_impl\n\n_SUPPORTED_TASKS = [\n 'cifar100', 'emnist_cr', 'emnist_ae', 'shakespeare', 'stackoverflow_nwp',\n 'stackoverflow_lr'\n]\n\nwith utils_impl.record_hparam_flags() as optimizer_flags:\n # Defining optimizer flags\n optimizer_utils.define_optimizer_flags('client')\n optimizer_utils.define_optimizer_flags('server')\n optimizer_utils.define_lr_schedule_flags('client')\n optimizer_utils.define_lr_schedule_flags('server')\n\nwith utils_impl.record_hparam_flags() as shared_flags:\n # Federated training hyperparameters\n flags.DEFINE_integer('client_epochs_per_round', 1,\n 'Number of epochs in the client to take per round.')\n flags.DEFINE_integer('client_batch_size', 20, 'Batch size on the clients.')\n flags.DEFINE_integer('clients_per_round', 10,\n 'How many clients to sample per round.')\n flags.DEFINE_integer('client_datasets_random_seed', 1,\n 'Random seed for client sampling.')\n\n # Training loop configuration\n flags.DEFINE_string(\n 'experiment_name', None, 'The name of this experiment. 
Will be append to '\n '--root_output_dir to separate experiment results.')\n flags.mark_flag_as_required('experiment_name')\n flags.DEFINE_string('root_output_dir', '/tmp/fed_opt/',\n 'Root directory for writing experiment output.')\n flags.DEFINE_integer('total_rounds', 200, 'Number of total training rounds.')\n flags.DEFINE_integer(\n 'rounds_per_eval', 1,\n 'How often to evaluate the global model on the validation dataset.')\n flags.DEFINE_integer('rounds_per_checkpoint', 50,\n 'How often to checkpoint the global model.')\n\nwith utils_impl.record_hparam_flags() as task_flags:\n # Task specification\n flags.DEFINE_enum('task', None, _SUPPORTED_TASKS,\n 'Which task to perform federated training on.')\n\nwith utils_impl.record_hparam_flags() as cifar100_flags:\n # CIFAR-100 flags\n flags.DEFINE_integer('cifar100_crop_size', 24, 'The height and width of '\n 'images after preprocessing.')\n flags.DEFINE_bool(\n 'cifar100_distort_train_images', True, 'If set to True, '\n 'train images will be randomly cropped. Otherwise, all '\n 'images will simply be resized.')\n\nwith utils_impl.record_hparam_flags() as emnist_cr_flags:\n # EMNIST CR flags\n flags.DEFINE_enum(\n 'emnist_cr_model', 'cnn', ['cnn', '2nn'], 'Which model to '\n 'use. This can be a convolutional model (cnn) or a two '\n 'hidden-layer densely connected network (2nn).')\n\nwith utils_impl.record_hparam_flags() as shakespeare_flags:\n # Shakespeare flags\n flags.DEFINE_integer(\n 'shakespeare_sequence_length', 80,\n 'Length of character sequences to use for the RNN model.')\n\nwith utils_impl.record_hparam_flags() as so_nwp_flags:\n # Stack Overflow NWP flags\n flags.DEFINE_integer('so_nwp_vocab_size', 10000, 'Size of vocab to use.')\n flags.DEFINE_integer('so_nwp_num_oov_buckets', 1,\n 'Number of out of vocabulary buckets.')\n flags.DEFINE_integer('so_nwp_sequence_length', 20,\n 'Max sequence length to use.')\n flags.DEFINE_integer('so_nwp_max_elements_per_user', 1000, 'Max number of '\n 'training sentences to use per user.')\n flags.DEFINE_integer(\n 'so_nwp_num_validation_examples', 10000, 'Number of examples '\n 'to use from test set for per-round validation.')\n\nwith utils_impl.record_hparam_flags() as so_lr_flags:\n # Stack Overflow LR flags\n flags.DEFINE_integer('so_lr_vocab_tokens_size', 10000,\n 'Vocab tokens size used.')\n flags.DEFINE_integer('so_lr_vocab_tags_size', 500, 'Vocab tags size used.')\n flags.DEFINE_integer(\n 'so_lr_num_validation_examples', 10000, 'Number of examples '\n 'to use from test set for per-round validation.')\n flags.DEFINE_integer('so_lr_max_elements_per_user', 1000,\n 'Max number of training '\n 'sentences to use per user.')\n\nFLAGS = flags.FLAGS\n\nTASK_FLAGS = collections.OrderedDict(\n cifar100=cifar100_flags,\n emnist_cr=emnist_cr_flags,\n shakespeare=shakespeare_flags,\n stackoverflow_nwp=so_nwp_flags,\n stackoverflow_lr=so_lr_flags)\n\n\ndef _write_hparam_flags():\n \"\"\"Creates an ordered dictionary of hyperparameter flags and writes to CSV.\"\"\"\n hparam_dict = utils_impl.lookup_flag_values(shared_flags)\n\n # Update with optimizer flags corresponding to the chosen optimizers.\n opt_flag_dict = utils_impl.lookup_flag_values(optimizer_flags)\n opt_flag_dict = optimizer_utils.remove_unused_flags('client', opt_flag_dict)\n opt_flag_dict = optimizer_utils.remove_unused_flags('server', opt_flag_dict)\n hparam_dict.update(opt_flag_dict)\n\n # Update with task-specific flags.\n task_name = FLAGS.task\n if task_name in TASK_FLAGS:\n task_hparam_dict = 
utils_impl.lookup_flag_values(TASK_FLAGS[task_name])\n hparam_dict.update(task_hparam_dict)\n\n results_dir = os.path.join(FLAGS.root_output_dir, 'results',\n FLAGS.experiment_name)\n utils_impl.create_directory_if_not_exists(results_dir)\n hparam_file = os.path.join(results_dir, 'hparams.csv')\n utils_impl.atomic_write_series_to_csv(hparam_dict, hparam_file)\n\n\ndef main(argv):\n if len(argv) > 1:\n raise app.UsageError('Expected no command-line arguments, '\n 'got: {}'.format(argv))\n\n client_optimizer_fn = optimizer_utils.create_optimizer_fn_from_flags('client')\n server_optimizer_fn = optimizer_utils.create_optimizer_fn_from_flags('server')\n\n client_lr_schedule = optimizer_utils.create_lr_schedule_from_flags('client')\n server_lr_schedule = optimizer_utils.create_lr_schedule_from_flags('server')\n\n def iterative_process_builder(\n model_fn: Callable[[],\n tff.learning.Model]) -> tff.templates.IterativeProcess:\n \"\"\"Creates an iterative process using a given TFF `model_fn`.\n\n Args:\n model_fn: A no-arg function returning a `tff.learning.Model`.\n\n Returns:\n A `tff.templates.IterativeProcess`.\n \"\"\"\n if FLAGS.task == 'shakespeare' or FLAGS.task == 'stackoverflow_nwp':\n\n def client_weight_fn(local_outputs):\n return tf.cast(tf.squeeze(local_outputs['num_tokens']), tf.float32)\n else:\n client_weight_fn = None\n\n return fed_avg_schedule.build_fed_avg_process(\n model_fn=model_fn,\n client_optimizer_fn=client_optimizer_fn,\n client_lr=client_lr_schedule,\n server_optimizer_fn=server_optimizer_fn,\n server_lr=server_lr_schedule,\n client_weight_fn=client_weight_fn)\n\n task_spec = training_specs.TaskSpec(\n iterative_process_builder=iterative_process_builder,\n client_epochs_per_round=FLAGS.client_epochs_per_round,\n client_batch_size=FLAGS.client_batch_size,\n clients_per_round=FLAGS.clients_per_round,\n client_datasets_random_seed=FLAGS.client_datasets_random_seed)\n\n if FLAGS.task == 'cifar100':\n runner_spec = federated_cifar100.configure_training(\n task_spec,\n crop_size=FLAGS.cifar100_crop_size,\n distort_train_images=FLAGS.cifar100_distort_train_images)\n elif FLAGS.task == 'emnist_cr':\n runner_spec = federated_emnist.configure_training(\n task_spec, model=FLAGS.emnist_cr_model)\n elif FLAGS.task == 'emnist_ae':\n runner_spec = federated_emnist_ae.configure_training(task_spec)\n elif FLAGS.task == 'shakespeare':\n runner_spec = federated_shakespeare.configure_training(\n task_spec, sequence_length=FLAGS.shakespeare_sequence_length)\n elif FLAGS.task == 'stackoverflow_nwp':\n runner_spec = federated_stackoverflow.configure_training(\n task_spec,\n vocab_size=FLAGS.so_nwp_vocab_size,\n num_oov_buckets=FLAGS.so_nwp_num_oov_buckets,\n sequence_length=FLAGS.so_nwp_sequence_length,\n max_elements_per_user=FLAGS.so_nwp_max_elements_per_user,\n num_validation_examples=FLAGS.so_nwp_num_validation_examples)\n elif FLAGS.task == 'stackoverflow_lr':\n runner_spec = federated_stackoverflow_lr.configure_training(\n task_spec,\n vocab_tokens_size=FLAGS.so_lr_vocab_tokens_size,\n vocab_tags_size=FLAGS.so_lr_vocab_tags_size,\n max_elements_per_user=FLAGS.so_lr_max_elements_per_user,\n num_validation_examples=FLAGS.so_lr_num_validation_examples)\n else:\n raise ValueError(\n '--task flag {} is not supported, must be one of {}.'.format(\n FLAGS.task, _SUPPORTED_TASKS))\n\n _write_hparam_flags()\n\n training_loop.run(\n iterative_process=runner_spec.iterative_process,\n client_datasets_fn=runner_spec.client_datasets_fn,\n validation_fn=runner_spec.validation_fn,\n 
test_fn=runner_spec.test_fn,\n total_rounds=FLAGS.total_rounds,\n experiment_name=FLAGS.experiment_name,\n root_output_dir=FLAGS.root_output_dir,\n rounds_per_eval=FLAGS.rounds_per_eval,\n rounds_per_checkpoint=FLAGS.rounds_per_checkpoint)\n\n\nif __name__ == '__main__':\n app.run(main)\n", "# Copyright 2019, Google LLC.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Utility for filtering (via class. accuracy) the Federated EMNIST dataset.\"\"\"\n\nimport csv\nimport functools\nimport os.path\n\nimport tensorflow as tf\nimport tensorflow_federated as tff\n\nfrom gans.experiments.emnist import emnist_data_utils\n\nBASE_URL = 'https://storage.googleapis.com/tff-experiments-public/'\nCSVS_BASE_PATH = 'gans/csvs/'\n\n\[email protected]_cache(maxsize=1)\ndef get_unfiltered_client_data_for_training(batch_size):\n r\"\"\"Returns `tff.simulation.datasets.ClientData` of unfiltered Federated EMNIST data.\n\n The data returned will neither be filtered by user nor by example, so training\n can take place with all users and all examples for each user.\n\n Args:\n batch_size: Batch size of output dataset. If None, don't batch.\n\n Returns:\n A tff.simulation.datasets.ClientData` of real images of numbers/letters. The\n data has\n not been filtered.\n \"\"\"\n return get_filtered_client_data_for_training(None, None, batch_size)\n\n\[email protected]_cache(maxsize=1)\ndef get_filtered_by_user_client_data_for_training(invert_imagery_probability,\n accuracy_threshold,\n batch_size,\n cache_dir=None):\n r\"\"\"Returns `tff.simulation.datasets.ClientData` of filtered Federated EMNIST data.\n\n Input data gets filtered on a per-user basis; users get selected via the\n `accuracy_threshold` criterion, and then training can take place with all\n examples from only the selected users.\n\n Args:\n invert_imagery_probability: The probability that a user\\'s image data has\n pixel intensity inverted. E.g., `0p1` corresponds to 0.1, or a 10%\n probability that a user\\'s data is flipped. Note that to save time in\n experiment execution, this is precomputed via the ./filter_users.py\n script, and the selection here controls which file to read from.\n accuracy_threshold: Indicates the classification threshold by which a user\n is included in the training population. E.g., `lt0p882` means any user\n who\\'s data cumulatively classifies with <0.882 accuracy would be used for\n training; `gt0p939` means any user who\\'s data cumulatively classifies\n with >0.939 accuracy would be used for training. To save time in\n experiment execution, this assignment is precomputed via the\n ./filter_users.py script, and the flag selection here is to indicate which\n file to read from.\n batch_size: Batch size of output dataset. If None, don't batch.\n cache_dir: (Optional) base directory to cache the downloaded files. If None,\n caches in Keras' default cache directory.\n\n Returns:\n A tff.simulation.datasets.ClientData` of real images of numbers/letters. 
The\n data has\n been filtered by user classification accuracy as per the input arguments.\n \"\"\"\n path_to_data = os.path.join(CSVS_BASE_PATH,\n 'inv_prob_{}'.format(invert_imagery_probability),\n 'filter_by_user',\n 'acc_{}'.format(accuracy_threshold))\n\n try:\n filename = 'client_ids.csv'\n path_to_read_inversions_csv = tf.keras.utils.get_file(\n fname=filename,\n cache_subdir=path_to_data,\n cache_dir=cache_dir,\n origin=os.path.join(BASE_URL, path_to_data, filename))\n except Exception:\n msg = ('A URL fetch failure was encountered when trying to retrieve '\n 'filter-by-user generated csv file with invert_imagery_probability '\n '`{}` and accuracy_threshold `{}`. Please run the ./filter_users.py '\n 'script to generate the missing data, and use the `cache_dir` '\n 'argument to this method to specify the location of the generated '\n 'data csv file.'.format(invert_imagery_probability,\n accuracy_threshold))\n raise ValueError(msg)\n\n return get_filtered_client_data_for_training(path_to_read_inversions_csv,\n None, batch_size)\n\n\[email protected]_cache(maxsize=1)\ndef get_filtered_by_example_client_data_for_training(invert_imagery_probability,\n min_num_examples,\n example_class_selection,\n batch_size,\n cache_dir=None):\n r\"\"\"Returns `tff.simulation.datasets.ClientData` of filtered Federated EMNIST data.\n\n Input data gets filtered on a per-example basis. Any user meeting the\n `min_num_examples` criterion is included. The examples are limited to those\n that classified according to the `example_class_selection` criterion.\n\n Args:\n invert_imagery_probability: The probability that a user\\'s image data has\n pixel intensity inverted. E.g., `0p1` corresponds to 0.1, or a 10%\n probability that a user\\'s data is flipped. Note that to save time in\n experiment execution, this is precomputed via the ./filter_examples.py\n scripts, and the selection here controls which file to read from.\n min_num_examples: Indicates the minimum number of examples that are either\n correct or incorrect (as set by the `example_class_selection` argument) in\n a client\\'s local dataset for that client to be considered as part of\n training sub-population. To save time in experiment execution, this\n assignment is precomputed via the ./filter_examples.py script, and the\n flag selection here is to indicate which file to read from.\n example_class_selection: Indicates whether to train on a client\\'s correct\n or incorrect examples. To save time in experiment execution, this\n assignment is precomputed via the ./filter_examples.py script, and the\n flag selection here is to indicate which file to read from.\n batch_size: Batch size of output dataset. If None, don't batch.\n cache_dir: (Optional) base directory to cache the downloaded files. 
If None,\n caches in Keras' default cache directory.\n\n Returns:\n A `tff.simulation.datasets.ClientData` of real images of numbers/letters.\n The data\n has been filtered as per the input arguments (either not filtered, filtered\n by user classification accuracy, or filtered by example classification\n correctness).\n \"\"\"\n path_to_data = os.path.join(CSVS_BASE_PATH,\n 'inv_prob_{}'.format(invert_imagery_probability),\n 'filter_by_example',\n 'min_num_examples_{}'.format(min_num_examples),\n '{}'.format(example_class_selection))\n\n try:\n filename = 'client_ids.csv'\n path_to_read_inversions_csv = tf.keras.utils.get_file(\n fname=filename,\n cache_subdir=path_to_data,\n cache_dir=cache_dir,\n origin=os.path.join(BASE_URL, path_to_data, filename))\n\n filename = 'example_indices_map.csv'\n path_to_read_example_indices_csv = tf.keras.utils.get_file(\n fname=filename,\n cache_subdir=path_to_data,\n cache_dir=cache_dir,\n origin=os.path.join(BASE_URL, path_to_data, filename))\n except Exception:\n msg = ('A URL fetch failure was encountered when trying to retrieve '\n 'filter-by-example generated csv files with '\n 'invert_imagery_probability `{}`, min_num_examples `{}`, and '\n 'example_class_selection `{}`. Please run the ./filter_examples.py '\n 'script to generate the missing data, and use the `cache_dir` '\n 'argument to this method to specify the location of the generated '\n 'data csv files.'.format(invert_imagery_probability,\n min_num_examples, example_class_selection))\n raise ValueError(msg)\n\n return get_filtered_client_data_for_training(\n path_to_read_inversions_csv, path_to_read_example_indices_csv, batch_size)\n\n\ndef get_filtered_client_data_for_training(path_to_read_inversions_csv,\n path_to_read_example_indices_csv,\n batch_size):\n \"\"\"Form ClientData using paths to pixel inversion, example selection data.\"\"\"\n\n raw_client_data = emnist_data_utils.create_real_images_tff_client_data(\n 'train')\n client_ids = raw_client_data.client_ids\n\n selected_client_ids_inversion_map = None\n client_ids_example_indices_map = None\n # If filter-by-user or filter-by-example, load the csv data into maps, and\n # update the client IDs to just the users that will be part of training.\n if path_to_read_inversions_csv is not None:\n selected_client_ids_inversion_map, client_ids_example_indices_map = (\n _get_client_ids_inversion_and_example_indices_maps(\n path_to_read_inversions_csv, path_to_read_example_indices_csv))\n client_ids = list(selected_client_ids_inversion_map.keys())\n\n def _get_dataset(client_id):\n \"\"\"Retrieve/preprocess a tf.data.Dataset for a given client_id.\"\"\"\n raw_ds = raw_client_data.create_tf_dataset_for_client(client_id)\n\n invert_imagery = False\n if selected_client_ids_inversion_map:\n invert_imagery = selected_client_ids_inversion_map[client_id]\n\n # If filter-by-example, do it here.\n if client_ids_example_indices_map:\n raw_ds = _filter_by_example(raw_ds, client_ids_example_indices_map,\n client_id)\n\n return emnist_data_utils.preprocess_img_dataset(\n raw_ds,\n invert_imagery=invert_imagery,\n include_label=False,\n batch_size=batch_size,\n shuffle=True,\n repeat=False)\n\n return tff.simulation.datasets.ClientData.from_clients_and_fn(\n client_ids, _get_dataset)\n\n\ndef _filter_by_example(raw_ds, client_ids_example_indices_map, client_id):\n \"\"\"Form a tf.data.Dataset from the examples in the map for the client_id.\"\"\"\n example_indices = client_ids_example_indices_map[client_id]\n # B/c the csv stores the list as a string, we need 
to do some slightly\n # klugey conversion from a string to list. (We strip off the first and\n # last characters in the string, which are [ and ], and then split on\n # commas as delimiters, to recover the original list of ints.\n example_indices = [int(s) for s in example_indices[1:-1].split(',')]\n\n # Get the elements (OrderedDicts) in the raw data which are at the indices\n # indicated by the list above.\n elements = []\n index = 0\n for element in raw_ds:\n if index in example_indices:\n elements.append(element)\n index += 1\n\n # Bind the elements (via a generator fn) into a new tf.data.Dataset.\n def _generator():\n for element in elements:\n yield element\n\n return tf.data.Dataset.from_generator(_generator, raw_ds.output_types,\n raw_ds.output_shapes)\n\n\ndef _get_client_ids_inversion_and_example_indices_maps(\n path_to_read_inversions_csv, path_to_read_example_indices_csv):\n \"\"\"Return paths to csv files storing maps indicating the data to train on.\"\"\"\n if path_to_read_inversions_csv is None:\n raise ValueError(\n 'No path provided to the CSV file that stores map from client ids to '\n 'image inversion data.')\n\n # Load (from CSV file) the specific client IDs that the GAN will train on, and\n # whether or not the images on that client are inverted.\n selected_client_ids_inversion_map = {}\n with tf.io.gfile.GFile(path_to_read_inversions_csv, 'r') as csvfile:\n csvreader = csv.reader(csvfile)\n for [key, val] in csvreader:\n selected_client_ids_inversion_map[key] = (val == 'True')\n\n # If specified (via CSV file), the specific examples on each client ID that\n # the GAN will be trained on.\n client_ids_example_indices_map = None\n if path_to_read_example_indices_csv:\n client_ids_example_indices_map = {}\n with tf.io.gfile.GFile(path_to_read_example_indices_csv, 'r') as csvfile:\n csvreader = csv.reader(csvfile)\n for [key, val] in csvreader:\n client_ids_example_indices_map[key] = val\n\n set_1 = set(client_ids_example_indices_map.keys())\n set_2 = set(selected_client_ids_inversion_map.keys())\n symmetric_diff = set_1 ^ set_2\n if symmetric_diff:\n raise ValueError(\n 'The CSV files at path_to_read_inversions_csv and '\n 'path_to_read_example_indices_csv contain different keys.')\n\n return selected_client_ids_inversion_map, client_ids_example_indices_map\n", "# Copyright 2020, Google LLC.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport string\n\nimport tensorflow as tf\nimport tensorflow_federated as tff\n\nfrom analytics.heavy_hitters import heavy_hitters_testcase as hh_test\nfrom triehh import triehh_tf\n\n\nclass TriehhTfTest(hh_test.HeavyHittersTest):\n\n def test_accumulate_client_votes_works_as_expected(self):\n possible_prefix_extensions = tf.constant(\n ['a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR],\n dtype=tf.string)\n discovered_prefixes = tf.constant(['a', 'b', 'c', 'd'], dtype=tf.string)\n round_num = tf.constant(1)\n num_sub_rounds = tf.constant(1)\n example1 = tf.constant('ab', dtype=tf.string)\n\n discovered_prefixes_table = 
tf.lookup.StaticHashTable(\n tf.lookup.KeyValueTensorInitializer(\n discovered_prefixes, tf.range(tf.shape(discovered_prefixes)[0])),\n triehh_tf.DEFAULT_VALUE)\n\n possible_prefix_extensions_table = tf.lookup.StaticHashTable(\n tf.lookup.KeyValueTensorInitializer(\n possible_prefix_extensions,\n tf.range(tf.shape(possible_prefix_extensions)[0])),\n triehh_tf.DEFAULT_VALUE)\n\n accumulate_client_votes = triehh_tf.make_accumulate_client_votes_fn(\n round_num, num_sub_rounds, discovered_prefixes_table,\n possible_prefix_extensions_table,\n tf.constant(triehh_tf.DEFAULT_TERMINATOR, dtype=tf.string))\n\n initial_votes = tf.constant(\n [[1, 2, 1, 0, 0], [1, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n accumulated_votes = accumulate_client_votes(initial_votes, example1)\n\n expected_accumulated_votes = tf.constant(\n [[1, 3, 1, 0, 0], [1, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertAllEqual(accumulated_votes, expected_accumulated_votes)\n\n # An example that the prefix is not in the discovered prefixes.\n # The expected result is that the vote is not counted.\n example2 = tf.constant('ea', dtype=tf.string)\n accumulated_votes = accumulate_client_votes(initial_votes, example2)\n self.assertAllEqual(accumulated_votes, initial_votes)\n\n def test_client_update_works_as_expected(self):\n max_num_prefixes = tf.constant(10)\n max_user_contribution = tf.constant(10)\n possible_prefix_extensions = tf.constant(\n ['a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR],\n dtype=tf.string)\n discovered_prefixes = tf.constant(['a', 'b', 'c', 'd', 'e'],\n dtype=tf.string)\n round_num = tf.constant(1)\n num_sub_rounds = tf.constant(1)\n sample_data = tf.data.Dataset.from_tensor_slices(\n ['a', '', 'abc', 'bac', 'abb', 'aaa', 'acc', 'hi'])\n client_output = triehh_tf.client_update(\n sample_data, discovered_prefixes, possible_prefix_extensions, round_num,\n num_sub_rounds, max_num_prefixes, max_user_contribution,\n tf.constant(triehh_tf.DEFAULT_TERMINATOR, dtype=tf.string))\n\n # Each string is attached with triehh_tf.DEFAULT_TERMINATOR before the\n # client votes, so 'a$' get a vote here.\n expected_client_votes = tf.constant(\n [[1, 2, 1, 0, 0, 1], [1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n self.assertAllEqual(client_output.client_votes, expected_client_votes)\n\n def test_client_update_works_on_empty_local_datasets(self):\n max_num_prefixes = tf.constant(10)\n max_user_contribution = tf.constant(10)\n possible_prefix_extensions = tf.constant(\n ['a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR],\n dtype=tf.string)\n discovered_prefixes = tf.constant(['a', 'b', 'c', 'd', 'e'],\n dtype=tf.string)\n round_num = tf.constant(1)\n num_sub_rounds = tf.constant(1)\n # Force an empty dataset that yields tf.string. 
Using `from_tensor_slices`\n # defaults to yielding tf.int32 values.\n sample_data = tf.data.Dataset.from_generator(\n generator=lambda: iter(()), output_types=tf.string, output_shapes=())\n client_output = triehh_tf.client_update(\n sample_data, discovered_prefixes, possible_prefix_extensions, round_num,\n num_sub_rounds, max_num_prefixes, max_user_contribution,\n tf.constant(triehh_tf.DEFAULT_TERMINATOR, dtype=tf.string))\n\n expected_client_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n self.assertAllEqual(client_output.client_votes, expected_client_votes)\n\n def test_client_update_works_on_empty_discovered_prefixes(self):\n max_num_prefixes = tf.constant(10)\n max_user_contribution = tf.constant(10)\n possible_prefix_extensions = tf.constant(\n ['a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR],\n dtype=tf.string)\n discovered_prefixes = tf.constant([], dtype=tf.string)\n round_num = tf.constant(1)\n num_sub_rounds = tf.constant(1)\n sample_data = tf.data.Dataset.from_tensor_slices(\n ['a', '', 'abc', 'bac', 'abb', 'aaa', 'acc', 'hi'])\n client_output = triehh_tf.client_update(\n sample_data, discovered_prefixes, possible_prefix_extensions, round_num,\n num_sub_rounds, max_num_prefixes, max_user_contribution,\n tf.constant(triehh_tf.DEFAULT_TERMINATOR, dtype=tf.string))\n\n expected_client_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n self.assertAllEqual(client_output.client_votes, expected_client_votes)\n\n def test_get_extended_prefix_candidates_works_as_expected(self):\n extensions_wo_terminator = tf.constant(['a', 'b', 'c', 'd'],\n dtype=tf.string)\n discovered_prefixes = tf.constant(['a', 'b', 'c'], dtype=tf.string)\n extended_prefix_candidates = triehh_tf.get_extended_prefix_candidates(\n discovered_prefixes, extensions_wo_terminator)\n expected_extended_prefix_candidates = tf.constant([\n 'aa', 'ab', 'ac', 'ad', 'ba', 'bb', 'bc', 'bd', 'ca', 'cb', 'cc', 'cd'\n ],\n dtype=tf.string)\n self.assertSetAllEqual(extended_prefix_candidates,\n expected_extended_prefix_candidates)\n\n def test_extend_prefixes_works_as_expected(self):\n extensions_wo_terminator = tf.constant(['a', 'b', 'c', 'd'],\n dtype=tf.string)\n discovered_prefixes = tf.constant(['a', 'b', 'c'], dtype=tf.string)\n threshold = tf.constant(1)\n max_num_prefixes = tf.constant(3)\n prefixes_votes = tf.constant([4, 2, 3, 0, 7, 1, 0, 0, 0, 0, 0, 8],\n dtype=tf.int32)\n extended_prefixes = triehh_tf.extend_prefixes(prefixes_votes,\n discovered_prefixes,\n extensions_wo_terminator,\n max_num_prefixes, threshold)\n expected_extended_prefixes = tf.constant(['cd', 'ba', 'aa'],\n dtype=tf.string)\n self.assertSetAllEqual(extended_prefixes, expected_extended_prefixes)\n\n def test_extend_prefixes_with_threshold_works_as_expected(self):\n extensions_wo_terminator = tf.constant(['a', 'b', 'c', 'd'],\n dtype=tf.string)\n discovered_prefixes = tf.constant(['a', 'b', 'c'], dtype=tf.string)\n threshold = tf.constant(3)\n max_num_prefixes = tf.constant(20)\n prefixes_votes = tf.constant([4, 2, 3, 0, 7, 1, 0, 0, 0, 0, 0, 8],\n dtype=tf.int32)\n extended_prefixes = triehh_tf.extend_prefixes(prefixes_votes,\n 
discovered_prefixes,\n extensions_wo_terminator,\n max_num_prefixes, threshold)\n expected_extended_prefixes = tf.constant(['aa', 'ac', 'ba', 'cd'],\n dtype=tf.string)\n self.assertSetAllEqual(extended_prefixes, expected_extended_prefixes)\n\n def test_accumulate_server_votes_works_as_expected(self):\n discovered_prefixes = ['a', 'b']\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n initial_votes = tf.constant(\n [[1, 2, 1, 0, 0], [1, 2, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(0, dtype=tf.int32),\n accumulated_votes=initial_votes)\n\n sub_round_votes = tf.constant(\n [[1, 2, 1, 0, 0], [1, 2, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.accumulate_server_votes(server_state,\n sub_round_votes)\n expected_accumulated_votes = tf.constant(\n [[2, 4, 2, 0, 0], [2, 4, 2, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_accumulate_server_votes_and_decode_works_as_expected(self):\n max_num_prefixes = tf.constant(4)\n threshold = tf.constant(1)\n possible_prefix_extensions = [\n 'a', 'n', 's', 't', 'u', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['su', 'st']\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n initial_votes = tf.constant([[1, 2, 1, 0, 0, 0], [1, 2, 1, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(3, dtype=tf.int32),\n accumulated_votes=initial_votes)\n\n sub_round_votes = tf.constant([[3, 3, 1, 0, 0, 0], [5, 1, 1, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.accumulate_server_votes_and_decode(\n server_state, possible_prefix_extensions, sub_round_votes,\n max_num_prefixes, threshold)\n\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n expected_discovered_prefixes = tf.constant(['sta', 'sun', 'sua', 'stn'],\n dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n 
server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n\n def test_accumulate_server_votes_and_decode_threshold_works_as_expected(self):\n max_num_prefixes = tf.constant(4)\n threshold = tf.constant(5)\n possible_prefix_extensions = [\n 'a', 'n', 's', 't', 'u', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['su', 'st']\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n initial_votes = tf.constant([[1, 2, 1, 0, 0, 0], [1, 2, 1, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(3, dtype=tf.int32),\n accumulated_votes=initial_votes)\n\n sub_round_votes = tf.constant([[3, 3, 1, 0, 0, 0], [5, 1, 1, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.accumulate_server_votes_and_decode(\n server_state, possible_prefix_extensions, sub_round_votes,\n max_num_prefixes, threshold)\n\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n expected_discovered_prefixes = tf.constant(['sta', 'sun'], dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n\n def test_server_update_works_as_expected(self):\n max_num_prefixes = tf.constant(10)\n threshold = tf.constant(1)\n num_sub_rounds = tf.constant(1, dtype=tf.int32)\n possible_prefix_extensions = [\n 'a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['a', 'b', 'c', 'd', 'e']\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(1, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes,\n len(possible_prefix_extensions)]))\n\n sub_round_votes = tf.constant(\n [[10, 9, 8, 7, 6, 0], [5, 4, 3, 2, 1, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.server_update(server_state,\n possible_prefix_extensions,\n sub_round_votes, num_sub_rounds,\n max_num_prefixes, threshold)\n expected_discovered_prefixes = tf.constant(\n ['aa', 'ab', 'ac', 'ad', 'ae', 'ba', 'bb', 'bc', 'bd', 'be'],\n 
dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_server_update_works_on_empty_discovered_prefixes(self):\n max_num_prefixes = tf.constant(10)\n threshold = tf.constant(1)\n num_sub_rounds = tf.constant(1, dtype=tf.int32)\n possible_prefix_extensions = [\n 'a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = []\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(1, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes,\n len(possible_prefix_extensions)]))\n\n sub_round_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.server_update(server_state,\n possible_prefix_extensions,\n sub_round_votes, num_sub_rounds,\n max_num_prefixes, threshold)\n expected_discovered_prefixes = tf.constant([], dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_server_update_threshold_works_as_expected(self):\n max_num_prefixes = tf.constant(10)\n threshold = tf.constant(5)\n num_sub_rounds = tf.constant(1, dtype=tf.int32)\n possible_prefix_extensions = [\n 'a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['a', 'b', 'c', 'd', 'e']\n discovered_heavy_hitters = 
[]\n heavy_hitters_counts = []\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(1, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes,\n len(possible_prefix_extensions)]))\n\n sub_round_votes = tf.constant(\n [[10, 9, 8, 7, 6, 0], [5, 4, 3, 2, 1, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.server_update(server_state,\n possible_prefix_extensions,\n sub_round_votes, num_sub_rounds,\n max_num_prefixes, threshold)\n expected_discovered_prefixes = tf.constant(\n ['aa', 'ab', 'ac', 'ad', 'ae', 'ba'], dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_server_update_finds_heavy_hitters(self):\n max_num_prefixes = tf.constant(10)\n threshold = tf.constant(1)\n num_sub_rounds = tf.constant(1, dtype=tf.int32)\n possible_prefix_extensions = [\n 'a', 'b', 'c', 'd', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['a', 'b', 'c', 'd', triehh_tf.DEFAULT_TERMINATOR]\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(1, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes,\n len(possible_prefix_extensions)]))\n\n sub_round_votes = tf.constant(\n [[10, 9, 8, 7, 6], [5, 4, 3, 0, 4], [2, 1, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.server_update(server_state,\n possible_prefix_extensions,\n sub_round_votes, num_sub_rounds,\n max_num_prefixes, threshold)\n expected_discovered_prefixes = tf.constant(\n ['aa', 'ab', 'ac', 'ad', 'ba', 'bb', 'bc', 'ca', 'cb'], dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant(['a', 'b'], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([6, 4], dtype=tf.int32)\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 
0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_server_update_finds_heavy_hitters_with_threshold(self):\n max_num_prefixes = tf.constant(10)\n threshold = tf.constant(5)\n num_sub_rounds = tf.constant(1, dtype=tf.int32)\n possible_prefix_extensions = [\n 'a', 'b', 'c', 'd', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['a', 'b', 'c', 'd', triehh_tf.DEFAULT_TERMINATOR]\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(1, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes,\n len(possible_prefix_extensions)]))\n\n sub_round_votes = tf.constant(\n [[10, 9, 8, 7, 6], [5, 4, 3, 0, 4], [2, 1, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.server_update(server_state,\n possible_prefix_extensions,\n sub_round_votes, num_sub_rounds,\n max_num_prefixes, threshold)\n expected_discovered_prefixes = tf.constant(['aa', 'ab', 'ac', 'ad', 'ba'],\n dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant(['a'], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([6], dtype=tf.int32)\n expected_accumulated_votes = tf.constant(\n [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_server_update_does_not_decode_in_a_subround(self):\n max_num_prefixes = tf.constant(10)\n threshold = tf.constant(1)\n num_sub_rounds = tf.constant(2, dtype=tf.int32)\n possible_prefix_extensions = [\n 'a', 'b', 'c', 'd', 'e', triehh_tf.DEFAULT_TERMINATOR\n ]\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n discovered_prefixes = ['']\n discovered_heavy_hitters = []\n heavy_hitters_counts = []\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant(\n discovered_heavy_hitters, dtype=tf.string),\n heavy_hitters_counts=tf.constant(heavy_hitters_counts, dtype=tf.int32),\n discovered_prefixes=tf.constant(discovered_prefixes, dtype=tf.string),\n round_num=tf.constant(0, 
dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes,\n len(possible_prefix_extensions)]))\n\n sub_round_votes = tf.constant(\n [[1, 2, 1, 2, 0, 0], [2, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]],\n dtype=tf.int32)\n\n server_state = triehh_tf.server_update(server_state,\n possible_prefix_extensions,\n sub_round_votes, num_sub_rounds,\n max_num_prefixes, threshold)\n expected_discovered_prefixes = tf.constant([''], dtype=tf.string)\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n expected_accumulated_votes = sub_round_votes\n\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertAllEqual(server_state.accumulated_votes,\n expected_accumulated_votes)\n\n def test_all_tf_functions_work_together(self):\n clients = 3\n num_sub_rounds = 4\n max_rounds = 6\n max_num_prefixes = 3\n threshold = 1\n max_user_contribution = 100\n roots = (\n string.ascii_lowercase + string.digits + \"'@#-;*:./\" +\n triehh_tf.DEFAULT_TERMINATOR)\n possible_prefix_extensions = list(roots)\n possible_prefix_extensions_num = len(possible_prefix_extensions)\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant([], dtype=tf.string),\n heavy_hitters_counts=tf.constant([], dtype=tf.int32),\n discovered_prefixes=tf.constant([''], dtype=tf.string),\n round_num=tf.constant(0, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes, possible_prefix_extensions_num]))\n\n def create_dataset_fn(client_id):\n del client_id\n return tf.data.Dataset.from_tensor_slices(['hello', 'hey', 'hi'])\n\n client_ids = list(range(100))\n\n client_data = tff.simulation.datasets.ClientData.from_clients_and_fn(\n client_ids=client_ids,\n create_tf_dataset_for_client_fn=create_dataset_fn)\n\n for round_num in range(max_rounds * num_sub_rounds):\n sampled_clients = list(range(clients))\n sampled_datasets = [\n client_data.create_tf_dataset_for_client(client_id)\n for client_id in sampled_clients\n ]\n accumulated_votes = tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes, possible_prefix_extensions_num])\n\n # This is a workaround to clear the graph cache in the `tf.function`; this\n # is necessary because we need to construct a new lookup table every round\n # based on new prefixes.\n client_update = tf.function(triehh_tf.client_update.python_function)\n\n for dataset in sampled_datasets:\n client_output = client_update(\n dataset, server_state.discovered_prefixes,\n possible_prefix_extensions, round_num, tf.constant(num_sub_rounds),\n tf.constant(max_num_prefixes, dtype=tf.int32),\n tf.constant(max_user_contribution, dtype=tf.int32),\n tf.constant(triehh_tf.DEFAULT_TERMINATOR, dtype=tf.string))\n accumulated_votes += client_output.client_votes\n\n server_state = triehh_tf.server_update(\n server_state, possible_prefix_extensions, accumulated_votes,\n tf.constant(num_sub_rounds, dtype=tf.int32),\n 
tf.constant(max_num_prefixes, dtype=tf.int32),\n tf.constant(threshold, dtype=tf.int32))\n\n expected_discovered_heavy_hitters = tf.constant(['hi', 'hey', 'hello'],\n dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([12, 12, 12], dtype=tf.int32)\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n\n def test_all_tf_functions_work_together_high_threshold(self):\n clients = 3\n num_sub_rounds = 4\n max_rounds = 6\n max_num_prefixes = 3\n threshold = 100\n max_user_contribution = 100\n roots = (\n string.ascii_lowercase + string.digits + \"'@#-;*:./\" +\n triehh_tf.DEFAULT_TERMINATOR)\n possible_prefix_extensions = list(roots)\n possible_prefix_extensions_num = len(possible_prefix_extensions)\n possible_prefix_extensions = tf.constant(\n possible_prefix_extensions, dtype=tf.string)\n\n server_state = triehh_tf.ServerState(\n discovered_heavy_hitters=tf.constant([], dtype=tf.string),\n heavy_hitters_counts=tf.constant([], dtype=tf.int32),\n discovered_prefixes=tf.constant([''], dtype=tf.string),\n round_num=tf.constant(0, dtype=tf.int32),\n accumulated_votes=tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes, possible_prefix_extensions_num]))\n\n def create_dataset_fn(client_id):\n del client_id\n return tf.data.Dataset.from_tensor_slices(['hello', 'hey', 'hi'])\n\n client_ids = list(range(100))\n\n client_data = tff.simulation.datasets.ClientData.from_clients_and_fn(\n client_ids=client_ids,\n create_tf_dataset_for_client_fn=create_dataset_fn)\n\n for round_num in range(max_rounds * num_sub_rounds):\n sampled_clients = list(range(clients))\n sampled_datasets = [\n client_data.create_tf_dataset_for_client(client_id)\n for client_id in sampled_clients\n ]\n accumulated_votes = tf.zeros(\n dtype=tf.int32,\n shape=[max_num_prefixes, possible_prefix_extensions_num])\n\n # This is a workaround to clear the graph cache in the `tf.function`; this\n # is necessary because we need to construct a new lookup table every round\n # based on new prefixes.\n client_update = tf.function(triehh_tf.client_update.python_function)\n\n for dataset in sampled_datasets:\n client_output = client_update(\n dataset, server_state.discovered_prefixes,\n possible_prefix_extensions, round_num, tf.constant(num_sub_rounds),\n tf.constant(max_num_prefixes, dtype=tf.int32),\n tf.constant(max_user_contribution, dtype=tf.int32),\n tf.constant(triehh_tf.DEFAULT_TERMINATOR, dtype=tf.string))\n accumulated_votes += client_output.client_votes\n\n server_state = triehh_tf.server_update(\n server_state, possible_prefix_extensions, accumulated_votes,\n tf.constant(num_sub_rounds, dtype=tf.int32),\n tf.constant(max_num_prefixes, dtype=tf.int32),\n tf.constant(threshold, dtype=tf.int32))\n\n expected_discovered_heavy_hitters = tf.constant([], dtype=tf.string)\n expected_heavy_hitters_counts = tf.constant([], dtype=tf.int32)\n expected_discovered_prefixes = tf.constant([], dtype=tf.string)\n\n self.assertSetAllEqual(server_state.discovered_heavy_hitters,\n expected_discovered_heavy_hitters)\n self.assertHistogramsEqual(server_state.discovered_heavy_hitters,\n server_state.heavy_hitters_counts,\n expected_discovered_heavy_hitters,\n expected_heavy_hitters_counts)\n self.assertSetAllEqual(server_state.discovered_prefixes,\n expected_discovered_prefixes)\n\n\nif __name__ == '__main__':\n 
tf.test.main()\n", "# Copyright 2021, Google LLC.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Centralized experiments on the Stackoverflow datasets.\"\"\"\n\nfrom typing import Any, Mapping, Optional\n\nimport tensorflow as tf\n\nfrom fedopt_guide.stackoverflow_transformer import transformer_models\nfrom optimization.shared import keras_metrics\nfrom utils import centralized_training_loop\nfrom utils.datasets import stackoverflow_word_prediction\n\n\ndef run_centralized(optimizer: tf.keras.optimizers.Optimizer,\n num_epochs: int,\n batch_size: int,\n decay_epochs: Optional[int] = None,\n lr_decay: Optional[float] = None,\n vocab_size: int = 10000,\n num_oov_buckets: int = 1,\n dim_embed: int = 96,\n dim_model: int = 512,\n dim_hidden: int = 2048,\n num_heads: int = 8,\n num_layers: int = 1,\n max_position_encoding: int = 1000,\n dropout: float = 0.1,\n num_validation_examples: int = 10000,\n sequence_length: int = 20,\n experiment_name: str = 'centralized_stackoverflow',\n root_output_dir: str = '/tmp/fedopt_guide',\n hparams_dict: Optional[Mapping[str, Any]] = None,\n max_batches: Optional[int] = None):\n \"\"\"Trains a Transformer on the Stack Overflow next word prediction task.\n\n Args:\n optimizer: A `tf.keras.optimizers.Optimizer` used to perform training.\n num_epochs: The number of training epochs.\n batch_size: The batch size, used for train, validation, and test.\n decay_epochs: The number of epochs of training before decaying the learning\n rate. If None, no decay occurs.\n lr_decay: The amount to decay the learning rate by after `decay_epochs`\n training epochs have occurred.\n vocab_size: Vocab size for normal tokens.\n num_oov_buckets: Number of out of vocabulary buckets.\n dim_embed: Dimension of the token embeddings.\n dim_model: Dimension of features of MultiHeadAttention layers.\n dim_hidden: Dimension of hidden layers of the FFN.\n num_heads: Number of attention heads.\n num_layers: Number of Transformer blocks.\n max_position_encoding: Maximum number of positions for position embeddings.\n dropout: Dropout rate.\n num_validation_examples: The number of test examples to use for validation.\n sequence_length: The maximum number of words to take for each sequence.\n experiment_name: The name of the experiment. Part of the output directory.\n root_output_dir: The top-level output directory for experiment runs. The\n `experiment_name` argument will be appended, and the directory will\n contain tensorboard logs, metrics written as CSVs, and a CSV of\n hyperparameter choices (if `hparams_dict` is used).\n hparams_dict: A mapping with string keys representing the hyperparameters\n and their values. If not None, this is written to CSV.\n max_batches: If set to a positive integer, datasets are capped to at most\n that many batches. 
If set to None or a nonpositive integer, the full\n datasets are used.\n \"\"\"\n\n train_dataset, validation_dataset, test_dataset = stackoverflow_word_prediction.get_centralized_datasets(\n vocab_size,\n sequence_length,\n train_batch_size=batch_size,\n num_validation_examples=num_validation_examples,\n num_oov_buckets=num_oov_buckets,\n )\n\n if max_batches and max_batches >= 1:\n train_dataset = train_dataset.take(max_batches)\n validation_dataset = validation_dataset.take(max_batches)\n test_dataset = test_dataset.take(max_batches)\n\n model = transformer_models.create_transformer_lm(\n vocab_size=vocab_size,\n num_oov_buckets=num_oov_buckets,\n dim_embed=dim_embed,\n dim_model=dim_model,\n dim_hidden=dim_hidden,\n num_heads=num_heads,\n num_layers=num_layers,\n max_position_encoding=max_position_encoding,\n dropout=dropout,\n name='stackoverflow-transformer')\n\n special_tokens = stackoverflow_word_prediction.get_special_tokens(\n vocab_size=vocab_size, num_oov_buckets=num_oov_buckets)\n pad_token = special_tokens.pad\n oov_tokens = special_tokens.oov\n eos_token = special_tokens.eos\n\n model.compile(\n loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n optimizer=optimizer,\n metrics=[\n keras_metrics.MaskedCategoricalAccuracy(\n name='accuracy_with_oov', masked_tokens=[pad_token]),\n keras_metrics.MaskedCategoricalAccuracy(\n name='accuracy_no_oov', masked_tokens=[pad_token] + oov_tokens),\n keras_metrics.MaskedCategoricalAccuracy(\n name='accuracy_no_oov_or_eos',\n masked_tokens=[pad_token, eos_token] + oov_tokens),\n ])\n\n centralized_training_loop.run(\n keras_model=model,\n train_dataset=train_dataset,\n validation_dataset=validation_dataset,\n test_dataset=test_dataset,\n experiment_name=experiment_name,\n root_output_dir=root_output_dir,\n num_epochs=num_epochs,\n hparams_dict=hparams_dict,\n decay_epochs=decay_epochs,\n lr_decay=lr_decay)\n" ]
[ [ "tensorflow.squeeze" ], [ "tensorflow.io.gfile.GFile", "tensorflow.data.Dataset.from_generator" ], [ "tensorflow.constant", "tensorflow.zeros", "tensorflow.shape", "tensorflow.data.Dataset.from_tensor_slices", "tensorflow.test.main", "tensorflow.function" ], [ "tensorflow.keras.losses.SparseCategoricalCrossentropy" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "1.12", "1.4", "1.13", "1.5", "1.7", "0.12", "1.0", "1.2" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "2.7", "2.6", "2.2", "2.3", "2.4", "2.9", "2.5", "2.8", "2.10" ] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "2.7", "2.2", "2.3", "2.4", "2.5", "2.6" ] } ]
lighthall-lab/NiPype
[ "80d3f05d9aa006fa3055785327892e8a89530a80" ]
[ "nipype/utils/misc.py" ]
[ "# -*- coding: utf-8 -*-\n# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-\n# vi: set ft=python sts=4 ts=4 sw=4 et:\n\"\"\"Miscellaneous utility functions\n\"\"\"\nfrom __future__ import (print_function, unicode_literals, division,\n absolute_import)\nfrom builtins import next, str\n\nimport sys\nimport re\nfrom collections import Iterator\n\nfrom distutils.version import LooseVersion\n\nimport numpy as np\nfrom future.utils import raise_from\nfrom future import standard_library\ntry:\n from textwrap import indent as textwrap_indent\nexcept ImportError:\n\n def textwrap_indent(text, prefix):\n \"\"\" A textwrap.indent replacement for Python < 3.3 \"\"\"\n if not prefix:\n return text\n splittext = text.splitlines(True)\n return prefix + prefix.join(splittext)\n\n\nstandard_library.install_aliases()\n\n\ndef human_order_sorted(l):\n \"\"\"Sorts string in human order (i.e. 'stat10' will go after 'stat2')\"\"\"\n\n def atoi(text):\n return int(text) if text.isdigit() else text\n\n def natural_keys(text):\n if isinstance(text, tuple):\n text = text[0]\n return [atoi(c) for c in re.split('(\\d+)', text)]\n\n return sorted(l, key=natural_keys)\n\n\ndef trim(docstring, marker=None):\n if isinstance(docstring, bytes):\n docstring = str(docstring, 'utf-8')\n\n if not docstring:\n return ''\n # Convert tabs to spaces (following the normal Python rules)\n # and split into a list of lines:\n lines = docstring.expandtabs().splitlines()\n # Determine minimum indentation (first line doesn't count):\n indent = sys.maxsize\n for line in lines[1:]:\n stripped = line.lstrip()\n if stripped:\n indent = min(indent, len(line) - len(stripped))\n # Remove indentation (first line is special):\n trimmed = [lines[0].strip()]\n if indent < sys.maxsize:\n for line in lines[1:]:\n # replace existing REST marker with doc level marker\n stripped = line.lstrip().strip().rstrip()\n if marker is not None and stripped and \\\n all([s == stripped[0] for s in stripped]) and \\\n stripped[0] not in [':']:\n line = line.replace(stripped[0], marker)\n trimmed.append(line[indent:].rstrip())\n # Strip off trailing and leading blank lines:\n while trimmed and not trimmed[-1]:\n trimmed.pop()\n while trimmed and not trimmed[0]:\n trimmed.pop(0)\n # Return a single string:\n return '\\n'.join(trimmed)\n\n\ndef find_indices(condition):\n \"Return the indices where ravel(condition) is true\"\n res, = np.nonzero(np.ravel(condition))\n return res\n\n\ndef is_container(item):\n \"\"\"Checks if item is a container (list, tuple, dict, set)\n\n Parameters\n ----------\n item : object\n object to check for .__iter__\n\n Returns\n -------\n output : Boolean\n True if container\n False if not (eg string)\n \"\"\"\n if isinstance(item, str):\n return False\n elif hasattr(item, '__iter__'):\n return True\n else:\n return False\n\n\ndef container_to_string(cont):\n \"\"\"Convert a container to a command line string.\n\n Elements of the container are joined with a space between them,\n suitable for a command line parameter.\n\n If the container `cont` is only a sequence, like a string and not a\n container, it is returned unmodified.\n\n Parameters\n ----------\n cont : container\n A container object like a list, tuple, dict, or a set.\n\n Returns\n -------\n cont_str : string\n Container elements joined into a string.\n\n \"\"\"\n if hasattr(cont, '__iter__') and not isinstance(cont, str):\n cont = ' '.join(cont)\n return str(cont)\n\n\n# Dependency checks. 
Copied this from Nipy, with some modifications\n# (added app as a parameter).\ndef package_check(pkg_name,\n version=None,\n app=None,\n checker=LooseVersion,\n exc_failed_import=ImportError,\n exc_failed_check=RuntimeError):\n \"\"\"Check that the minimal version of the required package is installed.\n\n Parameters\n ----------\n pkg_name : string\n Name of the required package.\n version : string, optional\n Minimal version number for required package.\n app : string, optional\n Application that is performing the check. For instance, the\n name of the tutorial being executed that depends on specific\n packages. Default is *Nipype*.\n checker : object, optional\n The class that will perform the version checking. Default is\n distutils.version.LooseVersion.\n exc_failed_import : Exception, optional\n Class of the exception to be thrown if import failed.\n exc_failed_check : Exception, optional\n Class of the exception to be thrown if version check failed.\n\n Examples\n --------\n package_check('numpy', '1.3')\n package_check('scipy', '0.7', 'tutorial1')\n\n \"\"\"\n\n if app:\n msg = '%s requires %s' % (app, pkg_name)\n else:\n msg = 'Nipype requires %s' % pkg_name\n if version:\n msg += ' with version >= %s' % (version, )\n try:\n mod = __import__(pkg_name)\n except ImportError as e:\n raise_from(exc_failed_import(msg), e)\n if not version:\n return\n try:\n have_version = mod.__version__\n except AttributeError as e:\n raise_from(\n exc_failed_check('Cannot find version for %s' % pkg_name), e)\n if checker(have_version) < checker(version):\n raise exc_failed_check(msg)\n\n\ndef str2bool(v):\n if isinstance(v, bool):\n return v\n lower = v.lower()\n if lower in (\"yes\", \"true\", \"t\", \"1\"):\n return True\n elif lower in (\"no\", \"false\", \"n\", \"f\", \"0\"):\n return False\n else:\n raise ValueError(\"%s cannot be converted to bool\" % v)\n\n\ndef flatten(S):\n if S == []:\n return S\n if isinstance(S[0], list):\n return flatten(S[0]) + flatten(S[1:])\n return S[:1] + flatten(S[1:])\n\n\ndef unflatten(in_list, prev_structure):\n if not isinstance(in_list, Iterator):\n in_list = iter(in_list)\n\n if not isinstance(prev_structure, list):\n return next(in_list)\n\n out = []\n for item in prev_structure:\n out.append(unflatten(in_list, item))\n return out\n\n\ndef normalize_mc_params(params, source):\n \"\"\"\n Normalize a single row of motion parameters to the SPM format.\n\n SPM saves motion parameters as:\n x Right-Left (mm)\n y Anterior-Posterior (mm)\n z Superior-Inferior (mm)\n rx Pitch (rad)\n ry Yaw (rad)\n rz Roll (rad)\n \"\"\"\n if source.upper() == 'FSL':\n params = params[[3, 4, 5, 0, 1, 2]]\n elif source.upper() in ('AFNI', 'FSFAST'):\n params = params[np.asarray([4, 5, 3, 1, 2, 0]) + (len(params) > 6)]\n params[3:] = params[3:] * np.pi / 180.\n elif source.upper() == 'NIPY':\n from nipy.algorithms.registration import to_matrix44, aff2euler\n matrix = to_matrix44(params)\n params = np.zeros(6)\n params[:3] = matrix[:3, 3]\n params[-1:2:-1] = aff2euler(matrix)\n\n return params\n\n\ndef dict_diff(dold, dnew, indent=0):\n \"\"\"Helper to log what actually changed from old to new values of\n dictionaries.\n\n typical use -- log difference for hashed_inputs\n \"\"\"\n # First check inputs, since they usually are lists of tuples\n # and dicts are required.\n if isinstance(dnew, list):\n dnew = dict(dnew)\n if isinstance(dold, list):\n dold = dict(dold)\n\n # Compare against hashed_inputs\n # Keys: should rarely differ\n new_keys = set(dnew.keys())\n old_keys = 
set(dold.keys())\n\n diff = []\n if new_keys - old_keys:\n diff += [\" * keys not previously seen: %s\" % (new_keys - old_keys)]\n\n if old_keys - new_keys:\n diff += [\" * keys not presently seen: %s\" % (old_keys - new_keys)]\n\n # Add topical message\n if diff:\n diff.insert(0, \"Dictionaries had differing keys:\")\n\n diffkeys = len(diff)\n\n # Values in common keys would differ quite often,\n # so we need to join the messages together\n for k in new_keys.intersection(old_keys):\n same = False\n try:\n new, old = dnew[k], dold[k]\n same = new == old\n if not same:\n # Since JSON does not discriminate between lists and\n # tuples, we might need to cast them into the same type\n # as the last resort. And let's try to be more generic\n same = old.__class__(new) == old\n except Exception:\n same = False\n if not same:\n diff += [\" * %s: %r != %r\" % (k, dnew[k], dold[k])]\n\n if len(diff) > diffkeys:\n diff.insert(diffkeys, \"Some dictionary entries had differing values:\")\n\n return textwrap_indent('\\n'.join(diff), ' ' * indent)\n" ]
[ [ "numpy.asarray", "numpy.ravel", "numpy.zeros" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]
drienyov/treadmill
[ "812109e31c503a6eddaee2d3f2e1faf2833b6aaf" ]
[ "lib/python/treadmill/cli/scheduler/__init__.py" ]
[ "\"\"\"Top level command for Treadmill reports.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport json\n\nimport click\nimport pandas as pd\nimport tabulate\n\nfrom six.moves import urllib_parse\n\nfrom treadmill import cli\nfrom treadmill import context\nfrom treadmill import plugin_manager\nfrom treadmill import restclient\n\n\ndef fetch_report(cell_api, report_type, match=None, partition=None):\n \"\"\"Fetch a report of the given type and return it as a DataFrame.\"\"\"\n api_urls = context.GLOBAL.cell_api(cell_api)\n path = '/scheduler/{}'.format(report_type)\n\n query = {}\n if match:\n query['match'] = match\n if partition:\n query['partition'] = partition\n\n if query:\n path += '?' + urllib_parse.urlencode(query)\n\n response = restclient.get(api_urls, path).json()\n return pd.DataFrame(response['data'], columns=response['columns'])\n\n\ndef print_report(frame):\n \"\"\"Pretty-print the report.\"\"\"\n if cli.OUTPUT_FORMAT is None:\n frame.replace(True, ' ', inplace=True)\n frame.replace(False, 'X', inplace=True)\n dict_ = frame.to_dict(orient='split')\n del dict_['index']\n\n cli.out(\n tabulate.tabulate(\n dict_['data'], dict_['columns'], tablefmt='simple'\n )\n )\n cli.echo_green('\\nX: designates the factor that prohibits scheduling '\n 'the instance on the given server')\n elif cli.OUTPUT_FORMAT == 'yaml':\n fmt = plugin_manager.load('treadmill.formatters', 'yaml')\n cli.out(fmt.format(frame.to_dict(orient='records')))\n elif cli.OUTPUT_FORMAT == 'json':\n cli.out(frame.to_json(orient='records'))\n elif cli.OUTPUT_FORMAT == 'csv':\n cli.out(frame.to_csv(index=False))\n else:\n cli.out(tabulate.tabulate(frame, frame.columns, tablefmt='simple'))\n\n\ndef init():\n \"\"\"Return top level command handler.\"\"\"\n\n @click.group(cls=cli.make_commands(__name__))\n @click.option(\n '--cell',\n help='Treadmill cell',\n envvar='TREADMILL_CELL',\n callback=cli.handle_context_opt,\n expose_value=False,\n required=True\n )\n @click.option(\n '--api',\n help='Cell API URL',\n metavar='URL',\n envvar='TREADMILL_CELLAPI'\n )\n @click.pass_context\n def run(ctx, api):\n \"\"\"Report scheduler state.\"\"\"\n if not ctx.obj:\n ctx.obj = {} # Doesn't seem to exist in testing\n ctx.obj['api'] = api\n\n return run\n" ]
[ [ "pandas.DataFrame" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] } ]
ruomingp/lingvo
[ "ba59e8c46471be77d5d3c48177f0f0dd8d5d44e9", "ba59e8c46471be77d5d3c48177f0f0dd8d5d44e9", "ba59e8c46471be77d5d3c48177f0f0dd8d5d44e9", "ba59e8c46471be77d5d3c48177f0f0dd8d5d44e9" ]
[ "lingvo/jax/eval.py", "lingvo/jax/base_input.py", "lingvo/core/conformer_layer_test.py", "lingvo/core/attention.py" ]
[ "# Lint as: python3\n# Copyright 2021 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Evaluation loop for lingvo Jax model.\"\"\"\n\nimport contextlib\nimport functools\nimport hashlib\nimport os\nimport time\nfrom typing import List, Optional, Sequence\n\nfrom absl import logging\nimport jax\nfrom jax.experimental import maps\nfrom jax.experimental import mesh_utils\nfrom lingvo.jax import base_input\nfrom lingvo.jax import base_layer\nfrom lingvo.jax import base_metrics\nfrom lingvo.jax import base_model_params\nfrom lingvo.jax import base_task\nfrom lingvo.jax import checkpoint_pb2\nfrom lingvo.jax import model_utils\nfrom lingvo.jax import py_utils\nfrom lingvo.jax import pytypes\nfrom lingvo.jax import summary_utils\nfrom lingvo.jax import train_states\nfrom lingvo.jax import trainer_lib\nimport tensorflow.compat.v2 as tf\n\nfrom lingvo.jax import checkpoints\nfrom lingvo.jax import io_utils\n\nBaseModelParamsT = base_model_params.BaseModelParamsT\nCheckpointType = checkpoint_pb2.CheckpointType\nInstantiableParams = py_utils.InstantiableParams\nNestedMap = py_utils.NestedMap\nJTensor = pytypes.JTensor\nNestedJTensor = pytypes.NestedJTensor\nTrainState = train_states.TrainState\nSummaryWriter = tf.summary.SummaryWriter\n\n\ndef maybe_ema(model_states):\n \"\"\"Finds the ema state from optimizer states.\"\"\"\n if not model_states.opt_states:\n return model_states\n for i in range(len(model_states.opt_states[0])):\n if 'ema' in model_states.opt_states[0][i]:\n return TrainState(\n step=model_states.step,\n mdl_vars=model_states.opt_states[0][i].ema,\n opt_states={})\n return model_states\n\n\ndef evaluate(\n model_name: str,\n job_log_dir: Optional[str],\n multi_host_checkpointing: Optional[bool],\n maybe_use_persistence_checkpointing: bool,\n) -> None:\n \"\"\"Runs the evaluation loop on the entire eval data set.\n\n Args:\n model_name: The name of the model from the registry to evaluate.\n job_log_dir: The directory for the job logs.\n multi_host_checkpointing: Whether to use multi-host checkpointing.\n maybe_use_persistence_checkpointing: If set, it will try to use\n persistence-based checkpointing if suitable.\n \"\"\"\n model_config = model_utils.get_model(model_name)()\n task_p = model_config.task()\n model_p = task_p.model\n eval_input_p = [v for v in model_config.datasets() if not v.is_training]\n for inp in eval_input_p:\n inp.num_infeed_hosts = jax.process_count()\n inp.infeed_host_index = jax.process_index()\n\n if model_p.device_mesh is not None:\n checkpoint_type = checkpoints.retrieve_checkpoint_type(\n multi_host_checkpointing, maybe_use_persistence_checkpointing, task_p)\n evaluate_spmd_model(task_p, eval_input_p, job_log_dir, checkpoint_type)\n else:\n evaluate_pmap_model(task_p, eval_input_p, job_log_dir)\n\n\ndef evaluate_pmap_model(\n task_p: InstantiableParams,\n eval_input_p: Sequence[InstantiableParams],\n job_log_dir: 
Optional[str],\n) -> None:\n \"\"\"Runs the evaluation loop on the entire test dataset for PMAP model.\n\n Args:\n task_p: Params for the task encapsulating the data parallel model.\n eval_input_p: List of params for the eval data input pipelines.\n job_log_dir: Directory for the job logs.\n \"\"\"\n logging.info('Using pmap for data parallelism.')\n jax_task = task_p.Instantiate()\n eval_input_pipelines = [input_p.Instantiate() for input_p in eval_input_p]\n # TODO(shafey): Retrieve the seeds from the model definition instead.\n prng_key = jax.random.PRNGKey(1234)\n prng_key, init_key = jax.random.split(prng_key)\n\n checkpoint_dir = os.path.join(job_log_dir, 'checkpoints')\n # Restore flax checkpoints still required backward variables in TrainState\n # TODO(pax): add is_eval=True to initialize_model_state\n model_states = trainer_lib.initialize_model_state(jax_task, init_key)\n # Pmap does not use GDA, and so global_mesh and mesh_axes are None.\n model_states = checkpoints.restore_checkpoint(model_states, checkpoint_dir)\n replicated_model_states = trainer_lib.replicate_model_state(model_states)\n logging.info('replicated_model_states: %s',\n jax.tree_map(lambda x: x.shape, replicated_model_states))\n # From now on, different replicas should use different random seeds.\n # Here, each process will have its unique prng_key.\n # prng_key will be further split so that each core on a host will get\n # different prng_key.\n prng_key = jax.random.fold_in(prng_key, jax.process_index())\n logging.info('root prng_key: %s', prng_key)\n\n def eval_step(mdl_states, prng_key, inputs):\n mdl_states = trainer_lib.train_state_for_eval_step(mdl_states)\n return trainer_lib.eval_step_single_learner(\n jax_task,\n mdl_states,\n prng_key,\n inputs,\n data_parallel_axis_name='batch',\n fprop_dtype=jax_task.model.fprop_dtype)\n\n num_devices = jax.local_device_count()\n prng_key, eval_key = jax.random.split(prng_key)\n eval_prng_seed = jax.random.split(eval_key, num=num_devices)\n logging.info('eval prng_seed: %s', eval_prng_seed)\n\n p_eval_step = jax.pmap(eval_step, axis_name='batch')\n\n logging.info('Evaluation loop starting...')\n summary_base_dir = os.path.join(job_log_dir, 'summaries')\n summary_eval_dirs = [\n os.path.join(summary_base_dir, f'eval_test_{split}')\n for split, _ in enumerate(eval_input_p)\n ]\n\n num_steps = [\n -1 if p.reset_for_eval else p.eval_loop_num_batches for p in eval_input_p\n ]\n last_checkpoint = checkpoints.latest_checkpoint(checkpoint_dir)\n with contextlib.ExitStack() as exit_stack:\n eval_summary_writers = [\n exit_stack.enter_context(summary_utils.get_summary_writer(d))\n for d in summary_eval_dirs\n ]\n\n while True:\n step_i = int(jax.device_get(replicated_model_states.step)[0])\n eval_step = functools.partial(p_eval_step,\n maybe_ema(replicated_model_states),\n eval_prng_seed)\n # Run the eval loop.\n model_utils.run_eval_loop_over_test_splits(\n num_steps,\n eval_step,\n eval_summary_writers,\n step_i,\n eval_input_pipelines,\n reshard_inputs=True)\n # If the last check point evaluated matches max train steps, exit.\n if last_checkpoint is not None:\n last_ckpt_step = checkpoints.get_step_from_checkpoint_asset(\n last_checkpoint)\n exceeded_ckpt = last_ckpt_step + task_p.train.save_interval_steps\n if exceeded_ckpt >= task_p.train.num_train_steps:\n break\n # Release replicated_model_states.\n del replicated_model_states\n new_checkpoint = checkpoints.latest_checkpoint(checkpoint_dir)\n while new_checkpoint == last_checkpoint:\n # Sleep for a minute.\n time.sleep(60)\n 
new_checkpoint = checkpoints.latest_checkpoint(checkpoint_dir)\n      # There must be a new checkpoint here.\n      logging.info('Found new checkpoint: %s', new_checkpoint)\n      model_states = checkpoints.restore_checkpoint(model_states,\n                                                    checkpoint_dir)\n      replicated_model_states = trainer_lib.replicate_model_state(model_states)\n      last_checkpoint = new_checkpoint\n\n\ndef evaluate_spmd_model(\n    task_p: InstantiableParams,\n    eval_input_p: Sequence[InstantiableParams],\n    job_log_dir: Optional[str],\n    checkpoint_type: CheckpointType,\n) -> None:\n  \"\"\"Runs the evaluation loop on the entire test dataset for SPMD model.\n\n  Args:\n    task_p: Params of the task encapsulating an SPMD model.\n    eval_input_p: List of Params for the eval data pipelines.\n    job_log_dir: Directory for the job logs.\n    checkpoint_type: Type of model checkpointing method to use.\n  \"\"\"\n  logging.info('Using SPMD sharding for model parallelism.')\n  eval_input_pipelines = [input_p.Instantiate() for input_p in eval_input_p]\n  # TODO(bf-jax): Retrieve the seeds from the model definition instead.\n  prng_key = jax.random.PRNGKey(1234)\n  prng_key, init_key = jax.random.split(prng_key)\n\n  checkpoint_dir = os.path.join(job_log_dir, 'checkpoints')\n  # Note that GDA checkpoint requires all processes to participate in\n  # checkpointing but it does not require a separate checkpoint_dir per process.\n  if checkpoint_type == CheckpointType.CHECKPOINT_MULTI_HOST_FLAX:\n    checkpoint_task_dir = os.path.join(checkpoint_dir,\n                                       f'{jax.process_index():03d}')\n  else:\n    checkpoint_task_dir = checkpoint_dir\n\n  multi_host_checkpointing = bool(checkpoint_type in {\n      CheckpointType.CHECKPOINT_MULTI_HOST_FLAX, CheckpointType.CHECKPOINT_GDA\n  })\n\n  def get_shape_dtype(x):\n    y = jax.ShapeDtypeStruct(x.shape, x.dtype)\n    return y\n\n  # Do not use eval_input_pipelines[0] directly.\n  sample_model_inputs = eval_input_p[0].Instantiate().get_next()\n  inputs_shape = tf.nest.map_structure(get_shape_dtype, sample_model_inputs)\n\n  jax_task = task_p.Instantiate()\n  model_p = task_p.model\n  mesh_shape = model_p.device_mesh.shape\n  device_mesh = mesh_utils.create_device_mesh(mesh_shape)\n  logging.info('device_mesh: %s', device_mesh)\n  global_mesh = maps.Mesh(device_mesh, model_p.mesh_axis_names)\n  use_gda_checkpoint = jax.config.jax_parallel_functions_output_gda\n  with global_mesh:\n    jax_task.model.instantiate_variable_configs()\n    # Restoring flax checkpoints still requires backward variables in TrainState\n    # TODO(pax): set is_eval=True for all ckpt types.\n    if use_gda_checkpoint:\n      partitioned_specs = jax_task.create_train_state_partition_specs(\n          jax_task.model.vars, discard_opt_states=True)\n      partitioned_train_state = checkpoints.restore_checkpoint(\n          None,\n          checkpoint_task_dir,\n          global_mesh=global_mesh,\n          checkpoint_type=checkpoint_type,\n          state_specs=partitioned_specs)\n      eval_step, inputs_partition_specs = (\n          trainer_lib.get_partitioned_spmd_model_step_fn(\n              jax_task,\n              init_key,\n              partitioned_specs,\n              inputs_shape,\n              is_eval=True))\n    else:\n      (partitioned_train_state, partitioned_specs, inputs_partition_specs, _,\n       eval_step, _) = trainer_lib.partition_spmd_model(task_p, init_key,\n                                                        inputs_shape)\n      partitioned_train_state = checkpoints.restore_checkpoint(\n          partitioned_train_state,\n          checkpoint_task_dir,\n          global_mesh=global_mesh,\n          checkpoint_type=checkpoint_type,\n          state_specs=partitioned_specs)\n\n    logging.info('partitioned_train_state: %s',\n                 jax.tree_map(lambda x: x.shape, partitioned_train_state))\n    if multi_host_checkpointing:\n      
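Both eval loops above share the same checkpoint-polling pattern: evaluate the latest checkpoint, exit once the last evaluated step is within one save interval of task_p.train.num_train_steps (after which no newer checkpoint can appear), and otherwise sleep and re-poll until a new checkpoint shows up. A minimal sketch of that pattern, with latest_checkpoint, get_step and evaluate_fn as hypothetical stand-ins for the checkpoints/model_utils calls used above:

import time


def poll_and_eval(checkpoint_dir, save_interval_steps, num_train_steps,
                  latest_checkpoint, get_step, evaluate_fn, poll_secs=60):
  """Evaluates every new checkpoint until training is known to be finished."""
  last_ckpt = latest_checkpoint(checkpoint_dir)
  while True:
    evaluate_fn(last_ckpt)
    # Training saves every save_interval_steps; once the last evaluated step
    # is within one interval of the final step, no newer checkpoint will be
    # written, so the loop can stop.
    if last_ckpt is not None and (
        get_step(last_ckpt) + save_interval_steps >= num_train_steps):
      break
    new_ckpt = latest_checkpoint(checkpoint_dir)
    while new_ckpt == last_ckpt:
      time.sleep(poll_secs)  # Same one-minute backoff as the loops above.
      new_ckpt = latest_checkpoint(checkpoint_dir)
    last_ckpt = new_ckpt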
py_utils.sync_global_devices(f'checkpointer:restored:{checkpoint_dir}')\n\n # We do not fold in jax.process_index in contrast to the pmap version and\n # use a single global key instead to rely on pjit to split for different\n # replicas.\n logging.info('root prng_key: %s', prng_key)\n prng_key, eval_key = jax.random.split(prng_key)\n logging.info('eval prng_key: %s', eval_key)\n\n logging.info('Evaluation loop starting...')\n summary_base_dir = os.path.join(job_log_dir, 'summaries')\n summary_eval_dirs = [\n os.path.join(summary_base_dir, f'eval_{split}')\n for split, _ in enumerate(eval_input_p)\n ]\n\n num_steps = [-1 if p.reset_for_eval else 1 for p in eval_input_p]\n last_checkpoint = checkpoints.latest_checkpoint(checkpoint_dir)\n with contextlib.ExitStack() as exit_stack:\n eval_summary_writers = [\n exit_stack.enter_context(summary_utils.get_summary_writer(d))\n for d in summary_eval_dirs\n ]\n while True:\n step_i = int(jax.device_get(partitioned_train_state.step))\n eval_step_fn = functools.partial(\n eval_step,\n trainer_lib.train_state_for_eval_step(partitioned_train_state),\n eval_key)\n # Run the eval loop.\n model_utils.run_eval_loop_over_test_splits(\n num_steps,\n eval_step_fn,\n eval_summary_writers,\n step_i,\n eval_input_pipelines,\n inputs_partition_specs,\n inputs_shape,\n global_mesh,\n reshard_inputs=False)\n # If the last check point evaluated matches max train steps, exit.\n if last_checkpoint is not None:\n last_ckpt_step = checkpoints.get_step_from_checkpoint_asset(\n last_checkpoint)\n exceeded_ckpt = last_ckpt_step + task_p.train.save_interval_steps\n if exceeded_ckpt >= task_p.train.num_train_steps:\n break\n new_checkpoint = checkpoints.latest_checkpoint(checkpoint_dir)\n while new_checkpoint == last_checkpoint:\n # Sleep for a minute.\n time.sleep(60)\n new_checkpoint = checkpoints.latest_checkpoint(checkpoint_dir)\n # There must be a new checkpoint here.\n logging.info('Found new checkpoint: %s', new_checkpoint)\n partitioned_train_state = checkpoints.restore_checkpoint(\n None if use_gda_checkpoint else partitioned_train_state,\n checkpoint_task_dir,\n global_mesh=global_mesh,\n checkpoint_type=checkpoint_type,\n state_specs=partitioned_specs)\n if multi_host_checkpointing:\n py_utils.sync_global_devices(\n f'checkpointer:restored:{checkpoint_dir}')\n last_checkpoint = new_checkpoint\n\n\ndef decode(\n model_name: str,\n job_log_dir: Optional[str],\n multi_host_checkpointing: Optional[bool],\n maybe_use_persistence_checkpointing: bool,\n restore_checkpoint_dir: Optional[str],\n restore_checkpoint_step: Optional[int],\n continuous_decode: bool,\n) -> None:\n \"\"\"Runs decoding once on the decoder datasets.\n\n Args:\n model_name: The name of the model from the registry to evaluate.\n job_log_dir: The directory for the job logs.\n multi_host_checkpointing: Whether to use multi-host checkpointing.\n maybe_use_persistence_checkpointing: If set, it will try to use\n persistence-based checkpointing if suitable.\n restore_checkpoint_dir: The directory from which to restore checkpoint.\n restore_checkpoint_step: If set, the checkpoint step to restore. 
If unset,\n try to restore from the latest checkpoint if any.\n continuous_decode: whether to continuously decode on the latest ckpt.\n \"\"\"\n logging.info('running decode_once on model %s restored from %s', model_name,\n restore_checkpoint_dir)\n model_config = model_utils.get_model(model_name)()\n task_p = model_config.task()\n model_p = task_p.model\n decoder_inputs = model_config.decoder_datasets()\n if not decoder_inputs:\n return\n for inp in decoder_inputs:\n inp.num_infeed_hosts = jax.process_count()\n inp.infeed_host_index = jax.process_index()\n\n if model_p.device_mesh is not None:\n if continuous_decode:\n raise NotImplementedError('http://b/214589358: not supported')\n checkpoint_type = checkpoints.retrieve_checkpoint_type(\n multi_host_checkpointing, maybe_use_persistence_checkpointing, task_p)\n decode_once_spmd_model(task_p, decoder_inputs, job_log_dir, checkpoint_type,\n restore_checkpoint_dir, restore_checkpoint_step)\n else:\n decode_pmap_model(task_p, decoder_inputs, job_log_dir,\n restore_checkpoint_dir, restore_checkpoint_step,\n continuous_decode)\n\n\ndef _get_dir_names(input_p: Sequence[InstantiableParams]) -> Sequence[str]:\n \"\"\"Returns a list of same length for parent dir names for each dataset.\"\"\"\n uniq_names = set()\n ret = []\n for idx, p in enumerate(input_p):\n name = p.name or f'decode_test_{idx}'\n if p.name and p.name in uniq_names:\n name = f'{p.name}_{idx}'\n if name in uniq_names:\n suffix = hashlib.md5(name.encode()).hexdigest()[-5:]\n name = f'{name}_{suffix}'\n assert name not in uniq_names\n uniq_names.add(name)\n ret.append(name)\n return ret\n\n\ndef _get_step(step: base_layer.JTensorOrPartitionSpec) -> int:\n \"\"\"Returns an int for the current global step.\"\"\"\n if step.ndim == 0:\n return jax.device_get(step)\n if step.ndim == 1:\n return jax.device_get(step[0])\n raise ValueError(\n f'Expecting a replicated 1D global step (got ndim=`{step.ndim}`).')\n\n\ndef _get_filename(step: base_layer.JTensorOrPartitionSpec) -> str:\n \"\"\"Returns a filename for the given step.\"\"\"\n step_num = _get_step(step)\n return f'decoder_out_{step_num}_shard_{jax.process_index()}'\n\n\ndef decode_pmap_model(\n task_p: InstantiableParams,\n input_p: Sequence[InstantiableParams],\n job_log_dir: Optional[str],\n restore_checkpoint_dir: Optional[str],\n restore_checkpoint_step: Optional[int],\n continuous_decode: bool,\n) -> None:\n \"\"\"Runs the decoding on the entire decoder datasets for a PMAP model.\n\n Args:\n task_p: Params of the task encapsulating a the data parallel model.\n input_p: List of input params to be decoded.\n job_log_dir: Directory for the job logs.\n restore_checkpoint_dir: The directory from which to restore checkpoint. If\n None, uses job_log_dir.\n restore_checkpoint_step: If set, the checkpoint step to restore. 
If unset,\n try to restore from the latest checkpoint if any.\n continuous_decode: whether to continuously decode on the latest ckpt.\n \"\"\"\n if continuous_decode and restore_checkpoint_step is not None:\n raise ValueError('Continuous decoding mode requires restore_checkpoint_step'\n '=None, actual restore_checkpoint_step='\n f'{restore_checkpoint_step}')\n restore_checkpoint_dir = restore_checkpoint_dir or os.path.join(\n job_log_dir, 'checkpoints')\n\n # TODO(shafey): Retrieve the seeds from the model definition instead.\n prng_key = jax.random.PRNGKey(1234)\n prng_key, init_key = jax.random.split(prng_key)\n\n # From now on, different replicas should use different random seeds.\n # Here, each process will have its unique prng_key.\n # prng_key will be further split so that each core on a host will get\n # different prng_key.\n prng_key = jax.random.fold_in(prng_key, jax.process_index())\n logging.info('root prng_key: %s', prng_key)\n prng_key, eval_key = jax.random.split(prng_key)\n prng_seed = jax.random.split(eval_key, num=jax.local_device_count())\n logging.info('decoder prng_seed: %s', prng_seed)\n\n inputs = [p.Instantiate() for p in input_p]\n summary_base_dir = os.path.join(job_log_dir, 'summaries')\n dirnames = _get_dir_names(input_p)\n summary_decode_dirs = [\n os.path.join(summary_base_dir, f'decode_test_{dirnames[split]}')\n for split, _ in enumerate(input_p)\n ]\n with contextlib.ExitStack() as exit_stack:\n summary_writers = [\n exit_stack.enter_context(summary_utils.get_summary_writer(d))\n for d in summary_decode_dirs\n ]\n\n jax_task = task_p.Instantiate()\n # Restore flax checkpoints still required bak variables in TrainState\n # TODO(pax): add is_eval=True to initialize_model_state\n model_states = trainer_lib.initialize_model_state(jax_task, init_key)\n model_states = checkpoints.restore_checkpoint(\n model_states, restore_checkpoint_dir, step=restore_checkpoint_step)\n replicated_model_states = trainer_lib.replicate_model_state(model_states)\n logging.info('replicated_model_states: %s',\n jax.tree_map(lambda x: x.shape, replicated_model_states))\n last_checkpoint = checkpoints.latest_checkpoint(restore_checkpoint_dir)\n\n while True:\n _decode_once_pmap_model(jax_task, task_p, inputs, input_p, prng_seed,\n job_log_dir, replicated_model_states,\n summary_writers)\n if not continuous_decode:\n break\n if last_checkpoint is not None:\n last_ckpt_step = int(last_checkpoint.split('_')[-1])\n exceeded_ckpt = last_ckpt_step + task_p.train.save_interval_steps\n if exceeded_ckpt >= task_p.train.num_train_steps:\n break\n # Release replicated_model_states.\n del replicated_model_states\n new_checkpoint = checkpoints.latest_checkpoint(restore_checkpoint_dir)\n while new_checkpoint == last_checkpoint:\n time.sleep(60)\n new_checkpoint = checkpoints.latest_checkpoint(restore_checkpoint_dir)\n logging.info('Found new checkpoint: %s', new_checkpoint)\n model_states = checkpoints.restore_checkpoint(model_states,\n restore_checkpoint_dir)\n replicated_model_states = trainer_lib.replicate_model_state(model_states)\n last_checkpoint = new_checkpoint\n\n\ndef _decode_once_pmap_model(\n jax_task: base_task.SingleTask,\n task_p: InstantiableParams,\n inputs: List[base_input.BaseInput],\n input_p: Sequence[InstantiableParams],\n prng_seed: JTensor,\n job_log_dir: Optional[str],\n replicated_model_states: train_states.TrainState,\n summary_writers: List[SummaryWriter],\n) -> None:\n \"\"\"Runs the decoding on the entire decoder datasets for a PMAP model.\n\n Args:\n jax_task: 
instantiated model from task_p.\n    task_p: Params for the task encapsulating a data parallel model.\n    inputs: instantiated inputs.\n    input_p: List of input params to be decoded.\n    prng_seed: The prng seed used for decoding.\n    job_log_dir: Directory for the job logs.\n    replicated_model_states: A TrainState object.\n    summary_writers: The summary writer objects to log summaries.\n  \"\"\"\n  model = jax_task.model\n  model_p = task_p.model\n  metrics_p = task_p.metrics\n  if not metrics_p:\n    metrics_p = base_metrics.MeanMetrics.Params()\n  decode_metrics = metrics_p.Instantiate()\n  process_decode_metrics = metrics_p.Instantiate()\n\n  step_i = _get_step(replicated_model_states.step)\n  pmap_axis_name = 'batch'\n\n  def decode_step(mdl_states, prng_key, inputs):\n    mdl_states = trainer_lib.train_state_for_eval_step(mdl_states)\n    metrics, out = trainer_lib.decode_step(model, mdl_states, prng_key, inputs,\n                                           model_p.fprop_dtype)\n    metrics = decode_metrics.aggregate(metrics)\n    return metrics, out\n\n  # As an example, suppose the output leaf from trainer_lib.decode_step()\n  # for each core has shape: [per_core_batch_size, decoding_length].\n  # In the all_gather we set tiled=True, so the output chunks are all\n  # concatenated into the existing batch axis, so we get shape\n  # [num_cores x per_core_batch_size, decoding_length].\n  # In the pmap call we set out_axes=None to not have to manually unreplicate,\n  # so the output of pmap_decode_step() will have the same shape.\n  #\n  # Example code snippet showing this:\n  #   # shape (8, 3, 2)\n  #   x = jnp.tile(jnp.arange(8)[:, None, None],[1, 3, 2])\n  #   # shape (24, 2)\n  #   z = jax.pmap(\n  #       lambda y: jax.lax.all_gather(y+1, axis_name='i', tiled=True),\n  #       axis_name='i', out_axes=None)(x)\n  #\n  # We only aggregate metrics, not `out`, hence the tuple for out_axes.\n  pmap_decode_step = jax.pmap(\n      decode_step, axis_name=pmap_axis_name, out_axes=(None, 0))\n  decode_step_func = functools.partial(pmap_decode_step,\n                                       maybe_ema(replicated_model_states),\n                                       prng_seed)\n\n  num_steps = [\n      -1 if p.reset_for_eval else p.eval_loop_num_batches for p in input_p\n  ]\n  decodes = [list() for _ in input_p]\n  for split, num_split_steps in enumerate(num_steps):\n    logging.info('Start decoding on input %s', input_p[split].name)\n    step_num = 0\n    while num_split_steps < 0 or step_num < num_split_steps:\n      step_num += 1\n      try:\n        batch = inputs[split].get_next()\n      except (tf.errors.OutOfRangeError, StopIteration):\n        inputs[split].reset()\n        break\n      batch = tf.nest.map_structure(py_utils.reshard, batch)\n      batch_metrics, out = decode_step_func(batch)\n      # We store the metrics directly as they have already been aggregated\n      # inside decode_step_func.\n      decode_metrics.store(batch_metrics)\n      logging.info('Finished decoding input batch %d', step_num)\n\n      out = tf.nest.map_structure(py_utils.unshard, out)\n      process_metrics, processed = model.process_decode_out(inputs[split], out)\n      decodes[split].extend(processed)\n      logging.info('Finished processing decoded input batch %d', step_num)\n\n      # Reshard the metrics for pmap.\n      process_decode_metrics.update(process_metrics)\n\n    with summary_writers[split].as_default():\n      decode_metrics.summarize(step_i, 'decode_metrics')\n      process_decode_metrics.summarize(step_i, 'process_decode_metrics')\n\n  basedir = os.path.join(job_log_dir, 'decoder_out')\n  dirnames = _get_dir_names(input_p)\n  filename = _get_filename(replicated_model_states.step)\n  for s in dirnames:\n    dir_path = os.path.join(basedir, s)\n    if not tf.io.gfile.exists(dir_path):\n      tf.io.gfile.makedirs(dir_path)\n  
filenames = [os.path.join(basedir, s, filename) for s in dirnames]\n for split, output_file in enumerate(filenames):\n logging.info('Writing decoder output to %s with %d entries', output_file,\n len(decodes[split]))\n io_utils.WriteKeyValuePairs(output_file, decodes[split])\n\n\ndef decode_once_spmd_model(\n task_p: InstantiableParams,\n input_p: Sequence[InstantiableParams],\n job_log_dir: Optional[str],\n checkpoint_type: CheckpointType,\n restore_checkpoint_dir: str,\n restore_checkpoint_step: Optional[int],\n) -> None:\n \"\"\"Runs the decoding once on the entire decoder datasets for SPMD model.\n\n Args:\n task_p: Params for the task that encapsulates an SPMD model.\n input_p: List of input params to be decoded.\n job_log_dir: Directory for the job logs.\n checkpoint_type: Type of model checkpointing method to use.\n restore_checkpoint_dir: The directory from which to restore checkpoint.\n restore_checkpoint_step: If set, the checkpoint step to restore. If unset,\n try to restore from the latest checkpoint if any.\n \"\"\"\n # TODO(bf-jax): Retrieve the seeds from the model definition instead.\n prng_key = jax.random.PRNGKey(1234)\n prng_key, init_key = jax.random.split(prng_key)\n\n if restore_checkpoint_dir:\n restore_checkpoint_parent_dir = restore_checkpoint_dir\n if checkpoint_type == CheckpointType.CHECKPOINT_MULTI_HOST_FLAX:\n # TODO(zhouwk): add sanity check on number of subdirs and number of\n # processes and fail early if unequal.\n restore_checkpoint_dir = os.path.join(restore_checkpoint_dir,\n f'{jax.process_index():03d}')\n\n multi_host_checkpointing = bool(checkpoint_type in {\n CheckpointType.CHECKPOINT_MULTI_HOST_FLAX, CheckpointType.CHECKPOINT_GDA\n })\n\n sample_inputs = input_p[0].Instantiate().get_next()\n inputs_shape = tf.nest.map_structure(py_utils.get_global_input_shape_dtype,\n sample_inputs)\n\n model_p = task_p.model\n # TODO(b/198356509): This is a hack for now as we need to change some\n # annotations for mode='decode'. 
A future cl will move this logic\n  # to a more generic model_p.update_sharding_params_v1(mode='decode').\n  model_p.lm = model_p.lm.cls.set_sharding_params_v1(\n      model_p.lm,\n      replica_axis=model_p.lm.mesh_axis_names[0],\n      data_axis=model_p.lm.mesh_axis_names[1],\n      mdl_axis=model_p.lm.mesh_axis_names[2],\n      device_ids_mesh=model_p.lm.device_mesh,\n      mesh_axis_names=model_p.lm.mesh_axis_names,\n      mode='decode')\n\n  mesh_shape = model_p.device_mesh.shape\n  device_mesh = mesh_utils.create_device_mesh(mesh_shape)\n  logging.info('device_mesh: %s', device_mesh)\n  jax_task = task_p.Instantiate()\n  global_mesh = maps.Mesh(device_mesh, model_p.mesh_axis_names)\n  with global_mesh:\n    if restore_checkpoint_dir:\n      model = jax_task.model\n      model.instantiate_variable_configs()\n      # Get the metadata from variables instead of actually instantiating them.\n      partitioned_specs = jax_task.create_train_state_partition_specs(\n          model.vars, discard_opt_states=True)\n      # Instantiate the TrainState directly from the checkpoint.\n      partitioned_train_state = checkpoints.restore_checkpoint(\n          None,\n          restore_checkpoint_dir,\n          global_mesh=global_mesh,\n          checkpoint_type=checkpoint_type,\n          state_specs=partitioned_specs,\n          step=restore_checkpoint_step)\n      if multi_host_checkpointing:\n        py_utils.sync_global_devices(\n            f'checkpointer:restored:{restore_checkpoint_parent_dir}')\n      decode_step_fn, inputs_partition_spec = (\n          trainer_lib.get_partitioned_spmd_model_decode_fn(\n              jax_task, init_key, partitioned_specs, inputs_shape))\n    else:\n      # When restore is not specified, randomly initialize the train_state.\n      (partitioned_train_state, inputs_partition_spec, partitioned_specs,\n       decode_step_fn) = trainer_lib.partition_spmd_model_decode(\n           task_p, init_key, inputs_shape)\n    logging.info('partitioned_train_state: %s',\n                 jax.tree_map(lambda x: x.shape, partitioned_train_state))\n    # We do not fold in jax.process_index in contrast to the pmap version and\n    # use a single global key instead to rely on pjit to split for different\n    # replicas.\n    logging.info('root prng_key: %s', prng_key)\n    prng_key, decode_key = jax.random.split(prng_key)\n    logging.info('eval prng_key: %s', decode_key)\n    spmd_decode_step_fn = functools.partial(\n        decode_step_fn,\n        trainer_lib.train_state_for_eval_step(partitioned_train_state),\n        decode_key)\n\n    num_steps = [\n        -1 if p.reset_for_eval else p.eval_loop_num_batches for p in input_p\n    ]\n    inputs = [p.Instantiate() for p in input_p]\n    decodes = [list() for _ in input_p]\n    process_id = jax.process_index()\n\n    for split, num_split_steps in enumerate(num_steps):\n      logging.info('Start decoding on input %s', input_p[split].name)\n      step_num = 0\n      while num_split_steps < 0 or step_num < num_split_steps:\n        step_num += 1\n        try:\n          batch = inputs[split].get_next()\n        except (tf.errors.OutOfRangeError, StopIteration):\n          break\n        if jax.config.jax_parallel_functions_output_gda:\n          batch = py_utils.create_gda(batch, inputs_shape, global_mesh,\n                                      inputs_partition_spec)\n        _, out = spmd_decode_step_fn(batch)\n        # Output is fully replicated now, so it's ok to unreplicate it by\n        # retrieving from device 0 only.\n        out = py_utils.maybe_unreplicate_gda(out)\n        global_batch_size = next(iter(out.values())).shape[0]\n        logging.info('Finished decoding input batch %d with %d examples',\n                     step_num, global_batch_size)\n        # Manually shard the output for each jax process.\n        # We require that all fields in the output are batch major.\n        if global_batch_size % jax.process_count() != 0:\n          raise ValueError(f'Global batch size {global_batch_size} must divide '\n                           
f'evenly by jax process count {jax.process_count()}')\n        for k, v in out.items():\n          if v.shape[0] != global_batch_size:\n            raise ValueError('We require all fields in the decode output '\n                             'to have batch size as the first dim, got shape='\n                             f'{v.shape} with key={k}, expect batch size = '\n                             f'{global_batch_size}')\n        per_process_batch_size = global_batch_size // jax.process_count()\n\n        def shard(x, per_process_batch_size=per_process_batch_size):\n          return x[(process_id *\n                    per_process_batch_size):((process_id + 1) *\n                                             per_process_batch_size)]\n\n        out = jax.tree_map(shard, out)\n        _, processed = jax_task.model.process_decode_out(inputs[split], out)\n        decodes[split].extend(processed)\n        logging.info('Finished processing decoded input batch %d', step_num)\n\n    basedir = os.path.join(job_log_dir, 'decoder_out')\n    dirnames = _get_dir_names(input_p)\n    filename = _get_filename(\n        py_utils.maybe_unreplicate_gda(partitioned_train_state.step))\n    for s in dirnames:\n      dir_path = os.path.join(basedir, s)\n      if not tf.io.gfile.exists(dir_path):\n        tf.io.gfile.makedirs(dir_path)\n    filenames = [os.path.join(basedir, s, filename) for s in dirnames]\n    for split, output_file in enumerate(filenames):\n      logging.info('Writing decoder output to %s with %d entries', output_file,\n                   len(decodes[split]))\n      io_utils.WriteKeyValuePairs(output_file, decodes[split])\n", "# Lint as: python3\n# Copyright 2021 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Base classes for the lingvo Jax input layers.\"\"\"\n\nimport copy\nfrom typing import List, Optional\n\nfrom absl import logging\nfrom lingvo.core import cluster_factory\nfrom lingvo.core import datasource\nfrom lingvo.jax import py_utils\nfrom lingvo.jax import pytypes\nimport tensorflow.compat.v2 as tf\n\nNestedMap = py_utils.NestedMap\nNestedJTensor = pytypes.NestedJTensor\nInstantiableParams = py_utils.InstantiableParams\nParamsT = pytypes.ParamsT\n\n\nclass BaseInputParams(InstantiableParams):\n  \"\"\"A convenient base type for the params of a dataset.\"\"\"\n\n  def __init__(self, cls) -> None:\n    super().__init__(cls)\n    self.Define('name', 'input', 'Name of this input dataset.')\n\n    self.Define(\n        'batch_size', None, 'The (Jax per process) Batch size. '\n        'Each call to get_next() returns a batch with this '\n        'batch size.')\n\n    # Sharding behavior.\n    self.Define(\n        'num_infeed_hosts', 1,\n        'Usually set to jax.process_count(). Implementation must '\n        'ensure that the data is sharded into this many shards.')\n    self.Define(\n        'infeed_host_index', 0,\n        'Usually set to jax.process_index(). Implementation must '\n        'ensure that each instance returns a shard with this index.')\n\n    # Deterministic randomness.\n    self.Define(\n        'input_random_seed', None,\n        'If set, implementation must ensure that this is used to seed '\n        'randomness, e.g. 
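The shard closure above slices the fully replicated, batch-major decode output so that each JAX process only post-processes its own contiguous block of rows. A standalone numpy sketch of that slicing (the function and variable names here are illustrative, not library code):

import numpy as np


def shard_for_process(out, process_id, process_count):
  """Keeps rows [i*n, (i+1)*n) of every batch-major field for process i."""
  global_batch_size = next(iter(out.values())).shape[0]
  # Mirrors the divisibility check above.
  assert global_batch_size % process_count == 0
  n = global_batch_size // process_count
  return {k: v[process_id * n:(process_id + 1) * n] for k, v in out.items()}


out = {'ids': np.arange(8)[:, None], 'scores': np.arange(8.)}
# Process 1 of 4 keeps rows 2..3 of every field.
print(shard_for_process(out, process_id=1, process_count=4))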
when shuffling in a deterministic manner.')\n\n    self.Define(\n        'reset_for_eval', False,\n        'If set, eval will continue until tf.errors.OutOfRange is raised, '\n        'and reset() will be called for each eval. Implementation must ensure that'\n        ' all variant p.infeed_host_index instances raise after the same number'\n        ' of calls to get_next() to ensure synchronization across hosts. If not'\n        ' set, get_next() must never raise.')\n    self.Define(\n        'eval_loop_num_batches', 1,\n        'Num of batches to process per eval loop. Must be >= 1. This value'\n        ' is ignored if reset_for_eval is set True, in which case, this value'\n        ' is dynamically determined by the number of available batches. If '\n        ' reset_for_eval is set to False, then each eval loop will process'\n        ' this many batches. Metrics over those batches will be aggregated'\n        ' and then reported.')\n    self.Define('is_training', False,\n                'Whether or not this dataset is used for model training.')\n\n\nclass BaseInput:\n  \"\"\"Base class for Jax input classes.\n\n  During Lingvo Jax training, on each host an input instance will be\n  created (input_p.Instantiate()), and then get_next() is iteratively\n  called in eager mode to generate one batch of data for each step\n  of train/eval/etc.\n\n  If supported, for eval, reset() is called after each eval step.\n  See p.reset_for_eval below.\n\n  A tf.data based input should inherit this class directly and implement\n  get_next() and reset(). For an example of how to handle sharding for both\n  training and eval data, please refer to the implementation of\n  TFRecordBertInput at tasks/lm/input_generator.py.\n\n  If there is already a Lingvo TF input generator that one would like to\n  use directly, please use LingvoInputAdaptor below.\n  \"\"\"\n  _VALIDATE_BATCH_SIZE_NOT_NONE = True\n\n  @classmethod\n  def Params(cls) -> InstantiableParams:  # pylint:disable=invalid-name\n    \"\"\"Common Params for all inputs.\"\"\"\n    return BaseInputParams(cls)\n\n  def __init__(self, p: ParamsT) -> None:\n    if self._VALIDATE_BATCH_SIZE_NOT_NONE and (p.batch_size is None):\n      raise ValueError('Must specify p.batch_size.')\n    self._params = p.Copy()\n\n  @property\n  def params(self) -> ParamsT:\n    return self._params\n\n  def get_next(self) -> NestedJTensor:\n    raise NotImplementedError\n\n  def reset(self) -> None:\n    pass\n\n  def ids_to_strings(self,\n                     ids: pytypes.NpTensor,\n                     lengths: pytypes.NpTensor,\n                     key: Optional[str] = None) -> List[str]:\n    \"\"\"Converts int ids into strings.\n\n    Args:\n      ids: A matrix of shape [batch, seqlen], each row is a sequence to be\n        converted.\n      lengths: A vector of shape [batch]. lengths[i] is the sequence length of the\n        i-th row. Only the first lengths[i] tokens in ids[i, :] are valid tokens.\n      key: Optional argument to specify whether the tokenizer to use is for the\n        source or target. This is useful for example in a sequence model where the\n        source and targets have different tokenizers. For the source corpus the\n        key should be `src` while for the target corpus the key should be `tgt`.\n\n    Returns:\n      A list of strings of shape [batch]. The converted texts.\n    \"\"\"\n    raise NotImplementedError\n\n\nclass LingvoInputAdaptor(BaseInput):\n  \"\"\"Syntactic sugar for adapting a Lingvo style input for Jax.\n\n  This should be able to wrap any Lingvo TF input generator to be used in\n  Lingvo Jax. Remember to set `p.is_training=True` on the training dataset.\n\n  Some usage caveats below.\n\n  For eval, `p.num_samples` or other similar params like samples_per_summary are\n  completely ignored by Lingvo Jax. 
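To make the BaseInput contract concrete, here is a minimal, hypothetical tf.data-backed subclass: it shards across hosts with Dataset.shard using p.num_infeed_hosts and p.infeed_host_index, raises tf.errors.OutOfRangeError at the end of an epoch (the p.reset_for_eval=True contract), and rebuilds its iterator in reset(). This is a sketch written against the interface above, not code from the library:

import tensorflow.compat.v2 as tf


class RangeInput(BaseInput):  # BaseInput as defined above.
  """Toy input yielding integer batches, sharded across infeed hosts."""

  def __init__(self, p):
    super().__init__(p)
    self._build_iterator()

  def _build_iterator(self):
    p = self.params
    ds = tf.data.Dataset.range(100)
    # Each host keeps every num_infeed_hosts-th example: disjoint shards.
    ds = ds.shard(p.num_infeed_hosts, p.infeed_host_index)
    ds = ds.batch(p.batch_size, drop_remainder=True)
    self._iterator = iter(ds)

  def get_next(self):
    try:
      return next(self._iterator).numpy()
    except StopIteration:
      # With p.reset_for_eval=True, the caller expects a raise per epoch.
      raise tf.errors.OutOfRangeError(None, None, 'end of epoch')

  def reset(self):
    self._build_iterator()


# Example: host 0 of 2 -> batches [0, 2, 4, 6], [8, 10, 12, 14], ...
# p = RangeInput.Params().Set(batch_size=4, num_infeed_hosts=2,
#                             infeed_host_index=0)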
Caller should instead set `p.num_batches` to\n (p.num_samples // batch_size) with `p.reset_for_eval=True` so that each eval\n step reads (approximately) one epoch of eval data. This might not be needed if\n the input already is finite (e.g. with p.repeat_count=1).\n\n When multiple infeed hosts are used, one must take care to ensure that the\n Lingvo input either already uses InfeedContextScope for proper sharding, or\n alternatively do not use the same random seed on all hosts. In other words,\n one must avoid the failure case where each host emits identical training data.\n See also p.allow_fixed_file_random_seed below.\n \"\"\"\n _VALIDATE_BATCH_SIZE_NOT_NONE = False\n _VALIDATE_BATCH_SIZE_NONE = True\n\n @classmethod\n def Params(cls) -> InstantiableParams:\n p = super().Params()\n p.Define('input', None, 'Params of a Lingvo input generator.')\n p.Define(\n 'num_batches', None,\n 'If specified and positive, raises tf.errors.OutOfRange after this many'\n ' batches have been produced. This forces a raise after get_next() is '\n 'called this many times, to support p.reset_for_eval=True.')\n p.Define(\n 'allow_fixed_file_random_seed', False,\n 'If not set, disallows a fixed, non-zero p.input.file_random_seed. '\n 'We disallow by default to avoid having identical input batches across '\n 'different infeed hosts. If set, random seeds are adjusted by '\n 'p.infeed_host_index to ensure different random seeds.')\n p.Define(\n 'cluster_do_eval', False,\n 'Whether to set cluster.do_eval to True for non-training data. '\n 'Note that if set to True, this will change '\n 'cluster.require_sequential_input_order to True as a result. '\n 'Ignored when p.is_training is True.')\n return p\n\n def __init__(self, p):\n if self._VALIDATE_BATCH_SIZE_NONE and p.batch_size is not None:\n raise ValueError('LingvoInputAdaptor does not support p.batch_size. '\n 'Please specify batch size on p.input, e.g. with '\n 'p.input.bucket_batch_limit = [4] or '\n 'p.input.args.batch=4, depeding the Lingvo input '\n f'used. Currently: p.batch_size={p.batch_size}, '\n 'it must be None.')\n super().__init__(p)\n self._cluster = copy.deepcopy(cluster_factory.Current())\n # For Lingvo's Cluster context that may impact the behavior of this input\n # generator, we always set use_tpu to True, and optionally set do_eval\n # for non-training data when configured to do so. All other Cluster params\n # use the default value.\n self._cluster.params.xla_device = 'tpu'\n self._cluster.params.enable_asserts = False\n # This indirectly sets cluster.require_sequential_input_order as well.\n self._cluster.params.do_eval = (not p.is_training and p.cluster_do_eval)\n self._initialize()\n\n def _initialize(self) -> None:\n \"\"\"Initializes the relevant fields of this adaptor input.\"\"\"\n p = self.params\n if hasattr(p.input, 'file_random_seed') and p.input.file_random_seed:\n if not p.allow_fixed_file_random_seed:\n raise ValueError(\n 'Training data using fixed non-zero file_random_seed: '\n f'p.input.file_random_seed={p.input.file_random_seed}. '\n 'This means each host *might* infeed identical batches. 
You can set '\n 'p.input.file_random_seed = 0, or if certain this is intended, '\n 'suppress this error by setting p.allow_fixed_file_random_seed = '\n 'True.')\n # Make sure each host uses a different random seed.\n p.input.file_random_seed += p.infeed_host_index\n # We make self.input public so that users can access its methods like\n # IdsToStrings if needed.\n with py_utils.infeed_context_scope(\n infeed_host_index=p.infeed_host_index,\n num_infeed_hosts=p.num_infeed_hosts), self._cluster:\n self.input = p.input.Instantiate()\n\n if hasattr(self.input, 'datasource') and isinstance(\n self.input.datasource, datasource.TFDatasetSource):\n # For the special case when the input is implemented by a tf.data.Dataset,\n # call eagerly. Using tf.function may result in returning duplicate\n # batches.\n self._get_next_fn = self._get_batch\n else:\n self._get_next_fn = tf.function(self._get_batch)\n self._num_batches_produced = 0\n\n def _get_batch(self) -> NestedMap:\n p = self.params\n with py_utils.infeed_context_scope(\n infeed_host_index=p.infeed_host_index,\n num_infeed_hosts=p.num_infeed_hosts), self._cluster:\n ret = self.input.GetPreprocessedInputBatch()\n # Remove unsupported string (byte) array from input.\n return ret.Filter(lambda v: v.dtype != tf.string)\n\n def get_next(self) -> NestedJTensor:\n p = self.params\n if p.num_batches is not None and p.num_batches > 0:\n if self._num_batches_produced >= p.num_batches:\n raise tf.errors.OutOfRangeError(\n node_def=None,\n op=None,\n message=f'num_batches exceeding {self._num_batches_produced}')\n self._num_batches_produced += 1\n ret = self._get_next_fn()\n return tf.nest.map_structure(lambda x: x.numpy(), ret)\n\n def reset(self) -> None:\n if hasattr(self.input, 'datasource') and isinstance(\n self.input.datasource, datasource.TFDatasetSource):\n self.input.datasource.Reset()\n # reset counter to 0.\n self._num_batches_produced = 0\n return\n # reinstantiate the input and retrace self._get_batch.\n self._initialize()\n\n def ids_to_strings(self,\n ids: pytypes.NpTensor,\n lengths: pytypes.NpTensor,\n key: Optional[str] = None) -> List[str]:\n \"\"\"Converts int ids into strings.\"\"\"\n bytes_list = self.input.IdsToStrings(ids, lengths, key=key).numpy()\n return [b.decode('utf-8') for b in bytes_list]\n\n\nclass LingvoInputAdaptorNewBatchSize(LingvoInputAdaptor):\n \"\"\"A similar adapter as LingvoInputAdaptor supporting a new batch size.\n\n LingvoInputAdaptor uses the batch size specified by the underlying Lingvo\n input. This class, however, allows specifying a smaller p.batch_size.\n This can be useful when the Lingvo input expects a large batch size,\n but the user wants a smaller batch size, e.g. when the Lingvo input uses\n a fixed packing factor to do packing, which can more efficiently pack with\n more data.\n\n We require that the batch size of the underlying Lingvo input must divide\n p.batch_size. 
Internally this class acts as a cache, retrieving the large\n  batches from the parent class, and consuming them by slicing to the\n  smaller batch size specified by the user.\n\n  Example usage:\n      p = ChangeBatchSizeInput.Params().Set(...)\n      p.input.packing_factor = 3.5\n      p.input.bucket_batch_limit = [4096]\n      p.batch_size = 4\n  \"\"\"\n  _VALIDATE_BATCH_SIZE_NOT_NONE = True\n  _VALIDATE_BATCH_SIZE_NONE = False\n\n  def __init__(self, p):\n    super().__init__(p)\n    self._current_batch = super().get_next()\n    self._inner_batch_size = next(iter(self._current_batch.values())).shape[0]\n    logging.info(\n        'The wrapped Lingvo input has batch size %d, the actual input '\n        'has batch size %d.', self._inner_batch_size, p.batch_size)\n    if self._inner_batch_size % p.batch_size != 0:\n      raise ValueError(f'Lingvo input batch size {self._inner_batch_size} '\n                       f'must be a multiple of p.batch_size={p.batch_size}.')\n    self._current_batch_index = 0\n\n  def get_next(self):\n    p = self.params\n    if self._current_batch_index >= self._inner_batch_size:\n      self._current_batch = super().get_next()\n      self._current_batch_index = 0\n\n    def _get_subrows(b):\n      start = self._current_batch_index\n      return b[start:start + p.batch_size]\n\n    ret = tf.nest.map_structure(_get_subrows, self._current_batch)\n    self._current_batch_index += p.batch_size\n    return ret\n\n  def reset(self):\n    super().reset()\n    self._current_batch = super().get_next()\n    self._current_batch_index = 0\n", "# Lint as: python3\n# Copyright 2020 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Tests for conformer layers as in https://arxiv.org/abs/2005.08100.\"\"\"\n# Lint as: PY3\nfrom unittest import mock\nfrom absl.testing import flagsaver\nfrom absl.testing import parameterized\n\nfrom lingvo import compat as tf\nfrom lingvo.core import batch_major_attention\nfrom lingvo.core import bn_layers\nfrom lingvo.core import cluster_factory\nfrom lingvo.core import conformer_layer\nfrom lingvo.core import conv_layers_with_time_padding\nfrom lingvo.core import gshard_builder\nfrom lingvo.core import layers as lingvo_layers\nfrom lingvo.core import py_utils\nfrom lingvo.core import stream_step_test_base\nfrom lingvo.core import test_utils\n\nimport numpy as np\n\n\nclass LConvLayerTest(test_utils.TestCase, parameterized.TestCase):\n\n  @parameterized.named_parameters(\n      ('BN',),\n      ('GN', 'gn'),\n  )\n  def testBasic(self, norm='bn'):\n    batch_size, seqlen, dim = 2, 16, 4\n    inputs = tf.zeros([batch_size, seqlen, dim])\n    paddings = tf.zeros([batch_size, seqlen])\n\n    p = conformer_layer.LConvLayer.CommonParams(input_dim=dim, kernel_size=3)\n    p.name = 'lconv_layer'\n    if norm == 'gn':\n      # default is bn\n      p.conv_norm_layer_tpl = (\n          bn_layers.GroupNormLayer.Params().Set(num_groups=2))\n    elif norm != 'bn':\n      raise ValueError('Only gn and bn are supported.')\n\n    l = p.Instantiate()\n    outputs = l.FPropDefaultTheta(inputs, paddings)\n\n    with self.session() 
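The re-batching in LingvoInputAdaptorNewBatchSize is a cursor over a cached large batch: the wrapped input is pulled again only once the cache is exhausted. A library-free sketch of that cache logic (all names hypothetical):

class Rebatcher:
  """Slices small batches out of large ones produced by `next_large_batch`."""

  def __init__(self, next_large_batch, small_batch_size):
    self._next = next_large_batch  # Callable returning a dict of arrays.
    self._small = small_batch_size
    self._batch = self._next()
    self._inner = len(next(iter(self._batch.values())))
    assert self._inner % self._small == 0  # Mirrors the check above.
    self._index = 0

  def get_next(self):
    if self._index >= self._inner:  # Cache exhausted: pull a new large batch.
      self._batch = self._next()
      self._index = 0
    start, self._index = self._index, self._index + self._small
    return {k: v[start:start + self._small] for k, v in self._batch.items()}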
as sess:\n tf.global_variables_initializer().run()\n out_vals = sess.run(outputs)\n print([x.shape for x in out_vals])\n\n\nclass LConvLayerStreamStepTest(stream_step_test_base.StreamStepTestBase):\n\n def _GetParams(self, **kwargs):\n input_dim = kwargs['input_dim']\n kernel = kwargs['kernel']\n norm_type = kwargs['norm_type']\n\n p = conformer_layer.LConvLayer.CommonParams(\n input_dim=input_dim, is_causal=True, kernel_size=kernel)\n if norm_type == 'ln':\n p.conv_norm_layer_tpl = lingvo_layers.LayerNorm.Params()\n else:\n p.conv_norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(\n num_groups=2, cumulative=True)\n p.name = 'lconv'\n return p\n\n def _FProp(self, layer, inputs, paddings):\n return layer.FProp(layer.theta, inputs, paddings)\n\n def _StreamStep(self, layer, step_inputs, step_paddings, state):\n return layer.StreamStep(layer.theta, step_inputs, step_paddings, state)\n\n def _GetFPropOutput(self, fprop_out):\n return fprop_out[0], fprop_out[1]\n\n @parameterized.named_parameters(\n ('Basic',),\n ('BasicGN', False, 'gn'),\n ('SkipNorm', True),\n )\n def testLeftContext(self, testonly_skip_norm_layers=False, norm_type='ln'):\n with flagsaver.flagsaver(testonly_skip_norm_layers=testonly_skip_norm_layers\n ), cluster_factory.SetEval(True):\n assert norm_type in ('ln', 'gn')\n input_dim, kernel = 2, 3\n self._TestStreamStepHelper(\n num_heads=2, input_dim=input_dim, kernel=kernel, norm_type=norm_type)\n\n\nclass ConformerLayerTest(test_utils.TestCase, parameterized.TestCase):\n\n def __init__(self, *args):\n super().__init__(*args)\n self.batch_size = 2\n self.maxlen = 32\n self.dim = 4\n self.heads = 2\n self.context = 2\n\n def _GetCommonParamsKwargs(self):\n return dict(\n input_dim=self.dim,\n atten_num_heads=self.heads,\n atten_left_context=self.context + 1,\n atten_right_context=self.context,\n kernel_size=3,\n fflayer_hidden_dim=4 * self.dim)\n\n def _GetParams(self, **custom_kwargs):\n kwargs = self._GetCommonParamsKwargs()\n kwargs.update(custom_kwargs)\n p = conformer_layer.ConformerLayer.CommonParams(**kwargs)\n p.name = 'conformer_layer'\n return p\n\n def _GetInputs(self, dtype=tf.float32):\n inputs = np.random.rand(self.batch_size, self.maxlen,\n self.dim).astype(np.float32)\n paddings = np.zeros((self.batch_size, self.maxlen), np.float32)\n\n seqlen = np.random.randint(0, self.maxlen, size=(self.batch_size,))\n for i in range(self.batch_size):\n for j in range(self.maxlen):\n paddings[i][j] = 1. 
if j >= seqlen[i] else 0.\n return tf.constant(inputs, dtype=dtype), tf.constant(paddings, dtype=dtype)\n\n def _GetGrad(self, l, inputs, paddings):\n in_nmap = py_utils.NestedMap(features=inputs, paddings=paddings)\n out_nmap = l.FPropDefaultTheta(in_nmap)\n loss = tf.reduce_sum(out_nmap.features)\n grads = tf.gradients(\n loss,\n l.vars.Flatten(),\n unconnected_gradients=tf.UnconnectedGradients.ZERO)\n return out_nmap.features, grads\n\n @parameterized.named_parameters(\n ('Base',),\n ('Reordered', 'conv_before_mhsa'),\n ('NoLConv', 'mhsa', False),\n ('NoMhsa', 'conv', True),\n ('NoFFStart', 'mhsa_before_conv', True, False),\n ('Transformer', 'mhsa', False, False),\n )\n def testBasic(self,\n layer_order='mhsa_before_conv',\n has_lconv=True,\n has_fflayer_start=True):\n p = self._GetParams()\n p.layer_order = layer_order\n if not has_lconv:\n p.lconv_tpl = None\n if not has_fflayer_start:\n p.fflayer_start_tpl = None\n\n l = p.Instantiate()\n inputs, paddings = self._GetInputs()\n outputs, grads = self._GetGrad(l, inputs, paddings)\n\n with self.session() as sess:\n tf.global_variables_initializer().run()\n out_vals = sess.run(outputs)\n grad_vals = sess.run(grads)\n print([x.shape for x in out_vals])\n print([g.shape for g in grad_vals])\n\n @parameterized.named_parameters(\n ('F32FPropF32Input', tf.float32, tf.float32),\n ('F32FPropBF16Input', tf.float32, tf.bfloat16),\n ('BF16FPropF32Input', tf.bfloat16, tf.float32),\n ('BF16FPropBF16Input', tf.bfloat16, tf.bfloat16),\n )\n def testFPropDtypes(self, fprop_dtype, input_dtype):\n p = self._GetParams()\n # batch_norm does not support bfloat16 on CPU.\n p.lconv_tpl.conv_norm_layer_tpl = (\n bn_layers.GroupNormLayer.Params().Set(num_groups=2))\n p.cls.SetFPropDtype(p, fprop_dtype)\n\n l = p.Instantiate()\n inputs, paddings = self._GetInputs(dtype=input_dtype)\n outputs, grads = self._GetGrad(l, inputs, paddings)\n\n with self.session() as sess:\n tf.global_variables_initializer().run()\n out_vals = sess.run(outputs)\n grad_vals = sess.run(grads)\n print([x.shape for x in out_vals])\n print([g.shape for g in grad_vals])\n\n @parameterized.named_parameters(\n ('DefaultFp32', [], tf.float32),\n ('RegexDtypeFp16', [(r'.*(fflayer_[01]|linear_start|post)/w$', tf.float16)\n ], tf.float16),\n )\n def testFPropDtypesWithListRegexDtypes(self, regex_dtypes, target_dtype):\n p = self._GetParams()\n p.lconv_tpl.conv_norm_layer_tpl = (\n bn_layers.GroupNormLayer.Params().Set(num_groups=2))\n p.list_regex_dtypes = regex_dtypes\n\n l = p.Instantiate()\n inputs, paddings = self._GetInputs()\n outputs, grads = self._GetGrad(l, inputs, paddings)\n\n with self.session() as sess:\n tf.global_variables_initializer().run()\n out_vals = sess.run(outputs)\n grad_vals = sess.run(grads)\n print([x.shape for x in out_vals])\n print([g.shape for g in grad_vals])\n\n tf.assert_type(l.vars.fflayer_start.fflayer.fc[0].w, target_dtype)\n tf.assert_type(l.vars.fflayer_start.fflayer.fc[1].w, target_dtype)\n tf.assert_type(l.vars.fflayer_end.fflayer.fc[0].w, target_dtype)\n tf.assert_type(l.vars.fflayer_end.fflayer.fc[1].w, target_dtype)\n tf.assert_type(l.vars.lconv.linear_start.w, target_dtype)\n tf.assert_type(l.vars.trans_atten.atten.post.w, target_dtype)\n\n @parameterized.named_parameters(\n ('Start', True, False),\n ('End', False, True),\n ('StartAndEnd', True, True),\n ('None', False, False),\n )\n def testMoEFFLayerClassMethodInitParity(self, use_fflayer_start_moe,\n use_fflayer_end_moe):\n \"\"\"Tests Conformer-MoE initializations via classmethods and 
explicitly.\"\"\"\n\n num_experts, num_groups, num_devices, per_expert_capacity_dim = 2, 2, 2, 2\n # Create params setting MoEBuilder params explicitly.\n ref_kwargs = dict()\n if use_fflayer_start_moe:\n # Set MoEBuilder params explicitly.\n ref_kwargs['fflayer_start_tpl'] = gshard_builder.MoEBuilder.Params().Set(\n e_dim=num_experts,\n c_dim=per_expert_capacity_dim,\n num_devices=num_devices,\n num_groups=num_groups)\n if use_fflayer_end_moe:\n ref_kwargs['fflayer_end_tpl'] = gshard_builder.MoEBuilder.Params().Set(\n e_dim=num_experts,\n c_dim=per_expert_capacity_dim,\n num_devices=num_devices,\n num_groups=num_groups)\n ref_p = self._GetParams(**ref_kwargs)\n\n # Params setting MoEBuilder params via classmethod.\n moe_kwargs = dict()\n if use_fflayer_start_moe:\n # Set MoEBuilder params via classmethod.\n moe_kwargs['fflayer_start_tpl'] = conformer_layer.GShardMoELayerParams(\n num_devices, num_groups, num_experts, per_expert_capacity_dim)\n if use_fflayer_end_moe:\n moe_kwargs['fflayer_end_tpl'] = conformer_layer.GShardMoELayerParams(\n num_devices, num_groups, num_experts, per_expert_capacity_dim)\n moe_p = self._GetParams(**moe_kwargs)\n # Verify layer params are equal in both cases.\n with self.subTest('testParamsParity'):\n self.assertCountEqual(ref_p.ToText().split('\\n'),\n moe_p.ToText().split('\\n'))\n\n # Test both initializations and verify moe sublayer.\n with self.subTest('testInit'):\n ref_p.name = 'ref_moe_conformer_layer'\n ref_layer = ref_p.Instantiate()\n moe_p.name = 'classmethod_moe_conformer_layer'\n moe_layer = moe_p.Instantiate()\n for layer in (ref_layer, moe_layer):\n if use_fflayer_start_moe:\n self.assertNotIn('fflayer_start', layer.children)\n self.assertIn('fflayer_start_moe', layer.children)\n if use_fflayer_end_moe:\n self.assertNotIn('fflayer_end', layer.children)\n self.assertIn('fflayer_end_moe', layer.children)\n\n @parameterized.named_parameters(\n ('Start', True, False, 0.593693),\n ('End', False, True, 0.4582923),\n ('StartAndEnd', True, True, 1.0213419),\n ('None', False, False, 0.0),\n )\n def testMoEFFLayerFProp(self, use_fflayer_start_moe, use_fflayer_end_moe,\n expected_aux_loss):\n kwargs = {}\n if use_fflayer_start_moe:\n kwargs['fflayer_start_tpl'] = gshard_builder.MoEBuilder.Params().Set(\n e_dim=2, c_dim=2, num_devices=2)\n if use_fflayer_end_moe:\n kwargs['fflayer_end_tpl'] = gshard_builder.MoEBuilder.Params().Set(\n e_dim=2, c_dim=2, num_devices=2)\n p = self._GetParams(**kwargs)\n l = p.Instantiate()\n inputs, paddings = self._GetInputs()\n inputs = tf.convert_to_tensor(inputs)\n paddings = tf.convert_to_tensor(paddings)\n in_nmap = py_utils.NestedMap(features=inputs, paddings=paddings)\n in_nmap.aux_loss = tf.convert_to_tensor(0., py_utils.FPropDtype(p))\n out_nmap = l.FPropDefaultTheta(in_nmap)\n self.assertIn('aux_loss', out_nmap)\n loss = tf.reduce_sum(out_nmap.features) + 0.01 * out_nmap.aux_loss\n grads = tf.gradients(\n loss,\n l.vars.Flatten(),\n unconnected_gradients=tf.UnconnectedGradients.ZERO)\n\n with self.session() as sess:\n tf.global_variables_initializer().run()\n out_vals = sess.run(out_nmap.features)\n grad_vals = sess.run(grads)\n self.assertEqual(out_nmap.aux_loss.shape, ())\n aux_loss = sess.run(out_nmap.aux_loss)\n self.assertAlmostEqual(expected_aux_loss, aux_loss, places=5)\n print([x.shape for x in out_vals])\n print([g.shape for g in grad_vals])\n\n def testRemat(self):\n inputs, paddings = self._GetInputs()\n base_p = self._GetParams()\n base_p.name = 'base'\n base_p.layer_order = 'conv_before_mhsa'\n\n new_p 
= base_p.Copy()\n new_p.name = 'new'\n new_p.remat = True\n\n base_l = base_p.Instantiate()\n new_l = new_p.Instantiate()\n\n _, base_grads = self._GetGrad(base_l, inputs, paddings)\n base_grads = base_l.vars.Pack(base_grads)\n\n _, new_grads = self._GetGrad(new_l, inputs, paddings)\n new_grads = new_l.vars.Pack(new_grads)\n\n assign_op = [\n tf.assign(dst, src)\n for (src, dst) in zip(base_l.vars.Flatten(), new_l.vars.Flatten())\n ]\n init_op = tf.global_variables_initializer()\n with self.session() as sess:\n sess.run(init_op)\n sess.run(assign_op)\n base_grads_val = sess.run(base_grads)\n new_grads_val = sess.run(new_grads)\n\n for (k, v1), (_, v2) in zip(base_grads_val.FlattenItems(),\n new_grads_val.FlattenItems()):\n self.assertAllClose(v1, v2, msg=k)\n\n @parameterized.named_parameters(\n ('Basic',),\n ('NegativeLocalContext', -1),\n ('NegativeLeftContext', None, -1, None),\n ('NegativeRightContext', None, None, -1),\n ('NegativeContext1', -1, None, -1),\n ('NegativeContext2', None, -1, -1),\n ('NegativeContext3', -1, -1, None),\n ('NegativeContext4', -1, None, -1),\n ('NegativeContext5', -1, -1, -1),\n ('NegativeContext6', None, None, None),\n )\n def testAttenContextParams(self,\n local_context=None,\n left_context=None,\n right_context=None):\n \"\"\"Tests atten context cfg params.\"\"\"\n inputs, paddings = self._GetInputs()\n base_p_kwargs = self._GetCommonParamsKwargs()\n base_p_kwargs['atten_local_context'] = None\n base_p_kwargs['atten_left_context'] = None\n base_p_kwargs['atten_right_context'] = None\n base_p = conformer_layer.ConformerLayer.CommonParams(**base_p_kwargs)\n base_p.name = 'base'\n base_p.layer_order = 'conv_before_mhsa'\n\n new_p_kwargs = self._GetCommonParamsKwargs()\n new_p_kwargs['atten_local_context'] = local_context\n new_p_kwargs['atten_left_context'] = left_context\n new_p_kwargs['atten_right_context'] = right_context\n new_p = conformer_layer.ConformerLayer.CommonParams(**new_p_kwargs)\n new_p.name = 'new'\n new_p.layer_order = 'conv_before_mhsa'\n\n base_l = base_p.Instantiate()\n new_l = new_p.Instantiate()\n\n _, base_grads = self._GetGrad(base_l, inputs, paddings)\n base_grads = base_l.vars.Pack(base_grads)\n\n _, new_grads = self._GetGrad(new_l, inputs, paddings)\n new_grads = new_l.vars.Pack(new_grads)\n\n assign_op = [\n tf.assign(dst, src)\n for (src, dst) in zip(base_l.vars.Flatten(), new_l.vars.Flatten())\n ]\n init_op = tf.global_variables_initializer()\n with self.session() as sess:\n sess.run(init_op)\n sess.run(assign_op)\n base_grads_val = sess.run(base_grads)\n new_grads_val = sess.run(new_grads)\n\n for (k, v1), (_, v2) in zip(base_grads_val.FlattenItems(),\n new_grads_val.FlattenItems()):\n self.assertAllClose(v1, v2, msg=k)\n\n def testCustomAttentionLayer(self):\n p = self._GetParams()\n # Use a custom atten_tpl.\n p.trans_atten_tpl.atten_tpl = (\n batch_major_attention.MultiHeadedFavorAttention.Params().Set(\n num_random_features=4))\n layer = p.Instantiate()\n self.assertIsInstance(layer.trans_atten.atten,\n batch_major_attention.MultiHeadedFavorAttention)\n\n @parameterized.named_parameters(\n ('Basic', 8, 'SWISH', 0.5),\n ('BasicReLU', 16, 'RELU', 1.),\n )\n def testFFlayerParams(self,\n fflayer_hidden_dim=None,\n fflayer_activation=None,\n fflayer_residual_weight=0.5):\n p = self._GetParams(\n fflayer_hidden_dim=fflayer_hidden_dim,\n fflayer_activation=fflayer_activation,\n fflayer_residual_weight=fflayer_residual_weight)\n layer = p.Instantiate()\n\n start_fflayer = layer.fflayer_start\n actual_start_hidden_dim = 
start_fflayer.params.hidden_dim\n actual_start_activation = start_fflayer.params.activation\n actual_start_residual_weight = start_fflayer.params.residual_weight\n end_fflayer = layer.fflayer_end\n actual_end_hidden_dim = end_fflayer.params.hidden_dim\n actual_end_activation = end_fflayer.params.activation\n actual_end_residual_weight = end_fflayer.params.residual_weight\n\n self.assertEqual(fflayer_hidden_dim, actual_start_hidden_dim)\n self.assertEqual(fflayer_activation, actual_start_activation)\n self.assertEqual(fflayer_residual_weight, actual_start_residual_weight)\n self.assertEqual(fflayer_hidden_dim, actual_end_hidden_dim)\n self.assertEqual(fflayer_activation, actual_end_activation)\n self.assertEqual(fflayer_residual_weight, actual_end_residual_weight)\n\n @parameterized.named_parameters(\n ('shared', True),\n ('not_shared', False),\n )\n def testFFlayerWeightSharing(self, fflayer_weight_sharing):\n p = self._GetParams()\n p.fflayer_weight_sharing = fflayer_weight_sharing\n layer = p.Instantiate()\n\n # FFLayer variables will all have same full name iif weights are shared.\n def _VarNamesDebugString(vars_):\n return py_utils.Transform(lambda x: x.name, vars_).DebugString()\n\n fflayer_start_var_names = _VarNamesDebugString(layer.fflayer_start.vars)\n fflayer_end_var_names = _VarNamesDebugString(layer.fflayer_end.vars)\n\n self.assertEqual(fflayer_weight_sharing,\n (fflayer_start_var_names == fflayer_end_var_names))\n\n def testCommonParamsAbuse(self):\n \"\"\"Checks CommonParams() is not called in __init__().\"\"\"\n p = self._GetParams()\n with mock.patch(\n 'lingvo.core.conformer_layer.ConformerLayer.CommonParams',\n autospec=True) as m1:\n with mock.patch(\n 'lingvo.core.conformer_layer.LConvLayer.CommonParams',\n autospec=True) as m2:\n p.Instantiate()\n self.assertFalse(m1.called)\n self.assertFalse(m2.called)\n\n @parameterized.named_parameters(\n ('WithoutRelPosAtten', False),\n ('WithRelPosAtten', True),\n )\n def testApplyGShard(self, use_relative_atten):\n with self.session() as sess:\n conformer_p = conformer_layer.ConformerLayer.CommonParams(\n input_dim=self.dim,\n atten_num_heads=self.heads,\n atten_local_context=self.context,\n use_relative_atten=use_relative_atten,\n kernel_size=2,\n fflayer_hidden_dim=4 * self.dim)\n conformer_p.name = 'conformer_layer'\n conformer_layer.ApplyGshard(\n conformer_p,\n device_mesh=[1, 2],\n proj_w_split_list=[[0, 1], [1, 0]],\n proj_activation_split_list=[[0, -1, 1], [0, -1, -1]],\n atten_dnh_w_split=[0, 1, -1],\n atten_blnh_activation_split=[0, -1, 1, -1],\n atten_bld_activation_split=[0, -1, -1],\n lconv_df_w_split=[0, 1],\n lconv_hwim_w_split=[-1, -1, 1, -1],\n lconv_fd_w_split=[-1, -1],\n lconv_blf_activation_split=[0, -1, 1],\n lconv_bld_activation_split=[0, -1, -1])\n inputs, paddings = self._GetInputs()\n conformer_l = conformer_p.Instantiate()\n outputs = conformer_l.FProp(\n conformer_l.theta,\n py_utils.NestedMap(\n features=tf.convert_to_tensor(inputs),\n paddings=tf.convert_to_tensor(paddings)))\n tf.logging.info('outputs=%s', outputs)\n tf.global_variables_initializer().run()\n out_vals = sess.run(outputs)\n print([x.shape for x in out_vals.Flatten()])\n\n @parameterized.named_parameters(\n ('Dropout', 'dropout_prob', 0.1),\n ('LayerOrder', 'layer_order', 'conv_before_mhsa'),\n ('FFLayerActivation', 'fflayer_activation', 'GELU'),\n ('UseRelativeAttentionTrue', 'use_relative_atten', True),\n ('UseRelativeAttentionFalse', 'use_relative_atten', False),\n ('IsCausal', 'is_causal', True),\n ('ListRegexDtypes', 
'list_regex_dtypes', [('test_regex', tf.float16)]))\n def testCommonParamsSet(self, param_name, param_val):\n \"\"\"Checks values set in CommonParams() correctly.\"\"\"\n\n def _GetMinimalCommonParamsKwargs():\n \"\"\"These args are required to be set to call CommonParams.\"\"\"\n return dict(\n input_dim=2, atten_num_heads=4, kernel_size=3, fflayer_hidden_dim=8)\n\n kwargs = _GetMinimalCommonParamsKwargs()\n kwargs.update({param_name: param_val})\n if param_name == 'is_causal' and param_val:\n kwargs['atten_right_context'] = 0\n kwargs['use_relative_atten'] = False\n p = conformer_layer.ConformerLayer.CommonParams(**kwargs)\n p.name = 'conformer_layer'\n if param_name == 'use_relative_atten':\n atten_cls = p.trans_atten_tpl.atten_tpl.cls\n if param_val:\n self.assertTrue(\n issubclass(atten_cls, batch_major_attention.MultiHeadedAttentionXL),\n msg=atten_cls)\n else:\n self.assertTrue(\n issubclass(atten_cls, batch_major_attention.MultiHeadedAttention),\n msg=atten_cls)\n elif param_name == 'fflayer_activation':\n self.assertEqual(p.fflayer_start_tpl.activation, param_val)\n self.assertEqual(p.fflayer_end_tpl.activation, param_val)\n else:\n self.assertEqual(p.Get(param_name), param_val)\n\n\nclass ConformerLayerStreamStepTest(stream_step_test_base.StreamStepTestBase):\n\n def _GetParams(self, **kwargs):\n input_dim = kwargs['input_dim']\n kernel = kwargs['kernel']\n layer_order = kwargs['layer_order']\n num_heads = kwargs['num_heads']\n left_context = kwargs['left_context']\n right_context = kwargs['right_context']\n ffn_dim = kwargs['ffn_dim']\n # optional params.\n norm_type = kwargs.get('norm_type', 'gn')\n has_lconv = kwargs.get('has_lconv', 'conv2d')\n has_fflayer_start = kwargs.get('has_fflayer_start', True)\n query_stride = kwargs.get('query_stride', 1)\n num_groups = kwargs.get('num_groups', 2)\n\n if layer_order == 'mhsa':\n kernel = None\n has_lconv = False\n p = conformer_layer.ConformerLayer.CommonParams(\n input_dim=input_dim,\n is_causal=True,\n atten_num_heads=num_heads,\n atten_left_context=left_context,\n atten_right_context=right_context,\n use_relative_atten=False,\n query_stride=query_stride,\n fflayer_hidden_dim=ffn_dim,\n kernel_size=kernel,\n layer_order=layer_order)\n if not has_lconv:\n p.lconv_tpl = None\n else:\n if norm_type == 'ln':\n p.lconv_tpl.conv_norm_layer_tpl = lingvo_layers.LayerNorm.Params()\n else:\n p.lconv_tpl.conv_norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(\n num_groups=num_groups, cumulative=True)\n if has_lconv == 'conv2d':\n p.lconv_tpl.depthwise_conv_tpl = (\n conv_layers_with_time_padding.CausalConv2DLayerWithPadding.Params())\n else:\n assert has_lconv == 'depthwise'\n if not has_fflayer_start:\n p.fflayer_start_tpl = None\n\n p.name = 'conformer'\n return p\n\n def _FProp(self, layer, inputs, paddings):\n return layer.FProp(layer.theta,\n py_utils.NestedMap(features=inputs, paddings=paddings))\n\n def _StreamStep(self, layer, step_inputs, step_paddings, state):\n return layer.StreamStep(layer.theta, step_inputs, step_paddings, state)\n\n def _GetFPropOutput(self, fprop_out):\n return fprop_out.features, fprop_out.paddings\n\n @parameterized.named_parameters(\n {\n 'testcase_name': 'Basic',\n },\n {\n 'testcase_name': 'BasicGN',\n 'norm_type': 'gn'\n },\n {\n 'testcase_name': 'BasicGN1',\n 'norm_type': 'gn',\n 'num_groups': 1\n },\n {\n 'testcase_name': 'BasicGN8',\n 'norm_type': 'gn',\n 'num_groups': 8\n },\n {\n 'testcase_name': 'SkipNorm',\n 'testonly_skip_norm_layers': True\n },\n {\n 'testcase_name': 'SkipNormGN',\n 
'testonly_skip_norm_layers': True,\n 'norm_type': 'gn'\n },\n {\n 'testcase_name': 'SkipNormGNR1',\n 'testonly_skip_norm_layers': True,\n 'norm_type': 'gn',\n 'right_context': 1,\n },\n {\n 'testcase_name': 'SkipNormGNR2',\n 'testonly_skip_norm_layers': True,\n 'norm_type': 'gn',\n 'right_context': 2,\n },\n {\n 'testcase_name': 'SkipNormGNStride2',\n 'testonly_skip_norm_layers': True,\n 'norm_type': 'gn',\n 'stride': 2\n },\n {\n 'testcase_name': 'SkipNormGNStride4',\n 'testonly_skip_norm_layers': True,\n 'norm_type': 'gn',\n 'stride': 4\n },\n {\n 'testcase_name': 'SkipNormGNStride2R1',\n 'testonly_skip_norm_layers': True,\n 'norm_type': 'gn',\n 'stride': 2,\n 'right_context': 1\n },\n {\n 'testcase_name': 'SkipNormGNStride4R2',\n 'testonly_skip_norm_layers': True,\n 'norm_type': 'gn',\n 'stride': 4,\n 'right_context': 2\n },\n {\n 'testcase_name': 'Reordered',\n 'layer_order': 'mhsa_before_conv'\n },\n {\n 'testcase_name': 'Conv2D',\n 'has_lconv': 'conv2d',\n },\n {\n 'testcase_name': 'NoLConv',\n 'layer_order': 'mhsa',\n 'has_lconv': False\n },\n {\n 'testcase_name': 'NoMhsa',\n 'layer_order': 'conv'\n },\n {\n 'testcase_name': 'NoFFStart',\n 'layer_order': 'conv_before_mhsa',\n 'has_fflayer_start': False\n },\n {\n 'testcase_name': 'Transformer',\n 'layer_order': 'mhsa',\n 'has_lconv': False,\n 'has_fflayer_start': False\n },\n {\n 'testcase_name': 'TransformerSkipNormR2',\n 'testonly_skip_norm_layers': True,\n 'layer_order': 'mhsa',\n 'has_lconv': False,\n 'has_fflayer_start': False,\n 'right_context': 2,\n },\n {\n 'testcase_name': 'Funnel',\n 'stride': 2,\n 'query_stride': 2,\n },\n {\n 'testcase_name': 'FunnelStride4',\n 'stride': 4,\n 'query_stride': 2,\n },\n )\n def testCommon(self,\n testonly_skip_norm_layers=False,\n norm_type='ln',\n num_groups=2,\n stride=1,\n query_stride=1,\n layer_order='conv_before_mhsa',\n has_lconv='depthwise',\n has_fflayer_start=True,\n right_context=0):\n assert norm_type in ('ln', 'gn'), norm_type\n kwargs = dict(\n input_dim=8,\n kernel=3,\n layer_order=layer_order,\n num_heads=2,\n left_context=3,\n right_context=right_context,\n ffn_dim=4,\n stride=stride,\n query_stride=query_stride,\n norm_type=norm_type,\n has_lconv=has_lconv,\n has_fflayer_start=has_fflayer_start,\n num_groups=num_groups)\n kwargs['tol'] = 1e-5\n with cluster_factory.SetEval(True), flagsaver.flagsaver(\n testonly_skip_norm_layers=testonly_skip_norm_layers):\n self._TestStreamStepHelper(**kwargs)\n\n def testStackingLayerWithRightContext(self):\n tf.random.set_seed(2021)\n kwargs = dict(\n input_dim=8,\n kernel=3,\n num_heads=2,\n left_context=6,\n right_context=3,\n ffn_dim=4,\n stride=2,\n layer_order='mhsa_before_conv',\n num_layers=3)\n with cluster_factory.SetEval(True):\n self._TestRightContextStackingLayersHelper(**kwargs)\n\n\nif __name__ == '__main__':\n tf.test.main()\n", "# Lint as: python3\n# Copyright 2018 The TensorFlow Authors. 
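\n# A minimal usage sketch for the layer exercised by the tests above, kept as\n# comments: the parameter values, tensor shapes, and the layer name below are\n# illustrative assumptions, not values taken from the tests.\n#\n# p = conformer_layer.ConformerLayer.CommonParams(\n# input_dim=8, atten_num_heads=2, kernel_size=3, fflayer_hidden_dim=32)\n# p.name = 'conformer_example' # hypothetical layer name\n# layer = p.Instantiate()\n# features = tf.random.uniform([2, 5, 8]) # [batch, time, input_dim]\n# paddings = tf.zeros([2, 5]) # [batch, time], 0 = not padded\n# out = layer.FProp(\n# layer.theta, py_utils.NestedMap(features=features, paddings=paddings))\n# out.features has shape [2, 5, 8]; out.paddings has shape [2, 5].\n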
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Attention models.\"\"\"\n\nimport math\nimport lingvo.compat as tf\nfrom lingvo.core import base_layer\nfrom lingvo.core import gshard_utils\nfrom lingvo.core import layers\nfrom lingvo.core import py_utils\nfrom lingvo.core import quant_utils\nfrom lingvo.core import symbolic\n\nimport numpy as np\n\nfrom tensorflow.python.ops import inplace_ops # pylint:disable=g-direct-tensorflow-import\n\n\n# Currently, quantization statistics cannot be accumulated across arbitrary\n# defuns, so we allow them to be disabled. A potentially more robust fix is\n# to save and merge the attention state across the defun boundary as is\n# done in recurrent.py.\ndef _ConditionalCallDefun(cond, f, inputs):\n if not cond:\n return f(inputs)\n return py_utils.CallDefun(f, inputs)\n\n\ndef _ApplyAttentionDropout(params, x):\n \"\"\"Apply attention dropout according to the given parameters.\n\n If `params.atten_dropout_deterministic` is set to True, the dropout will be\n fully deterministic.\n\n Args:\n params: The parameters of attention layer.\n x: A float Tensor on which to apply dropout.\n\n Returns:\n A Tensor with the same shape as `x`.\n \"\"\"\n if params.atten_dropout_prob == 0:\n return x\n\n if params.atten_dropout_deterministic:\n seeds = py_utils.GenerateStepSeedPair(params)\n return py_utils.DeterministicDropout(x, 1.0 - params.atten_dropout_prob,\n seeds)\n else:\n return tf.nn.dropout(\n x, rate=params.atten_dropout_prob, seed=params.random_seed)\n\n\ndef SafeCumprod(x, *args, **kwargs):\n \"\"\"Computes cumprod of x in logspace using cumsum to avoid underflow.\n\n The cumprod function and its gradient can result in numerical instabilities\n when its argument has very small and/or zero values. As long as the argument\n is all positive, we can instead compute the cumulative product as\n exp(cumsum(log(x))). This function can be called identically to\n tf.math.cumprod.\n\n Args:\n x: Tensor to take the cumulative product of.\n *args: Passed on to cumsum; these are identical to those in cumprod.\n **kwargs: Passed on to cumsum; these are identical to those in cumprod.\n\n Returns:\n Cumulative product of x.\n \"\"\"\n with tf.name_scope(None, 'SafeCumprod', [x]):\n x = tf.convert_to_tensor(x, name='x')\n tiny = np.finfo(x.dtype.as_numpy_dtype).tiny\n return tf.exp(\n py_utils.CumSum(\n tf.math.log(tf.clip_by_value(x, tiny, 1)), *args, **kwargs))\n\n\n# pyformat: disable\ndef MonotonicAttentionProb(p_choose_i, previous_attention, mode):\n \"\"\"Compute monotonic attention distribution from choosing probabilities.\n\n Monotonic attention implies that the input sequence is processed in an\n explicitly left-to-right manner when generating the output sequence. In\n addition, once an input sequence element is attended to at a given output\n timestep, elements occurring before it cannot be attended to at subsequent\n output timesteps. 
This function generates attention distributions according\n to these assumptions. For more information, see `Online and Linear-Time\n Attention by Enforcing Monotonic Alignments`.\n\n Args:\n p_choose_i: Probability of choosing input sequence/memory element i. Should\n be of shape (batch_size, input_sequence_length), and should all be in the\n range [0, 1].\n previous_attention: The attention distribution from the previous output\n timestep. Should be of shape (batch_size, input_sequence_length). For\n the first output timestep, previous_attention[n] should be [1, 0, 0, ...,\n 0] for all n in [0, ... batch_size - 1].\n mode: How to compute the attention distribution. Must be one of `recursive`,\n `parallel`, or `hard`.\n\n * recursive: uses tf.scan to recursively compute the distribution. This is\n slowest but is exact, general, and does not suffer from numerical\n instabilities.\n * parallel: uses parallelized cumulative-sum and cumulative-product\n operations to compute a closed-form solution to the recurrence relation\n defining the attention distribution. This makes it more efficient than\n 'recursive', but it requires numerical checks which make the\n distribution non-exact. This can be a problem in particular when\n input_sequence_length is long and/or p_choose_i has entries very close\n to 0 or 1.\n * hard: requires that the probabilities in p_choose_i are all either 0 or\n 1, and subsequently uses a more efficient and exact solution.\n\n Returns:\n A tensor of shape (batch_size, input_sequence_length) representing the\n attention distributions for each sequence in the batch.\n\n Raises:\n ValueError: mode is not one of 'recursive', 'parallel', 'hard'.\n \"\"\"\n # pyformat: enable\n # Force things to be tensors\n p_choose_i = tf.convert_to_tensor(p_choose_i, name='p_choose_i')\n previous_attention = tf.convert_to_tensor(\n previous_attention, name='previous_attention')\n if mode == 'recursive':\n batch_size = py_utils.GetShape(p_choose_i)[0]\n tf.logging.info(batch_size)\n # Compute [1, 1 - p_choose_i[0], 1 - p_choose_i[1], ..., 1 - p_choose_i[-2]]\n shifted_1mp_choose_i = tf.concat(\n [tf.ones((batch_size, 1)), 1 - p_choose_i[:, :-1]], 1)\n # Compute attention distribution recursively as\n # q[i] = (1 - p_choose_i[i - 1])*q[i - 1] + previous_attention[i]\n # attention[i] = p_choose_i[i]*q[i]\n attention = p_choose_i * tf.transpose(\n tf.scan(\n # Need to use reshape to remind TF of the shape between loop\n # iterations.\n lambda x, yz: tf.reshape(yz[0] * x + yz[1], (batch_size,)),\n # Loop variables yz[0] and yz[1]\n [\n tf.transpose(shifted_1mp_choose_i),\n tf.transpose(previous_attention)\n ],\n # Initial value of x is just zeros\n tf.zeros((batch_size,))))\n elif mode == 'parallel':\n # SafeCumprod computes cumprod in logspace with numeric checks\n cumprod_1mp_choose_i = SafeCumprod(1 - p_choose_i, axis=1, exclusive=True)\n # Compute recurrence relation solution\n attention = p_choose_i * cumprod_1mp_choose_i * py_utils.CumSum(\n previous_attention /\n # Clip cumprod_1mp to avoid divide-by-zero\n tf.clip_by_value(cumprod_1mp_choose_i, 1e-10, 1.),\n axis=1)\n elif mode == 'hard':\n # Remove any probabilities before the index chosen last time step\n p_choose_i *= tf.cumsum(previous_attention, axis=1)\n # Now, use exclusive cumprod to remove probabilities after the first\n # chosen index, like so:\n # p_choose_i = [0, 0, 0, 1, 1, 0, 1, 1]\n # cumprod(1 - p_choose_i, exclusive=True) = [1, 1, 1, 1, 0, 0, 0, 0]\n # Product of above: [0, 0, 0, 1, 0, 0, 0, 0]\n attention = p_choose_i
* tf.math.cumprod(\n 1 - p_choose_i, axis=1, exclusive=True)\n else:\n raise ValueError(\"mode must be 'recursive', 'parallel', or 'hard'.\")\n return attention\n\n\nclass BaseAttentionLayer(quant_utils.QuantizableLayer):\n \"\"\"A base class for all attention layers.\"\"\"\n\n @classmethod\n def Params(cls):\n p = super().Params()\n p.Define('atten_dropout_prob', 0.0,\n 'Probability at which we apply dropout to the attention weights.')\n p.Define(\n 'atten_dropout_deterministic', False,\n 'Whether to dropout in a fully deterministic way, which is more '\n 'suitable for TPU.')\n p.Define('packed_input', False,\n 'If True, each training example may pack multiple sequences.')\n\n p.qdomain.Define('softmax', None, 'QDomain for the internal softmax.')\n p.qdomain.Define(\n 'fullyconnected', None, 'Fully connected layers are fed '\n 'into activation functions which have known input ranges')\n\n return p\n\n def __init__(self, params):\n \"\"\"Constructs a BaseAttentionLayer object.\"\"\"\n if not params.name:\n raise ValueError('params.name is not set.')\n super().__init__(params)\n\n self._source_init_done = False\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n self.TrackQTensor('logits', domain='fullyconnected')\n\n def InitForSourcePacked(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n \"\"\"Initialize attention for the given source vectors.\n\n Must set `_source_init_done` to True in the function.\n\n Note: `source_segment_id`, if present, should always have the same shape as\n `source_padding`.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n source_vecs: A single tensor of shape [time, batch_size, source_dim].\n source_contexts: A single tensor of shape [time, batch_size, some_dim].\n source_padding: A tensor of shape [time, batch_size].\n source_segment_id: A tensor of shape [time, batch_size]. source_segment_id\n is not None for packed inputs where one training example may pack\n multiple sequences.\n\n Returns:\n A `.NestedMap` object to be passed to ComputeContextVectorWithSource.\n The internal structure of the return value should be considered an\n implementation detail of the attention mechanism and should not be\n inspected or modified by its callers.\n \"\"\"\n self._source_init_done = True\n self._packed_src = self.PackSource(theta, source_vecs, source_contexts,\n source_padding, source_segment_id)\n return self._packed_src\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n \"\"\"Packs source vectors.\n\n Does not change attention state.\n\n Note: `source_segment_id`, if present, should always have the same shape as\n `source_padding`.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n source_vecs: A single tensor of shape [time, batch_size, source_dim].\n source_contexts: A single tensor of shape [time, batch_size, some_dim].\n source_padding: A tensor of shape [time, batch_size].\n source_segment_id: A tensor of shape [time, batch_size]. 
source_segment_id\n is not None for packed inputs where one training example may pack\n multiple sequences.\n\n Returns:\n A `.NestedMap` object to be passed to ComputeContextVectorWithSource.\n The internal structure of the return value should be considered an\n implementation detail of the attention mechanism and should not be\n inspected or modified by its callers.\n \"\"\"\n raise NotImplementedError('Abstract method.')\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n query_vec: a tensor of shape [batch_size, query_dim].\n attention_state: previous attention state.\n per_step_source_padding: Source sequence padding to apply at this step. If\n not None, it should have shape [target_batch_size, source_length].\n query_segment_id: a tensor of shape [batch_size].\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [batch_size, context_dim]\n - The attention probability vector: [batch_size, time]\n - The new attention mechanism state: possibly nested tuple of tensors\n with dimensions [target_batch, ...]\n \"\"\"\n raise NotImplementedError('Abstract method.')\n\n def ComputeContextVector(self,\n theta,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Unlike `ComputeContextVectorWithSource` which explicitly asks for the packed\n source tensors, `ComputeContextVector` uses the class' internal variables.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n query_vec: a tensor of shape [batch_size, query_dim].\n attention_state: previous attention state.\n per_step_source_padding: Source sequence padding to apply at this step. If\n not None, it should be of shape [target_batch_size, source_length].\n query_segment_id: a tensor of shape [batch_size].\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector.\n - The attention probability vector.\n - The new attention mechanism state: possibly nested tuple of tensors with\n dimensions [target_batch, ...]\n \"\"\"\n assert self._source_init_done\n return self.ComputeContextVectorWithSource(theta, self._packed_src,\n query_vec, attention_state,\n per_step_source_padding,\n query_segment_id)\n\n def GetInitializationSourceState(self):\n \"\"\"Gets the attention initialization state.\n\n The base class only preserves the `concated_source_vecs`,\n `concated_source_contexts` and `source_padding`. If subclasses use more\n state than this and need to interact with inference code that must\n fetch and reload state, this and `SetInitializationSourceState` must\n be overridden.\n\n Returns:\n A `.NestedMap` of Tensors that can be preserved and reset via\n `SetInitializationSourceState()` at a later point. 
This allows, for\n example, for attention computations to span session runs.\n \"\"\"\n assert self._source_init_done\n return self._packed_src\n\n def SetInitializationSourceState(self, new_init_state):\n \"\"\"Sets the attention initialization state.\n\n Args:\n new_init_state: A `.NestedMap` matching what was returned from\n `GetInitializationSourceState`, which will return this layer to that\n initialization state.\n \"\"\"\n self._source_init_done = True\n self._packed_src = new_init_state.DeepCopy()\n\n def _PaddedSoftmax(self, logits, padding):\n \"\"\"Performs a softmax as if padding were applied after exponentiation.\n\n The default implementation uses numerical techniques to approximate this\n with a standard `tf.nn.softmax` (using large negative logits for padded\n values). It defers to a `Defun` that may be replaced on low-range\n implementations with a version that is numerically correct.\n\n Args:\n logits: Logits.\n padding: Padding (must be the same shape as logits).\n\n Returns:\n Result of the softmax.\n \"\"\"\n fns = self.fns\n\n if logits.dtype.is_complex:\n logits = tf.abs(logits)\n assert logits.dtype.is_floating\n assert hasattr(logits.dtype, 'max')\n very_negative_logits = (\n tf.ones_like(logits) * logits.dtype.max *\n tf.constant(-0.7, dtype=logits.dtype))\n if self.do_eval:\n very_negative_logits = self.QTensor('logits', very_negative_logits)\n padded_logits = tf.where(padding > 0.0, very_negative_logits, logits)\n # TFLite hardcodes the range of qsoftmax, setting explicitly to avoid\n # incompatible concats.\n return fns.qsoftmax(padded_logits, qdomain='softmax')\n\n def _UpdatePaddingWithPackedInputMask(self, padding, source_segment_ids,\n query_segment_ids):\n \"\"\"Creates an attention mask based on source and query segment ids.\n\n This creates a mask that removes invalid attention, where the query vector\n might assign some weight to neighboring sequences in a packed input example.\n Assumes `n = target_batch // source_batch`.\n\n Args:\n padding: Padding for logits, a tensor of shape [time, n, source_batch].\n source_segment_ids: a tensor of shape [time, source_batch].\n query_segment_ids: a tensor of shape [target_batch].\n\n Returns:\n The padding with the packed-input mask applied.\n \"\"\"\n # Generating packed input mask for attention padding.\n source_segment_ids = tf.expand_dims(source_segment_ids, 1)\n query_segment_ids = tf.reshape(\n query_segment_ids,\n [1, -1, py_utils.GetShape(source_segment_ids)[2]])\n padding = tf.where(\n tf.equal(source_segment_ids, query_segment_ids), padding,\n tf.ones_like(padding))\n return padding\n\n\nclass AdditiveAttention(BaseAttentionLayer):\n \"\"\"Implements additive attention (also known as \"Bahdanau Attention\").\n\n Described in:\n\n Dzmitry Bahdanau, Kyunghyun Cho, Yoshua Bengio.\n \"Neural Machine Translation by Jointly Learning to Align and Translate.\"\n ICLR 2015.\n https://arxiv.org/abs/1409.0473\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for this `AdditiveAttention` class.\"\"\"\n p = super().Params()\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('hidden_dim', 0, 'Number of hidden nodes.')\n # Fill in reasonable default for params init\n p.params_init = py_utils.WeightInit.GaussianSqrtDim()\n p.Define(\n 'same_batch_size', False,\n 'True iff the source and target sequences have the same batch size.')\n return p\n\n def __init__(self, params):\n \"\"\"Constructs an `AdditiveAttention` object.\"\"\"\n super().__init__(params)\n p = 
self.params\n\n def AttenProbs(inputs):\n \"\"\"Generates probs.\"\"\"\n source_batch = py_utils.GetShape(inputs.source_padding)[1]\n target_batch = py_utils.GetShape(inputs.per_step_source_padding)[0]\n multiplier = target_batch // source_batch\n\n # Shape of summed is [sl, tb/sb, sb, hidden_dim].\n summed = tf.tanh(inputs.concated_source_vecs + inputs.query_vec_reshaped)\n # logits is of shape [sl * tb/sb * sb, 1]. Computes dot product\n # between v and every row in 'summed'. Then we reshape the\n # result to be of shape [sl, tb/sb, sb].\n #\n # Another equivalent way is to do:\n # logits = tf.reduce_sum(summed *\n # tf.reshape(v, [1, 1, 1, hidden_dim]), 3)\n logits = py_utils.Matmul(\n tf.reshape(summed, [-1, p.hidden_dim]),\n tf.reshape(inputs.v, [p.hidden_dim, 1]))\n logits = tf.reshape(logits, tf.shape(summed)[:3])\n # Take out the padding states.\n # _source_padding is of shape [source_length, source_batch].\n # reshaped to [source_length, 1, source_batch].\n # per_step_source_padding is reshaped to the same but with 'multiplier'\n # for the second dim.\n source_padding = tf.expand_dims(inputs.source_padding, 1)\n per_step_source_padding = tf.reshape(\n tf.transpose(inputs.per_step_source_padding),\n [-1, multiplier, source_batch])\n source_padding += per_step_source_padding\n\n if p.packed_input:\n source_padding = self._UpdatePaddingWithPackedInputMask(\n source_padding, inputs.source_segment_id, inputs.query_segment_id)\n # Reshape logits to a matrix of shape [target_batch, source_length] and\n # take the softmax to compute the probabilities.\n logits = tf.transpose(tf.reshape(logits, [-1, target_batch]))\n source_padding = tf.transpose(\n tf.reshape(source_padding, [-1, target_batch]))\n probs = self._PaddedSoftmax(logits, source_padding)\n return probs\n\n # Adds the atten function into the graph's library.\n def Atten(v, w, source_padding, source_segment_id, concated_source_vecs,\n concated_source_contexts, query_vec, query_segment_id,\n per_step_source_padding):\n \"\"\"Computes the attention context vector.\n\n Args:\n v: hidden weight. [hidden_dim, 1].\n w: query weight. [query_dim, hidden_dim].\n source_padding: [source_length, source_batch].\n source_segment_id: [source_length, source_batch]\n concated_source_vecs: [source_length, source_batch, hidden_dim].\n concated_source_contexts: [source_batch, source_length, context_dim]\n query_vec: [target_batch, query_dim]\n query_segment_id: [target_batch]\n per_step_source_padding: [target_batch, source_length]\n Note: concated_source_vecs are the vectors that are used to compute the\n attention score between the query_vec and each concated_source_vec. The\n concated_source_contexts are the vectors that compose the result.
The\n attention context vector is computed as a weighted average of the\n concated_source_contexts, using the scores that were computed using\n concated_source_vecs.\n\n Returns:\n attention context vectors and probabilities.\n \"\"\"\n source_batch = py_utils.GetShape(concated_source_vecs)[1]\n target_batch = py_utils.GetShape(query_vec)[0]\n multiplier = target_batch // source_batch\n # concated_source_vecs is reshaped to\n # [source_length, 1, source_batch, hidden_dims]\n concated_source_vecs = tf.expand_dims(concated_source_vecs, 1)\n query_vec_transformed = py_utils.Matmul(query_vec, w)\n\n # query_vec is reshaped to\n # [1, target_batch/source_batch, source_batch, hidden_dims].\n query_vec_reshaped = tf.reshape(\n query_vec_transformed, [1, multiplier, source_batch, p.hidden_dim])\n # probs is of shape [target_batch, source_length]\n probs = py_utils.CallDefun(\n AttenProbs,\n py_utils.NestedMap(\n concated_source_vecs=concated_source_vecs,\n source_padding=source_padding,\n query_vec_reshaped=query_vec_reshaped,\n v=v,\n per_step_source_padding=per_step_source_padding,\n source_segment_id=source_segment_id,\n query_segment_id=query_segment_id))\n probs.set_shape(per_step_source_padding.shape)\n\n # Apply dropout to weights if applicable.\n if not self.do_eval:\n probs = _ApplyAttentionDropout(p, probs)\n\n # Reshape probs to be of shape\n # [target_batch/source_batch, source_batch, source_length]\n probs_reshaped = tf.reshape(probs, [multiplier, source_batch, -1])\n # Transpose probs to be of shape\n # [source_batch, target_batch/source_batch, source_length]\n probs_reshaped = tf.transpose(probs_reshaped, [1, 0, 2])\n # Batched matmul\n # [source_batch, target_batch/source_batch, source_length] *\n # [source_batch, source_length, context_dim] =\n # [source_batch, target_batch/source_batch, context_dim]\n summed = tf.matmul(probs_reshaped, concated_source_contexts)\n\n # summed is of shape\n # [target_batch/source_batch, source_batch, context_dim]\n summed = tf.transpose(summed, [1, 0, 2])\n\n return tf.reshape(summed, [target_batch, -1]), probs\n\n # The source batch size equals the target batch size.\n def AttenSameBatchSize(v, w, source_padding, source_segment_id,\n concated_source_vecs, concated_source_contexts,\n query_vec, query_segment_id,\n per_step_source_padding):\n \"\"\"Computes the attention context vector.\n\n Args:\n v: hidden weight. [hidden_dim].\n w: query weight. [query_dim, hidden_dim].\n source_padding: [sl, b]\n source_segment_id: [sl, b]\n concated_source_vecs: [sl, b, hidden_dim].\n concated_source_contexts: [b, sl, context_dim]\n query_vec: [b, query_dim]\n query_segment_id: [b]\n per_step_source_padding: [b, sl]\n\n Returns:\n attention context vectors and probabilities.\n \"\"\"\n # TODO(jiaye): support dropout\n if p.atten_dropout_prob != 0:\n raise NotImplementedError('dropout is not supported')\n\n # [b, hidden_dim]\n query_vec = py_utils.Matmul(query_vec, w)\n # [sl, b]\n def AttenProbs(inputs):\n \"\"\"Calculates atten probs with padding.\"\"\"\n # tf.tanh(x+y) shape [sl, b, hidden_dim]\n summed = tf.tanh(inputs.x + inputs.y)\n # [-1, hidden_dim] * [hidden_dim, 1] = [-1, 1]\n res = py_utils.Matmul(\n tf.reshape(summed, [-1, p.hidden_dim]), tf.expand_dims(inputs.v, 1))\n # Reshape res to [sl, b]\n logits = tf.reshape(res, tf.shape(summed)[:2])\n # Take out the padding states.
_source_padding is of shape [sl, b].\n source_padding = inputs.source_padding + tf.transpose(\n inputs.per_step_source_padding)\n\n if p.packed_input:\n source_padding = self._UpdatePaddingWithPackedInputMask(\n tf.expand_dims(source_padding, 1), inputs.source_segment_id,\n inputs.query_segment_id)\n source_padding = tf.squeeze(source_padding, 1)\n # [b, sl]\n source_padding = tf.transpose(source_padding)\n logits = tf.transpose(logits)\n # softmax to compute the probabilities. [b, sl]\n probs = self._PaddedSoftmax(logits, source_padding)\n return probs\n\n probs = py_utils.CallDefun(\n AttenProbs,\n py_utils.NestedMap(\n x=concated_source_vecs,\n source_padding=source_padding,\n y=query_vec,\n v=v,\n per_step_source_padding=per_step_source_padding,\n source_segment_id=source_segment_id,\n query_segment_id=query_segment_id))\n probs.set_shape(per_step_source_padding.shape)\n\n # contexts[i, :] is a weighted (probs[i, :]) average of\n # concated_source_contexts[i, :, :].\n # Reshaped probs is of shape [b, 1, sl]\n reshaped_probs = tf.expand_dims(probs, 1)\n # [b, 1, sl] * [b, sl, context_dim] = [b, 1, context_dim]\n contexts = tf.matmul(reshaped_probs, concated_source_contexts)\n # Reshaped context is of shape [b, context_dim]\n contexts = tf.squeeze(contexts, axis=1)\n return contexts, probs\n\n if p.same_batch_size:\n self._ctx_vec = AttenSameBatchSize\n else:\n self._ctx_vec = Atten\n\n def EncodeSource(src_w, vecs, ctxs):\n \"\"\"Prepares source vec and ctx.\"\"\"\n time, batch = py_utils.GetShape(vecs, 2)\n ctxs = py_utils.HasShape(ctxs, [time, batch, -1])\n transformed_vecs = tf.matmul(vecs, src_w)\n transformed_vecs = tf.identity(\n transformed_vecs, name='source_vecs_projected')\n transposed_ctxs = tf.transpose(ctxs, [1, 0, 2])\n transposed_ctxs = tf.identity(transposed_ctxs, name='source_ctx')\n return transformed_vecs, transposed_ctxs\n\n self._encode_source = EncodeSource\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n p = self.params\n pc = py_utils.WeightParams(\n shape=[p.source_dim, p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['AdditiveAttention_vars'])\n self.CreateVariable('source_var', pc)\n\n pc = py_utils.WeightParams(\n shape=[p.query_dim, p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['AdditiveAttention_vars'])\n self.CreateVariable('query_var', pc)\n\n pc = py_utils.WeightParams(\n shape=[p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['AdditiveAttention_vars'])\n self.CreateVariable('hidden_var', pc)\n\n def AddGlobalVN(self, theta):\n theta = super().AddGlobalVN(theta)\n theta.source_var = self.AddVN(theta.source_var)\n theta.hidden_var = self.AddVN(theta.hidden_var)\n theta.query_var = self.AddVN(theta.query_var)\n return theta\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n \"\"\"Packs source vectors.\n\n Does not change attention state.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n source_vecs: A single tensor of shape [time, batch_size, source_dim].\n source_contexts: A single tensor of shape [time, batch_size, some_dim].\n source_padding: A tensor of shape [time, batch_size].\n source_segment_id: A tensor of shape [time, batch_size].\n\n Returns:\n A NestedMap containing the packed source.\n \"\"\"\n with tf.name_scope(self.params.name):\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n\n 
(concated_source_vecs, concated_source_contexts) = (\n self._encode_source(theta.source_var, source_vecs, source_contexts))\n return py_utils.NestedMap(\n # [time, batch_size, hidden_dim].\n source_vecs=concated_source_vecs,\n # [batch_size, time, context_dim].\n # Note the mismatch between `source_vecs` and `source_contexts`. In\n # `source_vecs`, time is the first dim, while it is the second dim in\n # `source_contexts`.\n source_contexts=concated_source_contexts,\n # [time, batch_size].\n source_padding=source_padding,\n # [time, batch_size].\n source_segment_id=source_segment_id)\n\n def ZeroAttentionState(self, source_length, decoder_batch_size):\n p = self.params\n # This is just a dummy state. The first dimension of the state has to match\n # decoder_batch_size.\n zs = tf.zeros([decoder_batch_size, 1], dtype=py_utils.FPropDtype(p))\n return zs\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Note: `packed_src.source_vecs` are the vectors that are used to compute the\n attention score between the `query_vec` and each `packed_src.source_vecs`.\n The `packed_src.source_contexts` are the vectors that compose the result.\n The attention context vector is computed as a weighted average of the\n `packed_src.source_contexts`, using the scores that were computed using\n `packed_src.source_vecs`.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n query_vec: a tensor of shape [batch_size, query_dim].\n attention_state: previous attention state. It is not used in\n `AdditiveAttention`, and is simply passed through.\n per_step_source_padding: Source sequence padding to apply at this step. 
If\n not None, it should be of shape [target_batch_size, source_length].\n query_segment_id: a tensor of shape [batch_size]\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [batch_size, context_dim]\n - The attention probability vector: [batch_size, time]\n - The new attention mechanism state: possibly nested tuple of tensors with\n dimensions [target_batch, ...]\n \"\"\"\n concated_source_vecs = packed_src.source_vecs\n concated_source_contexts = packed_src.source_contexts\n source_padding = packed_src.source_padding\n source_segment_id = packed_src.source_segment_id\n query_batch_size = py_utils.GetShape(query_vec)[0]\n source_length = py_utils.GetShape(source_padding)[0]\n if per_step_source_padding is None:\n zero = tf.constant(0.0, dtype=query_vec.dtype)\n per_step_source_padding = tf.fill([query_batch_size, source_length], zero)\n per_step_source_padding = py_utils.HasShape(\n per_step_source_padding, [query_batch_size, source_length])\n hidden = self.AddVN(theta.hidden_var, per_step=True)\n query = self.AddVN(theta.query_var, per_step=True)\n\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n if query_segment_id is None:\n query_segment_id = tf.zeros(\n tf.shape(query_vec)[0], dtype=source_padding.dtype)\n\n ctx_vec, prob = self._ctx_vec(hidden, query, source_padding,\n source_segment_id, concated_source_vecs,\n concated_source_contexts, query_vec,\n query_segment_id, per_step_source_padding)\n\n return ctx_vec, prob, attention_state\n\n\nclass DotProductAttention(BaseAttentionLayer):\n \"\"\"Implements dot-product attention (also known as \"Luong Attention\").\n\n Described in:\n\n Minh-Thang Luong, Hieu Pham, Christopher D. Manning.\n \"Effective Approaches to Attention-based Neural Machine Translation.\"\n EMNLP 2015.\n https://arxiv.org/abs/1508.04025\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for `DotProductAttention`.\"\"\"\n p = super().Params()\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('hidden_dim', 0, 'Number of hidden nodes.')\n p.Define(\n 'use_dim_scale', True, 'Whether or not to use per_dim_scale to scale '\n 'the individual dims when calculating attention probabilities. It can '\n 'increase training stability when set to False.')\n return p\n\n def __init__(self, params):\n \"\"\"Constructs a DotProductAttention object.\"\"\"\n super().__init__(params)\n p = self.params\n # TODO(yonghui): relax these constraints.\n assert p.source_dim == p.query_dim\n assert p.source_dim == p.hidden_dim\n\n def AttenProbs(inputs):\n \"\"\"Main attention function.\n\n target_batch = source_batch * n where n is an integer >= 1.\n In this case inputs.query_vec contains:\n -------------------------\n | instance 1 |\n | instance 2 |\n 0 | ... |\n | instance source_batch |\n -------------------------\n | instance 1 |\n | instance 2 |\n 1 | ... |\n | instance source_batch |\n -------------------------\n ...\n -------------------------\n | instance 1 |\n | instance 2 |\n n-1 | ... 
|\n | instance source_batch |\n -------------------------\n One use case is beam search where n = beam size.\n\n Args:\n inputs: a NestedMap containing:\n - per_dim_scale: [source_dim], a vec to scale individual dims.\n - source_padding: [time, source_batch].\n - concated_source_vecs: [source_batch, time, source_dim].\n - query_vec: [target_batch, source_dim].\n - per_step_source_padding: [target_batch, source_length]\n - source_segment_id: [time, source_batch].\n - query_segment_id: [target_batch].\n\n Returns:\n logits [target_batch, source_time].\n \"\"\"\n source_padding = tf.transpose(inputs.source_padding)\n concated_source_vecs = inputs.concated_source_vecs\n\n logit_scale = tf.stop_gradient(\n tf.math.rsqrt(\n tf.cast(\n py_utils.GetShape(inputs.query_vec)[1],\n dtype=py_utils.FPropDtype(p))))\n source_batch = py_utils.GetShape(concated_source_vecs)[0]\n target_batch = py_utils.GetShape(inputs.query_vec)[0]\n query_vec = inputs.query_vec * inputs.per_dim_scale\n # The n here refers to the \"n\" described in the comment above.\n n = target_batch // source_batch\n query_vec = tf.reshape(query_vec, [n, source_batch, -1])\n # => [source_batch, source_dim, n]\n query_vec = tf.transpose(query_vec, [1, 2, 0])\n # => [n, source_batch, source_sequence_len]\n per_step_source_padding = tf.reshape(inputs.per_step_source_padding,\n [n, source_batch, -1])\n # => [source_batch, source_sequence_len, n]\n per_step_source_padding = tf.transpose(per_step_source_padding, [1, 2, 0])\n # Dot-product part.\n # Calls batch_mat_mul since dim > 2 for per-instance matmul.\n # [source_batch, time, source_dim] * [source_batch, source_dim, n]\n # => [source_batch, time, n]\n concated_source_vecs, query_vec = self.ToAqtActActInputs(\n act_lhs=concated_source_vecs,\n act_rhs=query_vec,\n act_lhs_distribution=quant_utils.QDistribution.SYMMETRIC,\n act_rhs_distribution=quant_utils.QDistribution.SYMMETRIC)\n logits = tf.matmul(concated_source_vecs, query_vec)\n logits = self.FromAqtActActMatmul(logits)\n\n logits *= logit_scale\n # Exclude padding frames.\n # [source_batch, time] => [source_batch, time, 1]\n source_padding = tf.expand_dims(source_padding, 2)\n source_padding += per_step_source_padding\n if p.packed_input:\n source_padding = tf.transpose(source_padding, [1, 2, 0])\n source_padding = self._UpdatePaddingWithPackedInputMask(\n source_padding, inputs.source_segment_id, inputs.query_segment_id)\n source_padding = tf.transpose(source_padding, [1, 2, 0])\n else:\n source_padding = tf.transpose(source_padding, [2, 0, 1])\n\n # => [n, source_batch, time]\n logits = tf.transpose(logits, [2, 0, 1])\n\n # => [n * source_batch, time].\n # This makes logits store content in the same order as query_vec.\n logits = tf.reshape(logits, [target_batch, -1])\n source_padding = tf.reshape(source_padding, [target_batch, -1])\n probs = self._PaddedSoftmax(logits, source_padding)\n return probs\n\n def Atten(per_dim_scale, source_padding, source_segment_id,\n concated_source_vecs, concated_source_contexts, query_vec,\n query_segment_id, per_step_source_padding):\n \"\"\"Main attention function.\n\n Args:\n per_dim_scale: [source_dim], a vec to scale individual dims.\n source_padding: [time, source_batch].\n source_segment_id: [time, source_batch].\n concated_source_vecs: [time, source_batch, source_dim].\n concated_source_contexts: [source_batch, time, context_dim].\n query_vec: [target_batch, source_dim].\n query_segment_id: [target_batch].\n per_step_source_padding: [target_batch, source_length]\n Note: 
concated_source_vecs are the vectors that are used to compute the\n attention score between the query_vec and each concated_source_vec. The\n concated_source_contexts are the vectors that compose the result. The\n attention context vector is computed as a weighted average of the\n concated_source_contexts, using the scores that were computed using\n concated_source_vecs.\n\n Returns:\n Two tensors:\n\n - context_vector: [target_batch, context_dim].\n - probs: [target_batch, time].\n \"\"\"\n py_utils.assert_shape_match([py_utils.GetShape(concated_source_vecs)[2]],\n [py_utils.GetShape(query_vec)[1]])\n py_utils.assert_shape_match([py_utils.GetShape(concated_source_vecs)[2]],\n [symbolic.ToStatic(p.source_dim)])\n source_batch = py_utils.GetShape(concated_source_vecs)[1]\n target_batch = py_utils.GetShape(query_vec)[0]\n n = target_batch // source_batch\n concated_source_vecs = tf.transpose(concated_source_vecs, [1, 0, 2])\n concated_source_vecs = tf.identity(\n concated_source_vecs, name='concated_source_vecs')\n returned_probs = py_utils.CallDefun(\n AttenProbs,\n py_utils.NestedMap(\n per_dim_scale=per_dim_scale,\n source_padding=source_padding,\n concated_source_vecs=concated_source_vecs,\n query_vec=query_vec,\n per_step_source_padding=per_step_source_padding,\n source_segment_id=source_segment_id,\n query_segment_id=query_segment_id))\n returned_probs.set_shape(per_step_source_padding.shape)\n\n # => [n, source_batch, time].\n probs = tf.reshape(returned_probs, [n, source_batch, -1])\n # => [source_batch, n, time].\n probs = tf.transpose(probs, [1, 0, 2])\n\n # Apply dropout to weights if applicable.\n if not self.do_eval:\n probs = _ApplyAttentionDropout(p, probs)\n\n # Weight each frame with the probability and sum them.\n # [source_batch, n, time] * [source_batch, time, context_dim]\n # => [source_batch, n, context_dim].\n concated_source_contexts = tf.identity(\n concated_source_contexts, name='concated_source_contexts')\n probs, concated_source_contexts = self.ToAqtActActInputs(\n act_lhs=probs,\n act_rhs=concated_source_contexts,\n act_lhs_distribution=quant_utils.QDistribution.POSITIVE,\n act_rhs_distribution=quant_utils.QDistribution.SYMMETRIC)\n\n context_vector = tf.matmul(probs, concated_source_contexts)\n context_vector = self.FromAqtActActMatmul(context_vector)\n\n # => [n, source_batch, context_dim].\n context_vector = tf.transpose(context_vector, [1, 0, 2])\n context_vector = gshard_utils.MeshSplit(context_vector, p.device_mesh,\n p.activation_split_dims_mapping)\n # => [n * source_batch, context_dim].\n context_vector = tf.reshape(context_vector, [target_batch, -1])\n\n return context_vector, returned_probs\n\n self._ctx_vec = Atten\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n p = self.params\n\n if p.use_dim_scale:\n pc = py_utils.WeightParams(\n shape=[p.hidden_dim],\n init=py_utils.WeightInit.Constant(0.0),\n dtype=p.dtype,\n collections=['DotProductAttention_vars'])\n\n self.CreateVariable('per_dim_scale', pc)\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n \"\"\"Packs source vectors.\n\n Does not change attention state.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n source_vecs: A tensor of shape [time, source_batch, source_dim].\n source_contexts: A tensor of shape [time, source_batch, context_dim].\n source_padding: A tensor of shape [time, source_batch].\n source_segment_id: A tensor of shape [time, 
source_batch].\n\n Returns:\n A `.NestedMap` containing the packed source, where `source_vecs` is a\n tensor of shape [time, batch_size, hidden_dim], `source_contexts` is a\n tensor of shape [batch_size, time, some_dim], `source_padding` is a\n tensor of shape [time, batch_size], and `source_segment_id` is a tensor\n of shape [time, batch_size].\n \"\"\"\n concated_source_vecs = tf.identity(source_vecs)\n concated_source_contexts = tf.transpose(source_contexts, [1, 0, 2])\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n return py_utils.NestedMap(\n # [time, batch_size, hidden_dim].\n source_vecs=concated_source_vecs,\n # [batch_size, time, context_dim].\n # Note the mismatch between `source_vecs` and `source_contexts`. In\n # `source_vecs`, time is the first dim, while it is the second dim in\n # `source_contexts`.\n source_contexts=concated_source_contexts,\n # [time, batch_size].\n source_padding=source_padding,\n # [time, batch_size].\n source_segment_id=source_segment_id)\n\n def ZeroAttentionState(self, source_length, decoder_batch_size):\n p = self.params\n # No states to keep track of currently.\n return tf.zeros([decoder_batch_size, 1], dtype=py_utils.FPropDtype(p))\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n query_vec: a tensor of shape [target_batch, query_dim], where target_batch\n = n * source_batch (e.g., n = num_hyps_per_beam in beamsearch). Along\n the target_batch dimension, there are n groups of consecutive rows, each\n group containing source_batch rows.\n attention_state: previous attention state. It is not used in\n DotProductAttention, and is simply passed through.\n per_step_source_padding: Source sequence padding to apply at this step. 
If\n not None, it should be of shape [target_batch, source_length].\n query_segment_id: Query segment id with shape [target_batch].\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [batch_size, context_dim]\n - The attention probability vector: [batch_size, time]\n - The new attention mechanism state: possibly nested tuple of tensors\n with dimensions [target_batch, ...]\n \"\"\"\n concated_source_vecs = packed_src.source_vecs\n concated_source_contexts = packed_src.source_contexts\n\n source_padding = packed_src.source_padding\n source_segment_id = packed_src.source_segment_id\n query_batch_size = py_utils.GetShape(query_vec)[0]\n source_sequence_length = py_utils.GetShape(source_padding)[0]\n if per_step_source_padding is None:\n zero = tf.constant(0.0, dtype=query_vec.dtype)\n per_step_source_padding = tf.fill(\n [query_batch_size, source_sequence_length], zero)\n per_step_source_padding = py_utils.HasShape(\n per_step_source_padding, [query_batch_size, source_sequence_length])\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n if query_segment_id is None:\n query_segment_id = tf.zeros(\n py_utils.GetShape(query_vec)[0], dtype=source_padding.dtype)\n\n def ScaleFn(x):\n return tf.nn.softplus(x) / tf.nn.softplus(tf.constant(0.0, dtype=x.dtype))\n\n if self.params.use_dim_scale:\n per_dim_scale_var = theta.per_dim_scale\n else:\n per_dim_scale_var = tf.constant(0.0, dtype=query_vec.dtype)\n\n ctx_vec, prob = self._ctx_vec(\n ScaleFn(per_dim_scale_var), source_padding, source_segment_id,\n concated_source_vecs, concated_source_contexts, query_vec,\n query_segment_id, per_step_source_padding)\n return ctx_vec, prob, attention_state\n\n\ndef _RecursiveReshape(x, shape):\n if x is None:\n return None\n elif isinstance(x, py_utils.NestedMap):\n return x.Transform(lambda y: _RecursiveReshape(y, shape))\n else:\n return tf.reshape(x, shape) if x.shape.ndims == 2 else x\n\n\nclass MultiHeadedAttention(BaseAttentionLayer, quant_utils.QuantizableLayer):\n \"\"\"Attention with multiple attention heads.\n\n Conceptually, the algorithm works as follows:\n\n 1. Source vectors (attention keys) are first projected to vectors of dim\n p.hidden_dim.\n 2. Query vectors are projected to vectors of dim p.hidden_dim as well.\n 3. Context vectors (attention values) are not projected by default, unless\n `enable_ctx_pre_proj` is True.\n 4. Source vectors, query vectors and context vectors are all split into\n p.num_attention_heads chunks.\n 5. The inner atten mechanism is computed separately on each of the chunks.\n 6. Attention contexts from each of the chunks are concatenated to form the\n final context.\n 7. 
Attention probs from each of the chunks are averaged to form the final\n attention prob.\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for MultiHeadedAttention.\"\"\"\n p = super().Params()\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('context_dim', 0, 'Number of context nodes.')\n p.Define('hidden_dim', 0, 'Number of hidden nodes.')\n p.Define('num_attention_heads', 2, 'Num of attention heads.')\n p.Define(\n 'use_source_vec_as_attention_value', True,\n 'Whether or not to use source_vec as the attention value as well.'\n ' If True, we expect source_vec and source_contexts are the same.')\n p.Define('enable_source_proj', True,\n 'If False, source side linear projection is disabled.')\n p.Define('enable_query_proj', True,\n 'If False, query side linear projection is disabled.')\n p.Define('inner_atten_params', DotProductAttention.Params(),\n 'Params for underlying attention mechanism.')\n p.Define(\n 'enable_ctx_pre_proj', False,\n 'If True, context is pre-projected before processing into'\n ' hidden_dim.')\n p.Define(\n 'enable_ctx_post_proj', False,\n 'If True, computed context is post projected into'\n ' ctx_post_proj_dim.')\n p.Define('ctx_post_proj_dim', 0, 'Number of post projection nodes.')\n p.Define(\n 'num_post_proj', 1, 'Number of post projections, usually the same as '\n 'number of tasks. Each task may choose to use one of the post '\n 'projection layers.')\n p.Define(\n 'proj_init', 'default', 'Initialization approach for projection '\n 'layers:'\n 'uniform: Use uniform initialization. '\n 'default: Use the default Xavier initialization.')\n p.Define(\n 'attention_head_prob_index', -1, 'If >= 0, instead of averaging '\n 'the probabilities of all attention heads when returning the '\n 'attention probability, return the prob of the selected head index.')\n\n p.Define('use_bias', True, 'Whether to use bias for projection layer.')\n p.Define('enable_per_dim_scale', True,\n 'Whether to use per_dim_scale in inner_atten.')\n\n # Often the attention context output needs to be concated\n # with tensors from another layer. This allows them to share\n # quantization parameters. By convention, all attention layers\n # need to include their context output vectors in this domain.\n p.qdomain.Define('atten_context', None,\n 'Quantization domain for attention context.')\n\n p.params_init = py_utils.WeightInit.Xavier(scale=1.0)\n\n return p\n\n def __init__(self, params):\n \"\"\"Constructs a MultiHeadedAttention object.\"\"\"\n super().__init__(params)\n p = self.params\n assert symbolic.ToStatic(\n p.hidden_dim) % p.num_attention_heads == 0, '%s mod %s != 0' % (\n symbolic.ToStatic(p.hidden_dim), p.num_attention_heads)\n\n if p.proj_init not in ('uniform', 'default'):\n raise ValueError('Unknown proj_init: %s!'
% p.proj_init)\n\n att_dim = p.hidden_dim // p.num_attention_heads\n\n att_p = p.inner_atten_params.Set(\n source_dim=att_dim,\n query_dim=att_dim,\n hidden_dim=att_dim,\n dtype=p.dtype,\n atten_dropout_prob=p.atten_dropout_prob,\n atten_dropout_deterministic=p.atten_dropout_deterministic,\n packed_input=p.packed_input)\n\n if att_p.cls == DotProductAttention:\n att_p.use_dim_scale = p.enable_per_dim_scale\n\n if not att_p.name:\n att_p.name = 'inner_att'\n self.CreateChild('atten', att_p)\n if p.attention_head_prob_index >= 0:\n assert p.attention_head_prob_index < p.num_attention_heads\n\n self.CreateAqtWeight(\n 'query_proj',\n shape=[p.query_dim, p.hidden_dim],\n feature_axis=-1,\n legacy_aqt_w_name='query_proj_aqt')\n self.CreateAqtWeight(\n 'source_proj',\n shape=[p.source_dim, p.hidden_dim],\n feature_axis=-1,\n legacy_aqt_w_name='source_proj_aqt')\n self.CreateAqtWeight(\n 'ctx_proj',\n shape=[p.context_dim, p.hidden_dim],\n feature_axis=-1,\n legacy_aqt_w_name='ctx_pre_proj_aqt')\n self.CreateAqtWeight(\n 'ctx_post_proj',\n shape=[p.hidden_dim, p.ctx_post_proj_dim],\n feature_axis=-1,\n legacy_aqt_w_name='ctx_post_proj_aqt')\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n p = self.params\n\n def InitProj(layer_dim, bias=False):\n if p.proj_init == 'uniform':\n # Note we also initialize bias with uniform distribution here, following\n # the default Pytorch implementation:\n # https://pytorch.org/docs/stable/nn.html#linear\n proj_init = py_utils.WeightInit.Uniform(scale=np.sqrt(1.0 / layer_dim))\n elif p.proj_init == 'default':\n proj_init = py_utils.WeightInit.Constant(0.0) if bias else p.params_init\n return proj_init\n\n if p.use_bias:\n pc_bias = py_utils.WeightParams(\n shape=[p.hidden_dim],\n init=InitProj(p.hidden_dim, bias=True),\n dtype=p.dtype,\n collections=[self.__class__.__name__ + '_vars'])\n\n if p.enable_source_proj:\n pc = py_utils.WeightParams(\n shape=[p.source_dim, p.hidden_dim],\n init=InitProj(p.source_dim),\n dtype=p.dtype,\n device_mesh=p.device_mesh,\n tensor_split_dims_mapping=p.weight_split_dims_mapping,\n collections=[self.__class__.__name__ + '_vars'])\n self.CreateVariable('source_proj', pc)\n if p.use_bias:\n self.CreateVariable('source_proj_b', pc_bias)\n else:\n assert p.source_dim == p.hidden_dim\n\n if p.enable_query_proj:\n pc = py_utils.WeightParams(\n shape=[p.query_dim, p.hidden_dim],\n init=InitProj(p.query_dim),\n dtype=p.dtype,\n device_mesh=p.device_mesh,\n tensor_split_dims_mapping=p.weight_split_dims_mapping,\n collections=[self.__class__.__name__ + '_vars'])\n self.CreateVariable('query_proj', pc)\n if p.use_bias:\n self.CreateVariable('query_proj_b', pc_bias)\n else:\n assert p.query_dim == p.hidden_dim\n\n if p.enable_ctx_pre_proj and not p.use_source_vec_as_attention_value:\n assert p.context_dim\n pc = py_utils.WeightParams(\n shape=[p.context_dim, p.hidden_dim],\n init=InitProj(p.context_dim),\n dtype=p.dtype,\n device_mesh=p.device_mesh,\n tensor_split_dims_mapping=p.weight_split_dims_mapping,\n collections=[self.__class__.__name__ + '_vars'])\n self.CreateVariable('ctx_proj', pc)\n if p.use_bias:\n self.CreateVariable('ctx_proj_b', pc_bias)\n\n if p.enable_ctx_post_proj:\n assert p.ctx_post_proj_dim\n if p.num_post_proj == 1:\n pc_shape = [p.hidden_dim, p.ctx_post_proj_dim]\n pc_b_shape = [p.ctx_post_proj_dim]\n elif p.num_post_proj > 1:\n pc_shape = [p.hidden_dim, p.ctx_post_proj_dim, p.num_post_proj]\n pc_b_shape = [p.ctx_post_proj_dim, p.num_post_proj]\n else:\n raise ValueError('num_post_proj must be > 
0!')\n weight_split_dims_mapping = p.weight_split_dims_mapping\n if weight_split_dims_mapping and p.num_post_proj > 1:\n weight_split_dims_mapping = weight_split_dims_mapping + [-1]\n pc = py_utils.WeightParams(\n shape=pc_shape,\n init=InitProj(p.hidden_dim),\n dtype=p.dtype,\n device_mesh=p.device_mesh,\n tensor_split_dims_mapping=weight_split_dims_mapping,\n collections=[self.__class__.__name__ + '_vars'])\n self.CreateVariable('ctx_post_proj', pc)\n if p.use_bias:\n pc_bias_post_proj = py_utils.WeightParams(\n shape=pc_b_shape,\n init=InitProj(p.ctx_post_proj_dim, bias=True),\n dtype=p.dtype,\n collections=[self.__class__.__name__ + '_vars'])\n self.CreateVariable('ctx_post_proj_b', pc_bias_post_proj)\n\n self.TrackQTensor('source_proj_matmul', 'source_proj_add',\n 'query_proj_matmul', 'query_proj_add',\n 'ctx_pre_proj_matmul', 'ctx_pre_proj_add')\n # TODO(suderman): Remove the self.do_eval check below once bprop quant within\n # defun is fixed on the training side. This is less than ideal as-is because\n # training will just trend to match downstream quant constraints vs force\n # alignment.\n self.TrackQTensor(\n 'ctx_post_proj_matmul', 'ctx_post_proj_add', domain='atten_context')\n\n @classmethod\n def SetOutputContextDim(cls, p, out_dim):\n p.ctx_post_proj_dim = out_dim\n\n @py_utils.NameScopeDecorator('MultiHeadedAttention/PackSource')\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n \"\"\"Packs source vectors.\n\n Does not change attention state.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n source_vecs: A tensor of shape [time, source_batch, source_dim].\n source_contexts: A tensor of shape [time, source_batch, context_dim].\n source_padding: A tensor of shape [time, source_batch].\n source_segment_id: A tensor of shape [time, source_batch].\n\n Returns:\n A NestedMap representing packed src.
It will have the same structure\n as the one returned by the inner atten, except that source_batch will be\n source_batch * num_heads.\n \"\"\"\n\n p = self.params\n fns = self.fns\n if not p.enable_source_proj:\n assert p.source_dim == p.hidden_dim\n if not p.enable_query_proj:\n assert p.query_dim == p.hidden_dim\n with tf.name_scope('init__0'):\n if p.use_source_vec_as_attention_value:\n source_vecs = py_utils.HasShape(source_vecs,\n py_utils.GetShape(source_contexts))\n time_steps, batch_size = py_utils.GetShape(source_padding, 2)\n # source_projected shape [time * source_batch, hidden]\n with tf.name_scope('init__0a'):\n source_vec_depth = py_utils.GetShape(source_vecs)[2]\n with tf.name_scope('init__0b'):\n if p.enable_source_proj:\n source_vecs = tf.reshape(source_vecs, [-1, source_vec_depth])\n source_vecs, w_source_proj = self.ToAqtInputs(\n 'source_proj',\n act=source_vecs,\n weight=theta.source_proj,\n w_feature_axis=-1)\n w_source_proj = self.QWeight(w_source_proj)\n source_projected = (\n fns.qbatchmatmul(\n source_vecs, w_source_proj, qout_name='source_proj_matmul'))\n source_projected = self.FromAqtMatmul('source_proj', source_projected)\n if p.use_bias:\n source_projected = fns.qadd(\n source_projected,\n self.QWeight(theta.source_proj_b),\n qout_name='source_proj_add')\n else:\n source_projected = tf.reshape(source_vecs, [-1, source_vec_depth])\n if p.activation_split_dims_mapping:\n source_projected = gshard_utils.MeshSplit(\n source_projected, p.device_mesh,\n p.activation_split_dims_mapping[1:])\n with tf.name_scope('init__1'):\n num_heads = p.num_attention_heads\n # => [time, source_batch * num_heads, hidden / num_heads]\n source_projected = tf.reshape(source_projected, [\n time_steps, batch_size * num_heads,\n symbolic.ToStatic(p.hidden_dim // num_heads)\n ])\n source_projected = gshard_utils.MeshSplit(source_projected, p.device_mesh,\n p.activation_split_dims_mapping)\n source_projected = self.ProcessProjectionVec(theta, source_projected,\n 'source')\n if p.use_source_vec_as_attention_value:\n source_contexts_reshaped = source_projected\n else:\n if p.enable_ctx_pre_proj:\n source_contexts = tf.reshape(\n source_contexts, [-1, py_utils.GetShape(source_contexts)[2]])\n source_contexts, w_ctx_proj = self.ToAqtInputs(\n 'ctx_proj',\n act=source_contexts,\n weight=theta.ctx_proj,\n w_feature_axis=-1)\n w_ctx_proj = self.QWeight(w_ctx_proj)\n\n source_contexts_projected = fns.qbatchmatmul(\n source_contexts, w_ctx_proj, qout_name='ctx_pre_proj_matmul')\n source_contexts_projected = self.FromAqtMatmul(\n 'ctx_proj', source_contexts_projected)\n if p.use_bias:\n source_contexts_projected = fns.qadd(\n source_contexts_projected,\n self.QWeight(theta.ctx_proj_b),\n qout_name='ctx_pre_proj_add')\n if p.activation_split_dims_mapping:\n source_contexts_projected = gshard_utils.MeshSplit(\n source_contexts_projected, p.device_mesh,\n p.activation_split_dims_mapping[1:])\n else:\n source_contexts_projected = source_contexts\n\n source_context_depth = py_utils.GetShape(source_contexts_projected)[-1]\n source_contexts_reshaped = tf.reshape(source_contexts_projected, [\n time_steps, batch_size * num_heads,\n source_context_depth // num_heads\n ])\n source_contexts_projected = gshard_utils.MeshSplit(\n source_contexts_projected, p.device_mesh,\n p.activation_split_dims_mapping)\n source_contexts_projected = self.ProcessProjectionVec(\n theta, source_contexts_projected, 'ctx')\n\n with tf.name_scope('init__2'):\n source_padding_replicated = tf.reshape(\n tf.tile(\n 
tf.reshape(source_padding, [time_steps, batch_size, 1]),\n [1, 1, num_heads]), [time_steps, batch_size * num_heads])\n if source_segment_id is None:\n source_segment_id_repl = tf.zeros_like(source_padding_replicated)\n else:\n source_segment_id_repl = tf.reshape(\n tf.tile(\n tf.reshape(source_segment_id, [time_steps, batch_size, 1]),\n [1, 1, num_heads]), [time_steps, batch_size * num_heads])\n\n return self.atten.PackSource(theta.atten, source_projected,\n source_contexts_reshaped,\n source_padding_replicated,\n source_segment_id_repl)\n\n @py_utils.NameScopeDecorator('MultiHeadedAttention/ExtendSourcePacked')\n def ExtendSourcePacked(self,\n theta,\n new_source_vecs,\n new_source_contexts,\n new_source_paddings,\n new_source_segment_ids,\n cached_packed_src,\n t=None):\n \"\"\"Extend cached source_vecs and source_contexts by one more timestep.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n new_source_vecs: A tensor of shape [source_batch, source_dim].\n new_source_contexts: A tensor of shape [source_batch, context_dim].\n new_source_vecs and new_source_contexts are source_vecs and\n source_contexts for the new timestep to be extended.\n new_source_paddings: If not None, a tensor of shape [source_batch].\n source_padding for the new timestep.\n new_source_segment_ids: If not None, a tensor of shape [source_batch].\n source_segment_id for the new timestep.\n cached_packed_src: a `.NestedMap` object, containing already preprocessed\n source_vecs and source_contexts for the previous t-1 steps. To support\n tf.while_loop on TPU (satisfying static shape requirement), instead of\n using tf.concat to update the cached vectors, the time dimension of each\n cached vector is fixed as the max_sequence_length and inplace\n update op is used to update the information for each time step:\n * source_vecs: A tensor of shape [max_sequence_length, source_batch,\n hidden_dim]. [:t, :, :] contains valid preprocessed source_vecs in the\n previous t - 1 timesteps, the rests are invalid data.\n * source_contexts: A tensor of shape [max_sequence_length, source_batch,\n hidden_dim]. [:t, :, :] contains valid preprocessed source_contexts in\n the previous t - 1 timesteps, the rests are invalid data.\n * source_padding: If not None, a tensor of shape [max_sequence_length,\n source_batch, num_heads]. [:t, :, :] contains cached source padding\n for the previous t - 1 timesteps, the rests are invalid data.\n * source_segment_id: If not None, a tensor of shape\n [max_sequence_length, source_batch, num_heads]. [:t, :, :] contains\n cached source segment id for the previous t - 1 timesteps, the rests\n are invalid data.\n When t is None (not running on TPU or the while loop is unrolled):\n * source_vecs: A tensor of shape [t - 1, source_batch, hidden_dim].\n * source_contexts: A tensor of shape [t - 1, source_batch, hidden_dim].\n * source_padding: If not None, a tensor of shape [t - 1, source_batch,\n num_heads], cached source padding for the previous t - 1 timesteps.\n * source_segment_id: If not None, a tensor of shape [t - 1,\n source_batch, num_heads], cached source segment id for the previous t\n - 1 timesteps.\n t: a scalar, the current time step, 0-based.\n\n Returns:\n Extended cached source_vecs, source_contexts, source_paddings, and\n source_segment_ids. 
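For example (a hypothetical sketch; the variable names are assumed, not\n part of this API), one step of a decode loop would extend the cache as:\n\n cached_packed_src = atten_layer.ExtendSourcePacked(\n theta, vec_t, ctx_t, padding_t, segment_id_t,\n cached_packed_src, t=step)\n\n 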
The time dimension of each cached state is fixed:\n 'extended_source_vec' is of shape [max_sequence_length, batch_size,\n num_heads * dim];\n 'extended_source_context' is of shape [max_sequence_length, batch_size,\n num_heads * dim];\n 'source_padding' is of shape [max_sequence_length, batch_size, num_heads];\n 'source_segment_id' is of shape [max_sequence_length, batch_size,\n num_heads].\n But only [:(t + 1), :, :] contains valid data.\n If t is not given,\n 'extended_source_vec' is of shape [t, batch_size, num_heads * dim];\n 'extended_source_context' is of shape [t, batch_size, num_heads * dim];\n 'source_padding' is of shape [t, batch_size, num_heads];\n 'source_segment_id' is of shape [t, batch_size, num_heads].\n \"\"\"\n batch_size = py_utils.GetShape(new_source_vecs)[0]\n if new_source_paddings is None:\n new_source_paddings = tf.zeros([batch_size], dtype=new_source_vecs.dtype)\n if new_source_segment_ids is None:\n new_source_segment_ids = tf.zeros([batch_size],\n dtype=new_source_vecs.dtype)\n processed_packed_src = self.InitForSourcePacked(\n theta, tf.expand_dims(new_source_vecs, 0),\n tf.expand_dims(new_source_contexts, 0),\n tf.expand_dims(new_source_paddings, 0),\n tf.expand_dims(new_source_segment_ids, 0))\n extended_packed_src = py_utils.NestedMap()\n for key in ('source_vecs', 'source_contexts', 'source_padding',\n 'source_segment_id'):\n if cached_packed_src.get(key, None) is None:\n extended_packed_src[key] = None\n else:\n if t is not None:\n processed = tf.reshape(processed_packed_src[key], [batch_size, -1])\n # Make sure t is a scalar instead of a tensor with shape like [1,].\n # This can happen when the function is called from recurrent.py\n # (for example, by target_sequence_sampler).\n t = tf.reshape(t, [])\n extended_packed_src[key] = inplace_ops.alias_inplace_update(\n cached_packed_src[key], t, processed)\n else:\n processed = tf.reshape(processed_packed_src[key], [1, batch_size, -1])\n extended_packed_src[key] = tf.concat(\n [cached_packed_src[key], processed], axis=0)\n return extended_packed_src\n\n @py_utils.NameScopeDecorator('MultiHeadedAttention/ZeroAttentionState')\n def ZeroAttentionState(self, source_length, decoder_batch_size):\n zero_att_state = self.atten.ZeroAttentionState(\n source_length, decoder_batch_size * self.params.num_attention_heads)\n # [batch * num_heads, length] => [batch, num_heads * length].\n zero_att_state = _RecursiveReshape(zero_att_state, [decoder_batch_size, -1])\n nested_map_zero_att_state = py_utils.NestedMap(inner=zero_att_state)\n if self.params.attention_head_prob_index >= 0:\n selected_prob_head = tf.zeros([decoder_batch_size, source_length],\n dtype=py_utils.FPropDtype(self.params))\n nested_map_zero_att_state[\n 'selected_attention_head_probs'] = selected_prob_head\n return nested_map_zero_att_state\n\n def ProcessProjectionVec(self, theta, projection_vec, projection_type):\n # no-op for this class but allows subclasses to override to process\n # projected vectors.\n return projection_vec\n\n @py_utils.NameScopeDecorator(\n 'MultiHeadedAttention/ComputeContextVectorWithSource')\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None,\n atten_idx=None):\n \"\"\"Computes the context vector given the current query output.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n 
query_vec: a tensor of shape [target_batch, query_dim].\n attention_state: A NestedMap. 'inner' contains the inner attention\n state. It is not used in AdditiveAttention, and is simply passed\n through. Optionally, if attention_head_prob_index >= 0, then\n 'selected_attention_head_probs' contains the selected attention\n probability head.\n per_step_source_padding: Source sequence padding to apply at this step. If\n not None, it should be of shape [target_batch_size, source_length].\n query_segment_id: a tensor of shape [target_batch].\n atten_idx: If not None, then apply a different attention projection for\n different samples in a batch, each of which may come from different\n tasks. This is usually used in multi-task setting. A tensor of shape\n [target_batch].\n Note: concated_source_vecs are the vectors that are used to compute the\n attention score between the query_vec and each concated_source_vec. The\n concated_source_contexts are the vectors that compose the result. The\n attention context vector is computed as a weighted average of the\n concated_source_contexts, using the scores that were computed using\n concated_source_vecs.\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [batch_size, context_dim]\n - The attention probability vector: [batch_size, time]\n - The new attention mechanism state: A nested tuple of tensors with\n dimensions [target_batch, ...]. See input 'attention_state' for\n description of items in the nested tuple.\n \"\"\"\n p = self.params\n fns = self.fns\n source_padding = packed_src.source_padding\n source_seq_len = py_utils.GetShape(source_padding)[0]\n num_heads = p.num_attention_heads\n batch_size = py_utils.GetShape(query_vec)[0]\n static_inner_atten_dim = symbolic.ToStatic(p.hidden_dim // num_heads)\n query_vec_projected_shape = [batch_size * num_heads, static_inner_atten_dim]\n\n if p.enable_query_proj:\n query_vec, w_query_proj = self.ToAqtInputs(\n 'query_proj',\n act=query_vec,\n weight=theta.query_proj,\n w_feature_axis=-1)\n w_query_proj = self.QWeight(w_query_proj)\n query_vec_projected = fns.qbatchmatmul(\n query_vec, w_query_proj, qout_name='query_proj_matmul')\n query_vec_projected = self.FromAqtMatmul('query_proj',\n query_vec_projected)\n if p.use_bias:\n query_vec_projected = fns.qadd(\n query_vec_projected,\n self.QWeight(theta.query_proj_b),\n qout_name='query_proj_add')\n query_vec_projected = tf.reshape(query_vec_projected,\n query_vec_projected_shape)\n query_vec_projected = self.ProcessProjectionVec(theta,\n query_vec_projected,\n 'query')\n else:\n query_vec_projected = tf.reshape(query_vec, query_vec_projected_shape)\n if p.activation_split_dims_mapping:\n query_vec_projected = gshard_utils.MeshSplit(\n query_vec_projected, p.device_mesh,\n p.activation_split_dims_mapping[1:])\n\n query_batch_size = py_utils.GetShape(query_vec)[0]\n if query_segment_id is None:\n query_segment_id = tf.zeros(\n query_batch_size * num_heads, dtype=source_padding.dtype)\n else:\n query_segment_id_repl = tf.tile(\n tf.expand_dims(query_segment_id, 1), [1, num_heads])\n query_segment_id = tf.reshape(query_segment_id_repl, [-1])\n\n if per_step_source_padding is None:\n zero = tf.constant(0.0, dtype=query_vec.dtype)\n per_step_source_padding = tf.fill([query_batch_size, source_seq_len],\n zero)\n per_step_source_padding = py_utils.HasShape(\n per_step_source_padding, [query_batch_size, source_seq_len])\n per_step_source_padding = tf.reshape(\n tf.tile(per_step_source_padding, [1, num_heads]), [-1, source_seq_len])\n 
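# Shape sketch for the head replication above (hypothetical sizes, with\n # tb = target batch, sl = source length, n = num_heads):\n # [tb, sl] --tile--> [tb, n * sl] --reshape--> [tb * n, sl]\n # so row (b * n + h) carries the per-step padding seen by head h of\n # example b, matching the [batch * num_heads, ...] layout used below.\n 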
attention_state = _RecursiveReshape(attention_state,\n [batch_size * num_heads, -1])\n if isinstance(attention_state, py_utils.NestedMap):\n if 'emit_probs' in attention_state:\n inner_state = attention_state\n elif 'inner' in attention_state:\n inner_state = attention_state.inner\n else:\n inner_state = attention_state\n ctx_vec, prob, new_inner_state = self.atten.ComputeContextVectorWithSource(\n theta.atten, packed_src, query_vec_projected, inner_state,\n per_step_source_padding, query_segment_id)\n ctx_vec = tf.reshape(ctx_vec, [batch_size, -1])\n if p.activation_split_dims_mapping:\n ctx_vec = gshard_utils.MeshSplit(ctx_vec, p.device_mesh,\n p.activation_split_dims_mapping[1:])\n if p.enable_ctx_post_proj:\n if atten_idx is None:\n assert p.num_post_proj == 1, (\n 'atten_idx is None, this means there is no need to select '\n 'different post projections, and p.num_post_proj is supposed to be '\n '1. However you set p.num_post_proj=%s .' % p.num_post_proj)\n ctx_vec, w_ctx_post_proj = self.ToAqtInputs(\n 'ctx_post_proj',\n act=ctx_vec,\n weight=theta.ctx_post_proj,\n w_feature_axis=-1)\n w_ctx_post_proj = self.QWeight(w_ctx_post_proj)\n ctx_vec = fns.qbatchmatmul(\n ctx_vec, w_ctx_post_proj, qout_name='ctx_post_proj_matmul')\n ctx_vec = self.FromAqtMatmul('ctx_post_proj', ctx_vec)\n if p.use_bias:\n ctx_vec = fns.qadd(\n ctx_vec,\n self.QWeight(theta.ctx_post_proj_b),\n qout_name='ctx_post_proj_add')\n else:\n assert p.num_post_proj > 1, (\n 'atten_idx is not None, this means there are multiple post '\n 'projections, and p.num_post_proj is supposed to be > 1. However '\n 'you set p.num_post_proj=%s .' % p.num_post_proj)\n bs_range = [tf.range(batch_size)]\n select = tf.transpose(tf.concat([bs_range, [atten_idx]], axis=0))\n # => [batch, dim, num_langs]\n ctx_vec = tf.einsum('ab,bcd->acd', ctx_vec, theta.ctx_post_proj)\n if p.use_bias:\n ctx_vec += tf.expand_dims(theta.ctx_post_proj_b, 0)\n # => [batch, num_langs, dim]\n ctx_vec = tf.transpose(ctx_vec, [0, 2, 1])\n # => [batch, dim]\n ctx_vec = tf.gather_nd(ctx_vec, select)\n ctx_vec = self.ProcessProjectionVec(theta, ctx_vec, 'ctx_post')\n\n # explicitly name this tensor for potential future reference\n multi_headed_atten_prob = tf.reshape(\n prob, [batch_size, num_heads, -1], name='multi_headed_atten_prob')\n prob = self.QRAct(\n tf.reduce_mean(multi_headed_atten_prob, 1),\n quant_utils.QDistribution.SOFTMAX,\n domain='softmax')\n if isinstance(attention_state, py_utils.NestedMap):\n att_state = attention_state\n if 'emit_probs' in attention_state:\n att_state = new_inner_state\n elif 'inner' in attention_state:\n att_state.inner = new_inner_state\n else:\n att_state = new_inner_state\n if p.attention_head_prob_index >= 0:\n selected_prob_head = multi_headed_atten_prob[:, p.\n attention_head_prob_index, :]\n att_state.selected_attention_head_probs = selected_prob_head\n att_state = _RecursiveReshape(att_state, [batch_size, -1])\n return ctx_vec, prob, att_state\n\n @py_utils.NameScopeDecorator(\n 'MultiHeadedAttention/ComputeContextVectorWithAttenProbs')\n def ComputeContextVectorWithAttenProbs(self, theta, packed_context,\n atten_probs):\n \"\"\"Computes the context vector given the attention probabilities.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_context: Concated source contexts with shape [batch_size *\n num_heads, time, context_dim // num_heads].\n atten_probs: The attention probability vector: [batch_size * num_heads,\n time].\n
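\n For example (hypothetical sizes, batch_size=2, num_heads=4, time=7,\n context_dim=32): atten_probs [8, 7] is expanded to [8, 1, 7] and\n matmul'd with packed_context [8, 7, 8], giving [8, 1, 8], which is\n reshaped to a [2, 32] context vector before any post projection.\n\n Returns:\n The 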
attention context vector shaped [target_batch, source_dim].\n If p.enable_ctx_post_proj is false, source_dim = context_dim,\n otherwise, source_dim = p.ctx_post_proj_dim.\n \"\"\"\n p = self.params\n num_heads = p.num_attention_heads\n # packed_context: [batch_size * num_head, num_style,\n # hidden_dim / num_head]\n # inp: [batch_size * num_head, num_style]\n packed_context = py_utils.with_dependencies([\n py_utils.assert_shape_match([py_utils.GetShape(packed_context)[0]],\n [py_utils.GetShape(atten_probs)[0]])\n ], packed_context)\n b_size = py_utils.GetShape(packed_context)[0] // num_heads\n ctx_vec = tf.reshape(\n tf.matmul(tf.expand_dims(atten_probs, 1), packed_context), [b_size, -1])\n if p.enable_ctx_post_proj:\n ctx_vec_proj = tf.matmul(ctx_vec, theta.ctx_post_proj)\n ctx_vec_proj += theta.ctx_post_proj_b\n ctx_vec_proj = self.ProcessProjectionVec(theta, ctx_vec_proj, 'ctx_post')\n else:\n ctx_vec_proj = ctx_vec\n return ctx_vec_proj, ctx_vec\n\n def PackCachedSource(self, cached_src):\n p = self.params\n concated_source_vecs = cached_src.source_vecs\n concated_source_contexts = cached_src.source_contexts\n source_padding = cached_src.source_padding\n source_segment_id = cached_src.source_segment_id\n batch_size = py_utils.GetShape(concated_source_vecs)[1]\n src_seq_len = py_utils.GetShape(concated_source_vecs)[0]\n num_heads = p.num_attention_heads\n packed_src = py_utils.NestedMap()\n packed_src.source_vecs = tf.reshape(\n concated_source_vecs, [src_seq_len, batch_size * num_heads, -1])\n # TODO(yonghui): Rewrite the following with just one transpose.\n packed_src.source_contexts = tf.transpose(\n tf.reshape(concated_source_contexts,\n [src_seq_len, batch_size * num_heads, -1]), [1, 0, 2])\n if source_padding is not None:\n packed_src.source_padding = tf.reshape(\n source_padding, [src_seq_len, batch_size * num_heads])\n else:\n packed_src.source_padding = tf.zeros(\n [src_seq_len, batch_size * num_heads], dtype=py_utils.FPropDtype(p))\n if source_segment_id is None:\n packed_src.source_segment_id = tf.zeros(\n [src_seq_len, batch_size * num_heads],\n dtype=packed_src.source_padding.dtype)\n else:\n packed_src.source_segment_id = tf.reshape(\n source_segment_id, [src_seq_len, batch_size * num_heads])\n return packed_src\n\n @py_utils.NameScopeDecorator(\n 'MultiHeadedAttention/ComputeContextVectorWithCachedSource')\n def ComputeContextVectorWithCachedSource(self,\n theta,\n cached_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Same as the ComputeContextVectorWithSource api above, except values ...\n\n in source_vecs, source_contexts and source_padding are ordered differently.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n cached_src: A `.NestedMap` object returned by ExtendSourcePacked.\n query_vec: a tensor of shape [target_batch, query_dim].\n attention_state: previous attention state. It is not used in\n AdditiveAttention, and is simply passed through.\n per_step_source_padding: Source sequence padding to apply at this step. 
If\n not None, it should be of shape [target_batch_size, source_length].\n query_segment_id: a tensor of shape [target_batch].\n\n Returns:\n A tuple of 3 tensors:\n\n - The attention context vector: [target_batch, source_dim]\n - The attention probability vector: [target_batch, time]\n - The new attention mechanism state: possibly nested tuple of tensors with\n dimensions [target_batch....]\n \"\"\"\n return self.ComputeContextVectorWithSource(\n theta, self.PackCachedSource(cached_src), query_vec, attention_state,\n per_step_source_padding, query_segment_id)\n\n\nclass LocationSensitiveAttention(BaseAttentionLayer):\n \"\"\"An attention that also takes into account previously attended locations.\n\n See section 2.2 of this paper for a description of this technique:\n http://papers.nips.cc/paper/5847-attention-based-models-for-speech-recognition.pdf\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for this LocationSensitiveAttention class.\"\"\"\n p = super().Params()\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('location_filter_size', 0,\n 'Location filter size, should be an odd number e.g. 31.')\n p.Define('location_num_filters', 0, 'Number of location filters, e.g. 32.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('hidden_dim', 0, 'Number of hidden nodes.')\n p.Define(\n 'same_batch_size', False,\n 'True iff the source and target sequence has the same batch size.')\n p.Define(\n 'location_features', ['PREV_PROBS'],\n 'List signals to run the convolutions on. Possible options are: '\n 'PREV_PROBS, CUMULATIVE_PROBS.')\n\n # Often the attention context output needs to be concated\n # with tensors from another layer. This allows them to share\n # quantization parameters. By convention, all attention layers\n # need to include their context output vectors in this domain.\n p.qdomain.Define('atten_context', None,\n 'Quantization domain for attention context.')\n\n # Fill in reasonable default for params init\n p.params_init = py_utils.WeightInit.GaussianSqrtDim()\n return p\n\n def __init__(self, params):\n \"\"\"Constructs an LocationSensitiveAttention object.\"\"\"\n super().__init__(params)\n p = self.params\n self._is_quantized = p.qdomain.default is not None\n assert not p.packed_input, ('Packed input is not supported yet for '\n 'LocationSensitiveAttention.')\n\n if p.atten_dropout_prob != 0:\n raise NotImplementedError('dropout is not supported')\n\n def AttenLogits(inputs):\n \"\"\"Generates logits.\"\"\"\n fns = self.fns\n\n def CollapseOutDim(x):\n return tf.reshape(x, [-1, tf.shape(x)[-1]])\n\n # => [sl, sb, hd]\n location_feats = tf.transpose(inputs.location_feats, [2, 0, 1])\n location_hidden = fns.qmatmul(\n CollapseOutDim(location_feats),\n inputs.location_var,\n qout_name='logits_mul')\n\n sl = py_utils.GetShape(location_feats)[0]\n tb = py_utils.GetShape(location_feats)[1]\n hd = py_utils.GetShape(inputs.location_var)[1]\n location_hidden = tf.reshape(location_hidden, [sl, tb, hd])\n sb = py_utils.GetShape(inputs.query_vec_reshaped)[2]\n bs_mult = py_utils.GetShape(inputs.query_vec_reshaped)[1]\n location_hidden = tf.reshape(location_hidden, [sl, bs_mult, sb, hd])\n\n # Shape of summed is [sl, tb/sb, sb, hidden_dim].\n summed = fns.qadd(\n inputs.concated_source_vecs,\n inputs.query_vec_reshaped,\n qout_name='logits_add')\n summed = fns.qadd(summed, location_hidden, qout_name='logits_bias')\n summed = fns.qtanh(summed)\n # logits is of shape [sl * tb/sb * sb, 1]. 
Computes dot product\n # between v with every rows in 'summed'. Then we reshape the\n # result to be of shape [sl, tb/sb, sb].\n logits = fns.qmatmul(\n tf.reshape(summed, [-1, p.hidden_dim]),\n tf.reshape(inputs.hidden_v, [p.hidden_dim, 1]),\n qout_name='logits')\n logits = tf.reshape(logits, py_utils.GetShape(summed)[:3])\n return logits\n\n def AttenLogitsSameBatchSize(inputs):\n \"\"\"Generates logits.\n\n Optimized code path for when the target and the source have the same batch\n size.\n\n Args:\n inputs: a NestedMap containing:\n - concated_source_vecs: Tensor of shape [sl, batch, dim]\n - query_vec_transformed: Tensor of shape [batch, dim]\n - hidden_v: Tensor of shape [dim]\n - location_feats: Tensor of shape [batch, location_feature_dim, sl]\n - location_var: Tensor of shape [location_feature_dim, dim]\n\n Returns:\n logits in the shape [sl, batch_size].\n \"\"\"\n\n def CollapseOutDim(x):\n return tf.reshape(x, [-1, tf.shape(x)[-1]])\n\n fns = self.fns\n # => [sl, sb, hd]\n location_feats = tf.transpose(inputs.location_feats, [2, 0, 1])\n location_hidden = fns.qmatmul(\n CollapseOutDim(location_feats),\n inputs.location_var,\n qout_name='logits_mul')\n sl = tf.shape(location_feats)[0]\n tb = tf.shape(location_feats)[1]\n hd = tf.shape(inputs.location_var)[1]\n location_hidden = tf.reshape(location_hidden, [sl, tb, hd])\n\n # Shape of summed is [sl, sb, hidden_dim].\n summed = fns.qadd(\n inputs.concated_source_vecs,\n tf.expand_dims(inputs.query_vec_transformed, 0),\n qout_name='logits_add')\n\n summed = fns.qadd(summed, location_hidden, qout_name='logits_bias')\n summed = fns.qtanh(summed)\n\n # logits is of shape [sl * sb, 1]. Computes dot product\n # between v with every rows in 'summed'. Then we reshape the\n # result to be of shape [sl, tb].\n logits = fns.qmatmul(\n tf.reshape(summed, [-1, p.hidden_dim]),\n tf.reshape(inputs.hidden_v, [p.hidden_dim, 1]),\n qout_name='logits')\n logits = tf.reshape(logits, py_utils.GetShape(summed)[:2])\n return logits\n\n def Atten(hidden_var, query_var, source_padding, concated_source_vecs,\n concated_source_contexts, query_vec, attention_state,\n location_filter_var, location_var, per_step_source_padding):\n \"\"\"Computes the attention context vector.\"\"\"\n p = self.params\n # attention_state shape [batch, len(p.location_features), slen]\n # it contains previous and accumulated attention probabilites.\n attention_state = py_utils.HasShape(attention_state,\n [-1, len(p.location_features), -1])\n\n fns = self.fns\n location_feats = self._ApplyConv(attention_state, location_filter_var)\n\n # concated_source_vecs is of shape [sl, sb, dims]\n # concated_source_contexts is of shape [sb, sl, context_dim]\n # query_vec is of shape [tb, dims]\n sb = py_utils.GetShape(concated_source_vecs)[1]\n tb = py_utils.GetShape(query_vec)[0]\n multiplier = tb // sb\n # concated_source_vecs is reshaped to [sl, 1, sb, hidden_dims]\n concated_source_vecs = tf.expand_dims(concated_source_vecs, 1)\n query_vec_transformed = fns.qmatmul(\n query_vec, query_var, qout_name='atten_matmul')\n # query_vec is reshaped to [1, tb/sb, sb, hidden_dims].\n query_vec_reshaped = tf.reshape(query_vec_transformed,\n [1, multiplier, sb, p.hidden_dim])\n # logits is of shape [sl, tb/sb, sb]\n logits = _ConditionalCallDefun(\n self._is_quantized, AttenLogits,\n py_utils.NestedMap(\n concated_source_vecs=concated_source_vecs,\n query_vec_reshaped=query_vec_reshaped,\n hidden_v=hidden_var,\n location_feats=location_feats,\n location_var=location_var))\n # Take out the padding 
states.\n # _source_padding is of shape [sl, sb].\n # reshaped to [sl, 1, sb].\n source_padding = tf.expand_dims(source_padding, 1)\n per_step_source_padding = tf.reshape(\n tf.transpose(per_step_source_padding), [-1, multiplier, sb])\n source_padding = tf.add(source_padding, per_step_source_padding)\n source_padding = self.QRAct(source_padding,\n quant_utils.QDistribution.PADDING)\n\n # Reshape logits to a matrix of shape [tb, sl] and takes the\n # softmax to compute the probabilities.\n logits = tf.transpose(tf.reshape(logits, [-1, tb]))\n source_padding = tf.transpose(tf.reshape(source_padding, [-1, tb]))\n probs = self._PaddedSoftmax(logits, source_padding)\n # Reshape probs to be of shape [tb/sb, sb, sl].\n probs_reshaped = tf.reshape(probs, [multiplier, sb, -1])\n # Transpose probs to be of shape [sb, tb/sb, sl]\n probs_reshaped = tf.transpose(probs_reshaped, [1, 0, 2])\n # [sb, tb/sb, sl] * [sb, sl, context_dim] = [sb, tb/sb, context_dim]\n summed = fns.qbatchmatmul(\n tf.cast(probs_reshaped, concated_source_contexts.dtype),\n concated_source_contexts,\n qout_name='atten_context')\n # summed is of shape [tb/sb, sb, context_dim]\n summed = tf.transpose(summed, [1, 0, 2])\n return tf.reshape(summed, [tb, -1]), probs\n\n def AttenSameBatchSize(hidden_var, query_var, source_padding,\n concated_source_vecs, concated_source_contexts,\n query_vec, attention_state, location_filter_var,\n location_var, per_step_source_padding):\n \"\"\"Computes the attention context vector.\n\n Optimized code path for when source and target have the same batch size.\n \"\"\"\n del per_step_source_padding\n p = self.params\n # attention_state shape [batch, len(p.location_features), slen]\n # it contains previous and accumulated attention probabilites.\n attention_state = py_utils.HasShape(attention_state,\n [-1, len(p.location_features), -1])\n\n fns = self.fns\n location_feats = self._ApplyConv(attention_state, location_filter_var)\n query_vec_transformed = fns.qmatmul(\n query_vec, query_var, qout_name='atten_matmul')\n # logits is of shape [sl, sb]\n logits = _ConditionalCallDefun(\n not self._is_quantized, AttenLogitsSameBatchSize,\n py_utils.NestedMap(\n concated_source_vecs=concated_source_vecs,\n query_vec_transformed=query_vec_transformed,\n hidden_v=hidden_var,\n location_feats=location_feats,\n location_var=location_var))\n # => [sl, tb]\n logits.set_shape(source_padding.shape)\n # Reshape logits to a matrix of shape [tb, sl] and takes the\n # softmax to compute the probabilities.\n logits = tf.transpose(logits)\n source_padding = tf.transpose(source_padding)\n probs = self._PaddedSoftmax(logits, source_padding)\n summed = fns.qbatchmatmul(\n tf.cast(tf.expand_dims(probs, 1), concated_source_contexts.dtype),\n concated_source_contexts,\n qout_name='atten_context')\n return tf.squeeze(summed, 1), probs\n\n if p.same_batch_size:\n self._ctx_vec = AttenSameBatchSize\n else:\n self._ctx_vec = Atten\n\n def EncodeSource(src_w, vecs, ctxs):\n fns = self.fns\n time, batch = py_utils.GetShape(vecs, 2)\n ctxs = py_utils.HasShape(ctxs, [time, batch, -1])\n transformed_vecs = tf.reshape(\n fns.qmatmul(\n tf.reshape(vecs, [-1, p.source_dim]),\n src_w,\n qout_name='encode_matmul'), [time, batch, -1])\n transposed_ctxs = tf.transpose(ctxs, [1, 0, 2])\n return transformed_vecs, transposed_ctxs\n\n self._encode_source = EncodeSource\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n p = self.params\n\n pc = py_utils.WeightParams(\n shape=[p.source_dim, p.hidden_dim],\n init=p.params_init,\n 
dtype=p.dtype,\n collections=['LocationSensitiveAttention_vars'])\n self.CreateVariable('source_var', pc)\n\n pc = py_utils.WeightParams(\n shape=[p.query_dim, p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['LocationSensitiveAttention_vars'])\n self.CreateVariable('query_var', pc)\n\n pc = py_utils.WeightParams(\n shape=[p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['LocationSensitiveAttention_vars'])\n self.CreateVariable('hidden_var', pc)\n\n assert p.location_filter_size % 2 == 1\n assert p.location_num_filters > 0\n\n location_filter_shape = [\n p.location_filter_size,\n len(p.location_features), p.location_num_filters\n ]\n # TODO(yonghui): Don't hard code how params are initialized.\n location_filter_pc = py_utils.WeightParams(\n shape=location_filter_shape,\n init=py_utils.WeightInit.Uniform(0.05),\n dtype=p.dtype,\n collections=['LocationSensitiveAttention_vars'])\n self.CreateVariable('location_filter_var', location_filter_pc)\n location_var_shape = [p.location_num_filters, p.hidden_dim]\n location_pc = py_utils.WeightParams(\n shape=location_var_shape,\n init=py_utils.WeightInit.Uniform(0.05),\n dtype=p.dtype,\n collections=['LocationSensitiveAttention_vars'])\n self.CreateVariable('location_var', location_pc)\n\n self.TrackQTensor('atten_conv')\n self.TrackQTensor('atten_context', domain='atten_context')\n self.TrackQTensor(\n 'atten_matmul',\n 'logits_add',\n 'encode_matmul',\n 'logits_mul',\n 'logits_bias',\n domain='fullyconnected')\n\n def AddGlobalVN(self, theta):\n theta = super().AddGlobalVN(theta)\n theta.source_var = self.AddVN(theta.source_var)\n theta.hidden_var = self.AddVN(theta.hidden_var)\n theta.query_var = self.AddVN(theta.query_var)\n theta.location_filter_var = self.AddVN(theta.location_filter_var)\n theta.location_var = self.AddVN(theta.location_var)\n return theta\n\n def _ApplyConv(self, attention_state, location_filter_var):\n \"\"\"Applies the convolution on attention state.\"\"\"\n p = self.params\n fns = self.fns\n attention_state_f32 = attention_state\n location_filter_var_f32 = location_filter_var\n if p.dtype != tf.float32:\n attention_state_f32 = tf.cast(attention_state, tf.float32)\n location_filter_var_f32 = tf.cast(location_filter_var, tf.float32)\n data_format = 'NCW'\n if py_utils.use_xla() in ('', 'cpu'):\n # NCW format is not supported on CPU.\n attention_state_f32 = tf.transpose(attention_state_f32, [0, 2, 1])\n data_format = 'NWC'\n location_feats = fns.qconv1d(\n attention_state_f32,\n location_filter_var_f32,\n 1,\n 'SAME',\n data_format=data_format,\n qout_name='atten_conv')\n if py_utils.use_xla() in ('', 'cpu'):\n location_feats = tf.transpose(location_feats, [0, 2, 1])\n if p.dtype != tf.float32:\n location_feats = tf.cast(location_feats, p.dtype)\n # [sb, hd, sl]\n return location_feats\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n with tf.name_scope(self.params.name):\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n (concated_source_vecs, concated_source_contexts) = (\n self._encode_source(\n self.QWeight(theta.source_var), source_vecs, source_contexts))\n return py_utils.NestedMap(\n # [time, batch_size, hidden_dim].\n source_vecs=concated_source_vecs,\n # [batch_size, time, context_dim].\n # Note the mismatch between `source_vecs` and `source_contexts`. 
In\n # `source_vecs`, time is the first dim, while it is the second dim in\n # `source_contexts`.\n source_contexts=concated_source_contexts,\n # [time, batch_size].\n source_padding=source_padding,\n # [time, batch_size].\n source_segment_id=source_segment_id)\n\n def ZeroAttentionState(self, source_length, decoder_batch_size):\n p = self.params\n dtype = p.dtype.real_dtype\n num_features = len(p.location_features)\n with tf.name_scope(p.name):\n state = tf.concat([\n tf.ones([decoder_batch_size, num_features, 1], dtype=dtype),\n tf.zeros([decoder_batch_size, num_features, source_length - 1],\n dtype=dtype)\n ], 2)\n\n state = self.QRAct(\n state, quant_utils.QDistribution.SOFTMAX, domain='softmax')\n return state\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n query_vec: a tensor of shape [batch_size, query_dim].\n attention_state: If `params().location_features == ['PREV_PROBS',\n 'CUMULATIVE_PROBS']`, then `attention_state` is a tensor of shape\n [batch_size, 2, src_len].\n\n - attention_state[:, 0, :] contains previous attention probabilities.\n - attention_state[:, 1, :] contains a sum over previous timesteps of\n attention probabilities.\n\n per_step_source_padding: Source sequence padding to apply at this step. If\n not None, it should be of shape [target_batch_size, source_length].\n query_segment_id: Query segment id with shape [batch_size].\n\n Note: concated_source_vecs are the vectors that are used to compute the\n attention score between the query_vec and each concated_source_vec. The\n concated_source_contexts are the vectors that compose the result. 
The\n attention context vector is computed as a weighted average of the\n concated_source_contexts, using the scores that were computed using\n concated_source_vecs.\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [batch_size, context_dim]\n - The attention probability vector: [batch_size, time]\n - The new attention mechanism state: possibly nested tuple of tensors with\n dimensions [target_batch, ...]\n \"\"\"\n del query_segment_id\n p = self.params\n concated_source_vecs = packed_src.source_vecs\n concated_source_contexts = packed_src.source_contexts\n source_padding = packed_src.source_padding\n if p.same_batch_size:\n assert per_step_source_padding is None\n query_batch_size = py_utils.GetShape(query_vec)[0]\n source_length = py_utils.GetShape(source_padding)[0]\n if per_step_source_padding is None:\n zero = tf.constant(0.0, dtype=query_vec.dtype)\n per_step_source_padding = tf.fill([query_batch_size, source_length], zero)\n per_step_source_padding = py_utils.HasShape(\n per_step_source_padding, [query_batch_size, source_length])\n hidden = self.AddVN(theta.hidden_var, per_step=True)\n query = self.AddVN(theta.query_var, per_step=True)\n location_filter = self.AddVN(theta.location_filter_var, per_step=True)\n location = self.AddVN(theta.location_var, per_step=True)\n\n ctx_vec, prob = self._ctx_vec(hidden, query, source_padding,\n concated_source_vecs,\n concated_source_contexts, query_vec,\n attention_state, location_filter, location,\n per_step_source_padding)\n\n new_feats = {'PREV_PROBS': prob}\n if 'CUMULATIVE_PROBS' in p.location_features:\n # Quantization must match the _PaddedSoftmax method.\n cum_prob_index = p.location_features.index('CUMULATIVE_PROBS')\n cum_probs = tf.add(prob, attention_state[:, cum_prob_index, :])\n cum_probs = self.QRAct(\n cum_probs, quant_utils.QDistribution.SOFTMAX, domain='softmax')\n new_feats['CUMULATIVE_PROBS'] = cum_probs\n new_attention_state = tf.stack([new_feats[f] for f in p.location_features],\n axis=1)\n return ctx_vec, prob, new_attention_state\n\n\ndef MergeSourcePaddingWithPerStepSourcePadding(source_padding,\n per_step_source_padding, tb):\n \"\"\"Merges source padding with per-step source padding.\n\n Args:\n source_padding: [sl, sb].\n per_step_source_padding: [tb, sl].\n tb: target batch size.\n\n Returns:\n A tensor of shape [tb, sl].\n \"\"\"\n # source_padding is of shape [sl, sb].\n sl = py_utils.GetShape(source_padding)[0]\n sb = py_utils.GetShape(source_padding)[1]\n\n if per_step_source_padding is None:\n zero = tf.constant(0.0, dtype=source_padding.dtype)\n per_step_source_padding = tf.fill([tb, sl], zero)\n per_step_source_padding = py_utils.HasShape(per_step_source_padding, [tb, sl])\n\n # Transpose and reshape source_padding to [1, sb, sl].\n source_padding = tf.expand_dims(tf.transpose(source_padding), 0)\n # Merge source_padding and per_step_source_padding.\n source_padding = tf.maximum(source_padding,\n tf.reshape(per_step_source_padding, [-1, sb, sl]))\n return tf.reshape(source_padding, [tb, -1])\n\n\nclass MonotonicAttention(BaseAttentionLayer):\n \"\"\"An attention mechanism which enforces monotonic alignments.\n\n This layer implements the monotonic attention mechanism described in\n Online and Linear-Time Attention by Enforcing Monotonic Alignments\n (https://arxiv.org/abs/1704.00784). 
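For intuition: with hard sigmoids, if the previous attention was one-hot at\n source position j, the new attention is one-hot at the first position\n k >= j whose energy is positive (padded positions are never chosen), and\n all-zero when no such k exists.\n\n 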
This layer is used in exactly the same way as\n AdditiveAttention, but both the attention distribution and the energy function\n are different.\n\n Rather than using a softmax, this mechanism feeds the attention energy into a\n (hard or soft) sigmoid and treats the output as Bernoulli probabilities\n representing the probability of attending to a given entry in the input\n sequence, processed from left-to-right. Based on this interpretation, the\n resulting distribution over input sequence entries is computed with a dynamic\n program. The intended use is to train with soft sigmoids according to the\n expected output (setting param hard_sigmoid=False), then use hard sigmoids at\n test time to allow for online and linear-time decoding. To encourage the train\n and test-time behavior to be similar, noise can optionally be added to the\n sigmoid activations during training (param pre_sigmoid_noise). For the energy\n function, rather than computing::\n\n E = dot(v, tanh(dot(W_q, query) + dot(W_s, encoder_states)))\n\n it computes::\n\n E = dot(g*v/||v||, tanh(dot(W_q, query) + dot(W_s, encoder_states) + b)) + r\n\n where g and r are scalars and b is a vector, and ||v|| is the L2 norm of v.\n These modifications address the fact that the sigmoids in the\n monotonic attention mechanism are sensitive to offset and a bit harder to\n train compared to the softmax function. It can be helpful to initialize the\n energy bias scalar r to a negative value (param hidden_bias_init).\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for this MonotonicAttention class.\"\"\"\n p = super().Params()\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('hidden_dim', 0, 'Number of hidden nodes.')\n p.Define('pre_sigmoid_noise', 0, 'Standard deviation of pre-sigmoid noise.')\n p.Define('hidden_bias_init', -1, 'Initial value of hidden bias.')\n p.Define('hard_sigmoid', False, 'Whether to use a hard sigmoid.')\n # Fill in reasonable default for params init\n p.params_init = py_utils.WeightInit.GaussianSqrtDim()\n return p\n\n def __init__(self, params):\n \"\"\"Constructs a MonotonicAttention object.\"\"\"\n super().__init__(params)\n p = self.params\n assert not p.packed_input, ('Packed input not supported for Monotonic '\n 'Attention.')\n if p.atten_dropout_prob != 0:\n raise NotImplementedError('dropout is not supported')\n\n # When running eval, don't add pre-sigmoid noise, and use a hard sigmoid to\n # match behavior of online decoding.\n if self.do_eval:\n p.pre_sigmoid_noise = 0.\n p.hard_sigmoid = True\n\n def EncodeSource(src_w, vecs, ctxs):\n time, batch = py_utils.GetShape(vecs, 2)\n ctxs = py_utils.HasShape(ctxs, [time, batch, -1])\n transformed_vecs = tf.reshape(\n py_utils.Matmul(tf.reshape(vecs, [-1, p.source_dim]), src_w),\n [time, batch, -1])\n transposed_ctxs = tf.transpose(ctxs, [1, 0, 2])\n return transformed_vecs, transposed_ctxs\n\n self._encode_source = EncodeSource\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n p = self.params\n\n # source is the weight matrix for the memory/encoder states\n pc = py_utils.WeightParams(\n shape=[p.source_dim, p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['MonotonicAttention_vars'])\n self.CreateVariable('source_var', pc)\n\n # query is the weight matrix for the query/decoder RNN state\n pc = py_utils.WeightParams(\n shape=[p.query_dim, p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['MonotonicAttention_vars'])\n 
self.CreateVariable('query_var', pc)\n\n # hidden is the pre-softmax vector which converts from tanh to scalar\n pc = py_utils.WeightParams(\n shape=[p.hidden_dim],\n init=p.params_init,\n dtype=p.dtype,\n collections=['MonotonicAttention_vars'])\n self.CreateVariable('hidden_var', pc)\n\n # energy_bias is the bias vector which appears inside of tanh\n # Initialize the bias vector to all zeros\n pc = py_utils.WeightParams(\n shape=[p.hidden_dim],\n init=py_utils.WeightInit.Constant(0.0),\n dtype=p.dtype,\n collections=['MonotonicAttention_vars'])\n self.CreateVariable('energy_bias_var', pc)\n\n # hidden_scale is the weight normalization scale for hidden\n # Initialize so that the initial scale is 1/sqrt(hidden_dim)\n pc = py_utils.WeightParams(\n shape=[],\n init=py_utils.WeightInit.Constant(1 / np.sqrt(p.hidden_dim)),\n dtype=p.dtype,\n collections=['MonotonicAttention_vars'])\n self.CreateVariable('hidden_scale_var', pc)\n\n # hidden_bias is the bias scalar applied before the sigmoid\n # Use the hidden_bias_init hyperparam to set the initial value\n pc = py_utils.WeightParams(\n shape=[],\n init=py_utils.WeightInit.Constant(p.hidden_bias_init),\n dtype=p.dtype,\n collections=['MonotonicAttention_vars'])\n self.CreateVariable('hidden_bias_var', pc)\n\n def AddGlobalVN(self, theta):\n theta = super().AddGlobalVN(theta)\n theta.source_var = self.AddVN(theta.source_var)\n theta.hidden_var = self.AddVN(theta.hidden_var)\n theta.query_var = self.AddVN(theta.query_var)\n return theta\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n with tf.name_scope(self.params.name):\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n (concated_source_vecs, concated_source_contexts) = (\n self._encode_source(theta.source_var, source_vecs, source_contexts))\n return py_utils.NestedMap(\n # [time, batch_size, hidden_dim].\n source_vecs=concated_source_vecs,\n # [batch_size, time, context_dim].\n # Note the mismatch between `source_vecs` and `source_contexts`. In\n # `source_vecs`, time is the first dim, while it is the second dim in\n # `source_contexts`.\n source_contexts=concated_source_contexts,\n # [time, batch_size].\n source_padding=source_padding,\n # [time, batch_size].\n source_segment_id=source_segment_id)\n\n def ZeroAttentionState(self, source_length, decoder_batch_size):\n p = self.params\n dtype = p.dtype\n with tf.name_scope(p.name):\n # Set initial previous attention to [1, 0, ... 0] to avoid special-casing\n emit_probs = tf.one_hot(\n tf.zeros((decoder_batch_size,), dtype=tf.int32),\n source_length,\n dtype=dtype)\n return py_utils.NestedMap(emit_probs=emit_probs)\n\n def ComputeProbabilities(self, theta, concated_source_vecs,\n merged_source_padding, query_vec, attention_state):\n \"\"\"Computes probabilities of emissions.\"\"\"\n\n # concated_source_contexts is of shape [sb, sl, context_dim]\n # query_vec is of shape [tb, dims]\n sb = tf.shape(concated_source_vecs)[1]\n tb = tf.shape(query_vec)[0]\n multiplier = tb // sb\n\n p = self.params\n\n def AttenLogits(inputs):\n \"\"\"Computes logits from source, query, and variables.\n\n Args:\n inputs: a NestedMap containing:\n - concated_source_vecs: [sl, sb, hidden_dims].\n - query_vec: [tb, query_dim].\n - query_v: [query_dim, hidden_dim]\n - energy_b: [hidden_dim].\n - hidden_v: [hidden_dim].\n - hidden_g: [].\n - hidden_b: [].\n\n Returns:\n logits shaped [tb, sl].\n \"\"\"\n # Apply query matrix to query. 
Becomes [tb, hidden_dim].\n query_vec_transformed = py_utils.Matmul(\n inputs.query_vec, inputs.query_v, name='query_transformation')\n # query_vec is reshaped to [1, tb/sb, sb, hidden_dim].\n query_vec_reshaped = tf.reshape(query_vec_transformed,\n [1, multiplier, sb, p.hidden_dim])\n\n # [sl, 1, sb, hidden_dim].\n concated_source_vecs = tf.expand_dims(inputs.concated_source_vecs, 1)\n energy_b = tf.reshape(inputs.energy_b, [1, 1, 1, -1])\n # Shape of summed is [sl, tb/sb, sb, hidden_dim].\n summed = tf.tanh(concated_source_vecs + query_vec_reshaped + energy_b)\n hidden_v = inputs.hidden_g * tf.nn.l2_normalize(inputs.hidden_v, axis=0)\n # logits is of shape [sl * tb/sb * sb, 1]. Computes dot product\n # between v with every rows in 'summed'. Then we reshape the\n # result to be of shape [sl, tb/sb, sb].\n #\n # Another equivalent way is to do:\n # logits = tf.reduce_sum(summed *\n # tf.reshape(v, [1, 1, 1, hidden_dim]), 3)\n logits = py_utils.Matmul(\n tf.reshape(summed, [-1, p.hidden_dim]),\n tf.reshape(hidden_v, [p.hidden_dim, 1]))\n logits += inputs.hidden_b\n # [tb, sl].\n logits = tf.transpose(tf.reshape(logits, [-1, tb]), [1, 0])\n return logits\n\n with tf.name_scope('logits'):\n logits = py_utils.CallDefun(\n AttenLogits,\n py_utils.NestedMap(\n concated_source_vecs=concated_source_vecs,\n query_vec=query_vec,\n query_v=theta.query_var,\n energy_b=theta.energy_bias_var,\n hidden_v=theta.hidden_var,\n hidden_g=theta.hidden_scale_var,\n hidden_b=theta.hidden_bias_var))\n\n previous_attention = attention_state.emit_probs\n with tf.name_scope('prob'):\n if self.params.hard_sigmoid:\n # If using a hard sigmoid, just compare against 0\n p_choose_i = tf.cast(tf.greater(logits, 0), logits.dtype)\n # Never choose padded values.\n p_choose_i = tf.where(merged_source_padding > 0.0,\n tf.zeros_like(p_choose_i), p_choose_i)\n # Compute probability distribution assuming hard probabilities\n probs = MonotonicAttentionProb(p_choose_i, previous_attention, 'hard')\n else:\n # Compute pre-sigmoid noise.\n activation_noise = tf.random.stateless_normal(\n py_utils.GetShape(logits),\n py_utils.GenerateStepSeedPair(p),\n dtype=logits.dtype)\n # Compute sigmoid probabilities.\n p_choose_i = tf.nn.sigmoid(logits + self.params.pre_sigmoid_noise *\n activation_noise)\n # Never choose padded values.\n p_choose_i = tf.where(merged_source_padding > 0,\n tf.zeros_like(p_choose_i), p_choose_i)\n # Compute attention distribution\n probs = MonotonicAttentionProb(p_choose_i, previous_attention,\n 'parallel')\n\n # [tb, sl].\n return probs, py_utils.NestedMap(emit_probs=probs)\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n query_vec: a tensor of shape [batch_size, query_dim].\n attention_state: The attention probs computed at the previous timestep.\n per_step_source_padding: Source sequence padding to apply at this step. If\n not None, it should be of shape [target_batch_size, source_length].\n query_segment_id: a tensor of shape [batch_size].\n Note: concated_source_vecs are the vectors that are used to compute the\n attention score between the query_vec and each concated_source_vec. 
The\n concated_source_contexts are the vectors that compose the result. The\n attention context vector is computed as a weighted average of the\n concated_source_contexts, using the scores that were computed using\n concated_source_vecs.\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [batch_size, context_dim]\n - The attention probability vector: [batch_size, time]\n - The new attention mechanism state: a `.NestedMap` wrapping the emission\n probabilities, fed back as `attention_state` at the next step.\n \"\"\"\n del query_segment_id\n concated_source_vecs = packed_src.source_vecs\n concated_source_contexts = packed_src.source_contexts\n source_padding = packed_src.source_padding\n sb = tf.shape(concated_source_vecs)[1]\n tb = tf.shape(query_vec)[0]\n multiplier = tb // sb\n merged_source_padding = MergeSourcePaddingWithPerStepSourcePadding(\n source_padding, per_step_source_padding, tb)\n\n probs, new_state = self.ComputeProbabilities(theta, concated_source_vecs,\n merged_source_padding,\n query_vec, attention_state)\n\n with tf.name_scope('sum'):\n # Reshape probs to be of shape [tb/sb, sb, sl]\n probs_reshaped = tf.reshape(probs, [multiplier, sb, -1])\n # Transpose probs to be of shape [sb, tb/sb, sl]\n probs_reshaped = tf.transpose(probs_reshaped, [1, 0, 2])\n # Batched matmul\n # [sb, tb/sb, sl] * [sb, sl, context_dim] = [sb, tb/sb, context_dim]\n summed = tf.matmul(probs_reshaped, concated_source_contexts)\n # summed is of shape [tb/sb, sb, context_dim]\n summed = tf.transpose(summed, [1, 0, 2])\n ctx_vec = tf.reshape(summed, [tb, -1])\n\n return ctx_vec, probs, new_state\n\n\nclass GmmMonotonicAttention(BaseAttentionLayer):\n \"\"\"A GMM-based monotonic attention module.\n\n Based on \"Generating Sequences With Recurrent Neural Networks\" by Alex Graves.\n Eq [46-51] in https://arxiv.org/abs/1308.0850.\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for this GmmMonotonicAttention class.\"\"\"\n p = super().Params()\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('hidden_dim', 128,\n 'Number of hidden units for the MLP that predicts GMM params.')\n p.Define('max_offset', -1,\n 'Max offset to move the attention pointer. Enabled only when > 0.')\n p.Define('num_mixtures', 5, 'Number of location GMM components.')\n p.Define(\n 'normalize_probs', False,\n 'Whether to normalize probabilities computed by GMM. Otherwise, '\n 'the attention weights (i.e. probabilities) may not add up to '\n '1.0.')\n\n # TODO(ngyuzh): find a good initialization for both TTS and ASR. 
Consider split\n # the layer if it's very sensitive to the initialization\n p.params_init = py_utils.WeightInit.Xavier(0.1)\n return p\n\n def __init__(self, params):\n \"\"\"Constructs a GMM-based monotonic attention module.\"\"\"\n super().__init__(params)\n p = self.params\n if p.atten_dropout_prob != 0:\n raise NotImplementedError('dropout is not supported.')\n\n # TODO(ngyuzh): Compare Sigmoid and other activation functions.\n ff_params = layers.FeedForwardNet.Params().Set(\n name=p.name,\n input_dim=p.query_dim,\n hidden_layer_dims=[p.hidden_dim, p.num_mixtures * 3],\n activation=['SIGMOID', 'NONE'],\n params_init=p.params_init.Copy())\n self.CreateChild('GMM', ff_params)\n\n def ComputeProbs(encoder_positions, priors, means, variances):\n \"\"\"Computes the location GMM probabilities at all encoder positions.\n\n This function assumes that the first 2 dimensions of `priors`, `means`,\n `variances`, and the return value:\n `multiplier (target_batch / source_batch)` and `source_batch` are\n transposed, and `encoder_positions` has only non-one dimensions.\n\n Args:\n encoder_positions: [source_batch, source_length]\n priors: [multiplier, source_batch, num_mixtures]\n means: [multiplier, source_batch, num_mixtures]\n variances: [multiplier, source_batch, num_mixtures]\n\n Returns:\n Probabilities shaped [multiplier, source_batch, source_length].\n \"\"\"\n # [multiplier, source_batch, 1, num_mixtures]\n priors = tf.expand_dims(priors, 2)\n means = tf.expand_dims(means, 2)\n variances = tf.expand_dims(variances, 2)\n epsilon = 1e-8\n\n # [source_batch, source_length, 1]\n encoder_positions = tf.expand_dims(encoder_positions, 2)\n\n # [multiplier, source_batch, source_length, num_mixtures]\n probs = ((priors * tf.math.rsqrt(2 * np.pi * variances + epsilon)) *\n tf.exp(-(encoder_positions - means)**2 /\n (2 * variances + epsilon)))\n\n # [multiplier, source_batch, source_length]\n return tf.reduce_sum(probs, axis=3)\n\n def Atten(source_padding, concated_source_vecs, concated_source_contexts,\n query_vec, priors, means, variances, encoder_positions,\n per_step_source_padding):\n \"\"\"Computes the attention context vector.\n\n Args:\n source_padding: [source_length, source_batch]\n concated_source_vecs: [source_length, source_batch, hidden_dim]\n concated_source_contexts: [source_batch, source_length, context_dim]\n query_vec: [target_batch, query_dim]\n priors: [target_batch, num_mixtures]\n means: [target_batch, num_mixtures]\n variances: [target_batch, num_mixtures]\n encoder_positions: [source_batch, source_length]\n per_step_source_padding: [target_batch, source_length]\n\n Returns:\n Tuple(context vector, atten probs):\n\n - context vector: [target_batch, context_dim]\n - attention probabilities: [target_batch, source_length]\n \"\"\"\n # Note: shape [target_batch] can be converted to\n # [multiplier, source_batch], not [source_batch, multiplier].\n p = self.params\n source_batch = tf.shape(concated_source_vecs)[1]\n target_batch = tf.shape(query_vec)[0]\n multiplier = target_batch // source_batch\n\n # [multiplier, source_batch, num_mixtures]\n priors = tf.reshape(priors, [multiplier, source_batch, p.num_mixtures])\n means = tf.reshape(means, [multiplier, source_batch, p.num_mixtures])\n variances = tf.reshape(variances,\n [multiplier, source_batch, p.num_mixtures])\n\n # [multiplier, source_batch, source_length]\n probs = ComputeProbs(encoder_positions, priors, means, variances)\n\n # [source_batch, source_length]\n source_padding = tf.transpose(source_padding)\n\n # [multiplier, 
source_batch, source_length]\n per_step_source_padding = tf.reshape(per_step_source_padding,\n [multiplier, source_batch, -1])\n source_padding += per_step_source_padding\n source_padding = tf.minimum(source_padding, 1.0)\n\n # [multiplier, source_batch, source_length]\n probs *= (1.0 - source_padding)\n if p.normalize_probs:\n probs /= tf.maximum(tf.reduce_sum(probs, axis=2, keepdims=True), 1e-12)\n\n # [source_batch, multiplier, source_length]\n probs_transposed = tf.transpose(probs, [1, 0, 2])\n\n # Matmul:\n # [source_batch, multiplier, source_length]\n # @ [source_batch, source_length, context_dim]\n # -> [source_batch, multiplier, context_dim]\n context_vector_transposed = tf.matmul(probs_transposed,\n concated_source_contexts)\n\n # [multiplier, source_batch, context_dim]\n context_vector = tf.transpose(context_vector_transposed, [1, 0, 2])\n\n # [target_batch, context_dim], [target_batch, source_length]\n return (tf.reshape(context_vector, [target_batch, -1]),\n tf.reshape(probs, [target_batch, -1]))\n\n self._ctx_vec = Atten\n\n def EncodeSource(vecs, ctxs):\n # TODO(ngyuzh): combine with content-base attention.\n time, batch = py_utils.GetShape(vecs, 2)\n ctxs = py_utils.HasShape(ctxs, [time, batch, -1])\n transposed_ctxs = tf.transpose(ctxs, [1, 0, 2])\n return vecs, transposed_ctxs\n\n self._encode_source = EncodeSource\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n with tf.name_scope(self.params.name):\n if source_segment_id is None:\n source_segment_id = tf.zeros_like(source_padding)\n (concated_source_vecs, concated_source_contexts) = (\n self._encode_source(source_vecs, source_contexts))\n return py_utils.NestedMap(\n # [source_length, source_batch, hidden_dim].\n source_vecs=concated_source_vecs,\n # [source_batch, source_length, context_dim].\n # Note the mismatch between `source_vecs` and `source_contexts`. 
In\n # `source_vecs`, `source_length` is the first dim, while it is the\n # second dim in `source_contexts`.\n source_contexts=concated_source_contexts,\n # [source_length, source_batch].\n source_padding=source_padding,\n # [source_length, source_batch].\n source_segment_id=source_segment_id)\n\n def ZeroAttentionState(self, source_length, decoder_batch_size):\n p = self.params\n\n # [target_batch, num_mixtures]\n position = tf.zeros([decoder_batch_size, p.num_mixtures], dtype=p.dtype)\n position_offsets = tf.zeros([decoder_batch_size, p.num_mixtures],\n dtype=p.dtype)\n variances = tf.ones([decoder_batch_size, p.num_mixtures], dtype=p.dtype)\n priors = tf.zeros([decoder_batch_size, p.num_mixtures], dtype=p.dtype)\n\n # [target_batch, num_mixtures, 4]\n return tf.stack([position, position_offsets, variances, priors], axis=2)\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state,\n per_step_source_padding=None,\n query_segment_id=None):\n \"\"\"Computes the context vector given the current query output.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n packed_src: A `.NestedMap` object returned by PackSource or\n InitForSourcePacked.\n query_vec: a tensor of shape [target_batch, query_dim].\n attention_state: previous attention state, a tensor of shape\n [target_batch, num_mixtures, 4].\n - attention_state[:, :, 0] contains previous location\n - attention_state[:, :, 1] contains previous offset.\n - attention_state[:, :, 2] contains previous variance.\n - attention_state[:, :, 3] contains previous prior.\n per_step_source_padding: Source sequence padding to apply at this step. If\n not None, it should be of shape [target_batch, source_length].\n query_segment_id: a tensor of shape [target_batch].\n Note: concated_source_vecs are the vectors that are used to compute the\n attention score between the query_vec and each concated_source_vec. The\n concated_source_contexts are the vectors that compose the result. 
The\n attention context vector is computed as a weighted average of the\n concated_source_contexts, using the scores that were computed using\n concated_source_vecs.\n\n Returns:\n A tuple of 3 elements.\n\n - The attention context vector: [target_batch, context_dim]\n - The attention probability vector: [target_batch, source_length]\n - The new attention state vector: [target_batch, num_mixtures, 4]\n \"\"\"\n del query_segment_id\n p = self.params\n concated_source_vecs = packed_src.source_vecs\n concated_source_contexts = packed_src.source_contexts\n source_padding = packed_src.source_padding\n\n target_batch = tf.shape(query_vec)[0]\n source_length = tf.shape(source_padding)[0]\n source_batch = tf.shape(source_padding)[1]\n\n # [target_batch, source_length]\n if per_step_source_padding is None:\n per_step_source_padding = tf.zeros([target_batch, source_length],\n dtype=query_vec.dtype)\n per_step_source_padding = py_utils.HasShape(per_step_source_padding,\n [target_batch, source_length])\n\n # [target_batch, num_mixtures * 3]\n out = self.GMM.FProp(theta.GMM, query_vec)\n\n # [target_batch, num_mixtures]\n priors_logits, position_offset_logits, log_variances = tf.split(\n out, 3, axis=1, name='GMM')\n\n log_variances = tf.minimum(log_variances, layers.LOG_SCALE_CLAMP_BOUND)\n variances = tf.exp(log_variances)\n\n priors = tf.nn.softmax(priors_logits)\n\n if p.max_offset > 0:\n position_offset = tf.nn.sigmoid(position_offset_logits)\n position_offset *= p.max_offset\n else:\n position_offset = tf.exp(position_offset_logits)\n\n new_position = attention_state[:, :, 0] + position_offset\n\n # Tile and reshape encoder_positions to [source_batch, source_length]\n # so that it can be evaluated by locations GMMs in a vectorized way.\n encoder_positions = tf.expand_dims(\n tf.cast(tf.range(source_length), tf.float32), 0)\n encoder_positions = tf.tile(encoder_positions, [source_batch, 1])\n\n # [target_batch, context_dim], [target_batch, source_length]\n ctx_vec, prob = self._ctx_vec(source_padding, concated_source_vecs,\n concated_source_contexts, query_vec, priors,\n new_position, variances, encoder_positions,\n per_step_source_padding)\n\n # [target_batch, num_mixtures, 4]\n new_atten_states = tf.stack(\n [new_position, position_offset, variances, priors], axis=2)\n\n return ctx_vec, prob, new_atten_states\n\n\nclass MergerLayer(base_layer.BaseLayer):\n \"\"\"Merges a list of input tensors with various options into a single tensor.\n\n Implements a merger/combiner operator given a list of tensors. 
The merger\n operator outputs a single tensor with the following options (merger_op):\n\n - atten: Applies attention over the set of input tensors given query vector.\n - mean: Takes the mean of input tensors.\n - concat: Concatenates the input tensors over the last dimension.\n - sum: Sum up all the input tensors.\n - weighted_sum: Use learnt weights to combine input tensors.\n - gated_avg: Learnt input dependent gates are used to average tensors.\n\n This class is expected to be called by multi-source/multi-column models.\n \"\"\"\n\n @classmethod\n def Params(cls):\n \"\"\"Params for this MergerLayer class.\"\"\"\n p = super().Params()\n p.Define('merger_op', None, 'How to merge input tensors.')\n p.Define('source_dim', 0, 'Number of source nodes.')\n p.Define('query_dim', 0, 'Number of query nodes.')\n p.Define('hidden_dim', 0, 'Number of hidden nodes.')\n p.Define('attention_tpl', AdditiveAttention.Params(),\n 'Attention used by the merger layer when merger_op is atten.')\n p.Define(\n 'pre_proj_input_dims', None,\n 'If set, should be a list of depths for the tensors to be merged.'\n ' Setting this will result in a pre-projection to source_dim'\n ' before the merger.')\n p.Define(\n 'pre_proj_output_dims', None,\n 'Should be a list of depths which the input tensors specified in '\n 'pre_proj_input_dims need to be projected to. Should match the length '\n 'of pre_proj_input_dims.')\n p.Define(\n 'proj_tpl',\n layers.ProjectionLayer.Params().Set(\n batch_norm=False, weight_norm=True, has_bias=True),\n 'Configs template for the projection layer.')\n p.Define('gated_avg_tpl', layers.GatedAverageLayer.Params(),\n 'Configs template for the gated average layer.')\n p.Define('num_sources', 0, 'If merger_op=weighted_sum, then must specify '\n 'num of sources.')\n p.Define('post_proj', None,\n 'Post projection for the merged context vector.')\n return p\n\n # Merging operation keys supported by this layer.\n MERGER_OPS = ['mean', 'atten', 'concat', 'sum', 'weighted_sum', 'gated_avg']\n\n def __init__(self, params):\n super().__init__(params)\n p = self.params\n if not p.name:\n raise ValueError('Layer must have a specified name!')\n if p.merger_op not in set(self.MERGER_OPS):\n raise ValueError('Merger op must be one of: ', self.MERGER_OPS)\n\n if p.merger_op == 'atten':\n atten_params = p.attention_tpl.Copy()\n atten_params.source_dim = p.source_dim\n atten_params.query_dim = p.query_dim\n atten_params.hidden_dim = p.hidden_dim\n atten_params.dtype = p.dtype\n if atten_params.params_init is None:\n atten_params.params_init = py_utils.WeightInit.Gaussian(\n 1. / math.sqrt(atten_params.source_dim + atten_params.query_dim),\n seed=p.random_seed)\n self.CreateChild('atten', atten_params)\n\n if p.pre_proj_input_dims:\n if not p.pre_proj_output_dims:\n raise ValueError('Output dims should be specified for projection.')\n if len(p.pre_proj_input_dims) != len(p.pre_proj_output_dims):\n raise ValueError(\n 'Output dims should be the same length as input dims. 
'\n 'Expected: %s obtained: %s' %\n (len(p.pre_proj_input_dims), len(p.pre_proj_output_dims)))\n pre_proj_params = []\n for i, (pre_proj_input_dim, pre_proj_output_dim) in enumerate(\n zip(p.pre_proj_input_dims, p.pre_proj_output_dims)):\n proj_p = p.proj_tpl.Copy()\n proj_p.name = 'merger_pre_proj_%d' % i\n proj_p.input_dim = pre_proj_input_dim\n proj_p.output_dim = pre_proj_output_dim\n pre_proj_params.append(proj_p)\n self.CreateChildren('pre_proj', pre_proj_params)\n\n if p.merger_op == 'gated_avg':\n assert p.num_sources > 0, ('For merger_op=gated_avg, must specify '\n 'num_sources > 0.')\n params = p.gated_avg_tpl.Copy()\n params.name = 'g_avg_merger'\n params.num_nodes = p.source_dim\n params.num_inputs = p.num_sources\n self.CreateChild('gated_average', params)\n\n if p.post_proj:\n self.CreateChild('post_proj', p.post_proj)\n\n def _CreateLayerVariables(self):\n super()._CreateLayerVariables()\n p = self.params\n\n if p.merger_op == 'weighted_sum':\n assert p.num_sources > 0, ('For merger_op=weighted_sum, must specify '\n 'num_sources > 0.')\n params_init = py_utils.WeightInit.Constant(1.0 / p.num_sources)\n # Weights to be learned.\n pw = py_utils.WeightParams(\n shape=[p.num_sources],\n init=params_init,\n dtype=p.dtype,\n collections=[self.__class__.__name__ + '_vars'])\n self.CreateVariable('sum_weight', pw)\n\n def _child_variable_scope_override(self):\n return {\n **super()._child_variable_scope_override(), 'atten': [],\n 'gated_average': [],\n 'pre_proj': []\n }\n\n def FProp(self, theta, inputs, query_vec=None):\n \"\"\"Combines the list of input tensors into a single tensor.\n\n Args:\n theta: A `.NestedMap` object containing weights' values of this layer and\n its children layers.\n inputs: A list of tensors of shape [..., hidden_dim] or [...,\n [pre_proj_input_dims[i]]] if pre_proj_input_dims is specified.\n query_vec: A tensor of shape [..., hidden_dim].\n\n Returns:\n A tensor of the same shape with input tensors.\n\n Raises:\n ValueError: p.merger_op is not defined.\n \"\"\"\n p = self.params\n n_sources = len(inputs)\n\n if p.pre_proj_input_dims and len(p.pre_proj_input_dims) != n_sources:\n raise ValueError('pre_proj_input_dims must be specified for each input.')\n\n if n_sources == 1:\n return inputs[0]\n\n # Pre-projection operation.\n if p.pre_proj_input_dims:\n for i in range(n_sources):\n inputs[i] = self.pre_proj[i].FProp(theta.pre_proj[i], inputs[i])\n\n tensor_pairs = list(zip(inputs[:-1], inputs[1:]))\n if p.merger_op == 'mean':\n # Simply take the mean, all dims must match.\n with tf.control_dependencies([\n py_utils.assert_shape_match(tf.shape(t1), tf.shape(t2))\n for t1, t2 in tensor_pairs\n ]):\n output = tf.add_n(inputs) / n_sources\n\n elif p.merger_op == 'sum':\n # Sum up all sources, all dims must match.\n with tf.control_dependencies([\n py_utils.assert_shape_match(tf.shape(t1), tf.shape(t2))\n for t1, t2 in tensor_pairs\n ]):\n output = tf.add_n(inputs)\n\n elif p.merger_op == 'weighted_sum':\n # Weighted sum of all sources, all dims must match.\n # For weighted_sum, assume input is a list of rank 3 tensors\n inputs = tf.stack(inputs)\n inputs = py_utils.HasRank(inputs, 4)\n\n with tf.control_dependencies([\n py_utils.assert_shape_match(tf.shape(t1), tf.shape(t2))\n for t1, t2 in tensor_pairs\n ]):\n w = tf.expand_dims(\n tf.expand_dims(tf.expand_dims(theta.sum_weight, 1), 1), 1)\n w = tf.tile(\n w,\n [1,\n tf.shape(inputs)[1],\n tf.shape(inputs)[2],\n tf.shape(inputs)[3]])\n output = tf.reduce_sum(inputs * w, axis=0)\n\n elif p.merger_op == 
'atten':\n # Apply attention over the concatenated tensor, all dims must match.\n with tf.control_dependencies([\n py_utils.assert_shape_match(tf.shape(t1), tf.shape(t2))\n for t1, t2 in tensor_pairs\n ]):\n inputs = tf.stack(inputs, axis=0)\n batch_size = tf.shape(inputs)[1]\n paddings = tf.zeros([n_sources, batch_size], dtype=inputs.dtype)\n self.atten.InitForSourcePacked(theta.atten, inputs, inputs, paddings)\n output, _, _ = self.atten.ComputeContextVector(\n theta.atten, tf.reshape(query_vec, [-1, p.query_dim]))\n\n elif p.merger_op == 'concat':\n # Concatenate over the last dim, all dims but last must match.\n with tf.control_dependencies([\n py_utils.assert_equal(tf.shape(t1)[:-1],\n tf.shape(t2)[:-1]) for t1, t2 in tensor_pairs\n ]):\n output = tf.concat(inputs, axis=-1)\n\n elif p.merger_op == 'gated_avg':\n output = self.gated_average.FProp(theta.gated_average, inputs)\n\n else:\n raise ValueError('Unrecognized merge op!')\n\n if p.post_proj:\n output = self.post_proj.FProp(theta.post_proj, output)\n\n return output\n\n\nclass MultiSourceAttention(BaseAttentionLayer):\n \"\"\"Attention with multiple source sub-attentions.\n\n It attends to multiple sources and uses one query as input to generates a\n combined attention context. The dimension of the combined context vector is a\n sum of all source context vectors. Each source attention has its separate\n params and is associated with a source key.\n \"\"\"\n\n @classmethod\n def Params(cls):\n p = super().Params()\n p.Define('source_atten_tpls', None,\n 'A list of (source_key, attention_param) '\n 'pairs.')\n p.Define('source_dim', 0, 'Default source dimension.')\n p.Define(\n 'query_dim', 0, 'Number of query nodes. Child attention params '\n 'must have query_dim less or euqal than 0 or equal to this value.')\n p.Define(\n 'primary_source_key', 'source_0', 'Key for the primary source '\n 'whose attention probabilities will be used as an output.')\n p.Define(\n 'atten_merger_tpl',\n MergerLayer.Params().Set(\n params_init=py_utils.WeightInit.Uniform(0.04), merger_op='sum'),\n 'Params to specify how to merge source attention vectors.')\n return p\n\n def __init__(self, params):\n \"\"\"Constructs an MultiSourceAttention object.\"\"\"\n super().__init__(params)\n p = self.params\n for source_key, atten_p in p.source_atten_tpls:\n child_p = atten_p.Copy()\n if child_p.query_dim <= 0:\n child_p.query_dim = p.query_dim\n else:\n assert child_p.query_dim == p.query_dim\n if child_p.source_dim <= 0:\n child_p.source_dim = p.source_dim\n self.CreateChild('atten_%s' % source_key, child_p)\n\n # Initialize source context vector merging layer.\n merger_p = p.atten_merger_tpl.Copy()\n merger_p.name = 'atten_merger'\n merger_p.source_dim = p.source_dim\n merger_p.query_dim = p.query_dim\n self.CreateChild('atten_merger', merger_p)\n\n def PackSource(self,\n theta,\n source_vecs,\n source_contexts,\n source_padding,\n source_segment_id=None):\n p = self.params\n with tf.name_scope(self.params.name):\n packed_src = py_utils.NestedMap()\n for source_key, _ in p.source_atten_tpls:\n packed_src[source_key] = (\n self.children['atten_%s' % source_key].InitForSourcePacked(\n theta['atten_%s' % source_key], source_vecs[source_key],\n source_contexts[source_key], source_padding[source_key],\n source_segment_id[source_key] if source_segment_id else None))\n return packed_src\n\n def ZeroAttentionState(self, source_seq_length, decoder_batch_size):\n p = self.params\n with tf.name_scope(self.params.name):\n return py_utils.NestedMap({\n source_key: 
getattr(self, 'atten_%s' % source_key).ZeroAttentionState(\n source_seq_length[source_key], decoder_batch_size)\n for source_key, _ in p.source_atten_tpls\n })\n\n def ComputeContextVectorWithSource(self,\n theta,\n packed_src,\n query_vec,\n attention_state=None,\n per_step_source_padding=None,\n query_segment_id=None):\n p = self.params\n assert per_step_source_padding is None\n with tf.name_scope(self.params.name):\n result_map = py_utils.NestedMap()\n for source_key, _ in p.source_atten_tpls:\n result_map[source_key] = (\n self.children['atten_%s' %\n source_key].ComputeContextVectorWithSource(\n theta.get('atten_%s' % source_key),\n packed_src[source_key], query_vec,\n attention_state[source_key]\n if attention_state else None,\n per_step_source_padding, query_segment_id))\n return self._CombineContext(theta, result_map, query_vec)\n\n def _CombineContext(self, theta, context_map, query_vec):\n ctxs = context_map.Flatten()\n combined_context = (\n self.atten_merger.FProp(theta.atten_merger, [ctx for ctx, _, _ in ctxs],\n query_vec))\n return (\n combined_context,\n # Return atten_probs of the primary source.\n # TODO(huk): Maybe return a NestedMap.\n context_map[self.params.primary_source_key][1],\n py_utils.NestedMap({\n src_key: context_map[src_key][2]\n for src_key, _ in self.params.source_atten_tpls\n }))\n" ]
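The `ComputeContextVectorWithSource` routine in the code above is the heart of GMM-based monotonic attention: each decoder step predicts per-mixture priors, position offsets, and variances, advances the mixture positions, and scores every encoder position under the resulting mixture of Gaussians. Below is a minimal NumPy sketch of that scoring step, assuming normalized Gaussian components; it is an illustration only, not the lingvo implementation, and every name in it is invented for the example.

import numpy as np

def gmm_attention_probs(priors, positions, variances, source_length):
    # priors / positions / variances: [batch, num_mixtures], already positive.
    encoder_positions = np.arange(source_length, dtype=float)
    # Broadcast to [batch, num_mixtures, source_length].
    diff = encoder_positions[None, None, :] - positions[:, :, None]
    var = variances[:, :, None]
    gaussians = np.exp(-0.5 * diff ** 2 / var) / np.sqrt(2.0 * np.pi * var)
    # Mix the components with the priors, then renormalize over source positions.
    probs = (priors[:, :, None] * gaussians).sum(axis=1)
    return probs / np.maximum(probs.sum(axis=-1, keepdims=True), 1e-12)

# One decoder step for a batch of 2 with 3 mixtures over 8 source positions.
priors = np.full((2, 3), 1.0 / 3.0)
positions = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
variances = np.ones((2, 3))
print(gmm_attention_probs(priors, positions, variances, 8).shape)  # (2, 8)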
[ [ "tensorflow.compat.v2.nest.map_structure", "tensorflow.compat.v2.io.gfile.exists", "tensorflow.compat.v2.io.gfile.makedirs" ], [ "tensorflow.compat.v2.nest.map_structure", "tensorflow.compat.v2.function", "tensorflow.compat.v2.errors.OutOfRangeError" ], [ "numpy.zeros", "numpy.random.rand", "numpy.random.randint" ], [ "tensorflow.python.ops.inplace_ops.alias_inplace_update", "numpy.sqrt", "numpy.finfo" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "2.7", "1.12", "2.6", "2.2", "1.13", "2.3", "2.4", "2.9", "2.5", "2.8", "2.10" ] } ]
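The `MergerLayer` in the same record combines a list of source tensors with one of several ops (`mean`, `sum`, `weighted_sum`, `concat`, `atten`, `gated_avg`). Here is a rough NumPy sketch of the shape-preserving ops, assuming all inputs share one shape; the real layer adds shape assertions, a learned `sum_weight` variable, attention-based merging, and optional pre/post projections.

import numpy as np

def merge(inputs, op, weights=None):
    stacked = np.stack(inputs)  # [n_sources, ..., dim]
    if op == 'mean':
        return stacked.mean(axis=0)
    if op == 'sum':
        return stacked.sum(axis=0)
    if op == 'weighted_sum':
        # Stand-in for the layer's learned 'sum_weight' variable.
        w = np.asarray(weights).reshape((-1,) + (1,) * (stacked.ndim - 1))
        return (stacked * w).sum(axis=0)
    if op == 'concat':
        return np.concatenate(inputs, axis=-1)
    raise ValueError('Unrecognized merge op!')

a, b = np.ones((2, 4)), 3.0 * np.ones((2, 4))
print(merge([a, b], 'mean')[0, 0])                        # 2.0
print(merge([a, b], 'weighted_sum', [0.25, 0.75])[0, 0])  # 2.5
print(merge([a, b], 'concat').shape)                      # (2, 8)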
pakesson/scaml
[ "c69d422d6839d75a81426c81fd8d570fa421744b" ]
[ "explain.py" ]
[ "#!/usr/bin/env python\n\nimport sys\nimport math\nimport numpy as np\n\nfrom tensorflow.keras.models import load_model\n\nfrom aes import aes_sbox, aes_sbox_inv\n\nimport matplotlib\nmatplotlib.use('TkAgg')\nimport matplotlib.pyplot as plt\n\ndef get_label(plaintext, key, index):\n return aes_sbox[plaintext[index] ^ key[index]]\n\nnum_classes = 256\nattack_byte = 0\nstart_trace_to_attack = 100\nnumber_of_traces_to_attack = 25\nnumber_of_traces_to_explain = 5\nocclusion_size = 1\n\ndef apply_occlusion(sample, x, occlusion_size=1, occlusion_value=0):\n occluded_sample = np.array(sample, copy=True)\n occluded_sample[x:x+occlusion_size, :] = occlusion_value\n return occluded_sample\n\ndef get_occlusion_sensitivity(samples, model, class_index, occlusion_size=1):\n print(\"Generating occlusion sensitivity maps...\")\n\n confidence_map = np.zeros(math.ceil(samples[0].shape[0] / occlusion_size))\n sensitivity_map = np.zeros(math.ceil(samples[0].shape[0] / occlusion_size))\n\n for idx, sample in enumerate(samples):\n print(f\" Sample {idx}\")\n\n occlusion_value = np.mean(sample)\n\n occlusions = [\n apply_occlusion(sample, x, occlusion_size, occlusion_value)\n for x in range(0, sample.shape[0], occlusion_size)\n ]\n\n predictions = model.predict(np.array(occlusions), batch_size=32)\n target_class_predictions = [\n prediction[class_index[idx]] for prediction in predictions\n ]\n\n for x, confidence in zip(range(sensitivity_map.shape[0]), target_class_predictions):\n confidence_map[x] += confidence\n\n # Mean confidence value\n confidence_map = confidence_map / samples.shape[0]\n sensitivity_map = 1 - confidence_map\n\n # Scale back up\n result = np.zeros(samples[0].shape[0])\n for x in range(result.shape[0]):\n result[x] = sensitivity_map[x // occlusion_size]\n\n return result\n\ndef explain(data, model, class_index, occlusion_size=1):\n # Make sure the data shape is (num_traces, num_points_per_trace, x)\n if len(data.shape) == 2:\n data = data.reshape((1, data.shape[0], data.shape[1]))\n class_index = class_index.reshape((1, class_index.shape[0], class_index.shape[1]))\n elif len(data.shape) != 3:\n raise ValueError(\"unsupported data shape\")\n\n # Generate one map for all samples\n return get_occlusion_sensitivity(data, model, class_index, occlusion_size)\n\nif __name__ == '__main__':\n if len(sys.argv) != 4:\n print(\"Usage:\")\n print(f\" {sys.argv[0]} <model filename> <trace filename> <sensitivity map filename>\")\n exit()\n\n model_filename = sys.argv[1]\n trace_filename = sys.argv[2]\n sensitivity_map_filename = sys.argv[3]\n\n model = load_model(model_filename)\n print(\"Input shape: \" + str(model.input_shape))\n\n traces = np.load(trace_filename)\n\n print(traces.files)\n\n trace_array = traces['trace_array']\n textin_array = traces['textin_array']\n known_keys = traces['known_keys']\n\n trace_array = trace_array.reshape((trace_array.shape[0], trace_array.shape[1], 1))\n\n # Run an initial prediction before we try to explain anything\n result = model.predict(trace_array[start_trace_to_attack:start_trace_to_attack+number_of_traces_to_attack, :, :])\n\n log10_sum_prediction = np.zeros(num_classes)\n for k in range(number_of_traces_to_attack):\n plaintext = textin_array[start_trace_to_attack+k, attack_byte]\n prediction = result[k]\n for l in range(num_classes):\n key_byte_index = (aes_sbox_inv[l] ^ plaintext)\n log10_sum_prediction[key_byte_index] += np.log10(prediction[l] + 1e-22)\n\n print(\"Best key byte guess: \" + str(np.argmax(log10_sum_prediction)))\n print(\"known_keys[0]: \" + 
str(known_keys[0]))\n\n # Run explainer\n data = trace_array[start_trace_to_attack:start_trace_to_attack+number_of_traces_to_explain, :, :]\n key_index = np.argmax(log10_sum_prediction)\n class_index = aes_sbox[textin_array[start_trace_to_attack:start_trace_to_attack+number_of_traces_to_explain, attack_byte] ^ key_index]\n\n sensitivity_map = explain(data, model, class_index, occlusion_size)\n\n # Save results\n np.savez_compressed(sensitivity_map_filename, sensitivity_map=sensitivity_map)\n\n # Visualize the results\n fig = plt.figure()\n plt.title(f\"Occlusion sensitivity for key byte {attack_byte} in trace {start_trace_to_attack}\")\n ax = fig.gca()\n x = np.linspace(0, sensitivity_map.shape[0]-1, sensitivity_map.shape[0])\n for i in range(0, sensitivity_map.shape[0]-1, occlusion_size):\n color = (sensitivity_map[i]-min(sensitivity_map))/np.ptp(sensitivity_map)\n ax.plot(x[i:i+occlusion_size+1], data[0, i:i+occlusion_size+1, 0], color=plt.cm.plasma(color))\n plt.show()\n" ]
[ [ "tensorflow.keras.models.load_model", "matplotlib.pyplot.cm.plasma", "matplotlib.pyplot.title", "numpy.linspace", "matplotlib.use", "numpy.ptp", "numpy.savez_compressed", "numpy.argmax", "numpy.mean", "numpy.log10", "numpy.load", "numpy.array", "numpy.zeros", "matplotlib.pyplot.show", "matplotlib.pyplot.figure" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [ "1.10", "2.7", "2.2", "2.3", "2.4", "2.5", "2.6" ] } ]
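The explain.py record above builds occlusion sensitivity maps for a side-channel model: it blanks a window of the power trace with the trace mean, re-runs the classifier, and takes 1 minus the target-class confidence as the window's importance. The following is a self-contained toy version of the per-trace loop; `model_predict` stands in for any callable returning class probabilities and is not part of the repo.

import numpy as np

def occlusion_sensitivity_1d(trace, model_predict, class_index, occlusion_size=1):
    # sensitivity = 1 - confidence, mirroring get_occlusion_sensitivity above.
    sensitivity = np.zeros(trace.shape[0])
    for x in range(0, trace.shape[0], occlusion_size):
        occluded = trace.copy()
        occluded[x:x + occlusion_size] = trace.mean()  # occlusion_value
        confidence = model_predict(occluded)[class_index]
        sensitivity[x:x + occlusion_size] = 1.0 - confidence
    return sensitivity

# Uniform stub "model" over 256 classes, just to make the sketch runnable.
stub = lambda t: np.full(256, 1.0 / 256.0)
print(occlusion_sensitivity_1d(np.random.randn(16), stub, class_index=0, occlusion_size=4))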
diegomrsantos/Python-Baseball
[ "4543df7a4d74e82106a3e8481553149c447d8ab6" ]
[ "stats/attendance.py" ]
[ "import pandas as pd\nimport matplotlib.pyplot as plt\nfrom data import games\n\ninfo_filter = games['type'] == 'info'\nattendance_filter = games['multi2'] == 'attendance'\nattendance = games.loc[info_filter & attendance_filter, ['year', 'multi3']]\n\nattendance.columns = ['year', 'attendance']\n\nattendance.loc[:, 'attendance'] = pd.to_numeric(attendance.loc[:, 'attendance'])\n\nattendance.plot(x='year', y='attendance', figsize=(15, 7), kind='bar')\nplt.xlabel('Year')\nplt.ylabel('Attendance')\nplt.axhline(y=attendance['attendance'].mean(), label='Mean', linestyle='--', color='green')\nplt.legend()\nplt.show()\n" ]
[ [ "matplotlib.pyplot.xlabel", "matplotlib.pyplot.show", "pandas.to_numeric", "matplotlib.pyplot.ylabel" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "0.23", "0.21", "2.0", "1.4", "0.19", "1.1", "1.5", "1.2", "0.24", "0.20", "1.0", "0.25", "1.3" ], "scipy": [], "tensorflow": [] } ]
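The attendance script above is the standard pattern for filtering an event-log frame: combine boolean masks with `&`, select the columns of interest with `.loc`, and coerce the string-typed `multi3` column to numbers before plotting. A minimal reproduction on a hypothetical three-row frame, so the pattern runs without the repo's `data` module:

import pandas as pd

games = pd.DataFrame({
    'type': ['info', 'info', 'play'],
    'multi2': ['attendance', 'temp', None],
    'multi3': ['41000', '72', None],
    'year': [2016, 2016, 2016],
})

mask = (games['type'] == 'info') & (games['multi2'] == 'attendance')
attendance = games.loc[mask, ['year', 'multi3']].copy()
attendance.columns = ['year', 'attendance']
attendance['attendance'] = pd.to_numeric(attendance['attendance'])
print(attendance)  # one row: year 2016, attendance 41000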
rperi/trustworthy-asv-fairness
[ "15df69a8f3f8ad5262002c9e3d12aa12ea8f1c6f" ]
[ "evaluate/evaluate_FDR.py" ]
[ "import numpy as np\nimport pandas as pd\nimport os\nimport pdb\nfrom scipy.spatial.distance import cosine\nfrom sklearn.metrics import roc_curve, confusion_matrix\nimport sys\nfrom tqdm import tqdm\nfrom sklearn.metrics import auc\nimport argparse\n\nfprs = [0.01,0.02,0.03,0.04,0.05,0.06,0.07,0.08,0.09,0.1,0.2,0.3,0.4,0.5]\ngroups = ['male_male','female_female']\nomegas = [0.0, 0.25, 0.5, 0.75, 1.0]\nemb_map = {}\nxvec_map = {}\n\ndef compute_scores(df_, eer_threshold_overall=0, agnostic_FLAG=False, emb_FLAG=True):\n if emb_FLAG:\n emb_mapping = emb_map\n else:\n emb_mapping = xvec_map\n similarity_scores= []\n labels = []\n for idx, row in tqdm(enumerate(df_.iterrows())):\n enrol = row[1]['audio_1']\n test = row[1]['audio_2']\n label = row[1]['label']\n if not enrol in emb_mapping.keys():\n print(enrol)\n if not test in emb_mapping.keys():\n print(test)\n\n sim = 1 - cosine(emb_mapping[enrol],emb_mapping[test])\n\n similarity_scores.append(sim)\n labels.append(label)\n fpr, tpr, threshold = roc_curve(labels, similarity_scores)\n fnr = 1 - tpr\n eer_threshold = threshold[np.nanargmin(np.absolute((fnr - fpr)))]\n eer1 = fpr[np.nanargmin(np.absolute((fnr - fpr)))]\n eer2 = fnr[np.nanargmin(np.absolute((fnr - fpr)))]\n eer = np.mean((eer1,eer2))\n\n sim = np.array(similarity_scores)\n labels = np.array(labels)\n if not agnostic_FLAG:\n fpr, fnr = compute_fpr_fnr(sim, labels, eer_threshold_overall)\n return sim, labels, eer, fpr, fnr\n else:\n return sim, labels, eer, eer_threshold\n\ndef compute_fpr_fnr(sim,labels_e1, thresh):\n\n preds = np.zeros(labels_e1.shape[0])\n preds[sim > thresh] = 1\n tn, fp, fn, tp = confusion_matrix(labels_e1, preds).ravel()\n fpr = fp/(fp+tn)\n fnr = fn/(fn+tp)\n return fpr, fnr\n\ndef compute_fdr(fprs, fnrs, omega=0.5):\n A = np.absolute(fprs[0]-fprs[1])\n B = np.absolute(fnrs[0]-fnrs[1])\n \n return 1 - (omega*A + (1-omega)*B)\n\ndef compute_auFDR(fpr_ov, tpr_ov, threshold_ov, sim_g0, sim_g1, labels_g0, labels_g1, \n score_dir, emb_FLAG=True, omega=0.5):\n # FDRs at various thersholds\n fdrs = []\n fnrs = []\n for fpr in tqdm(fprs):\n thresh = threshold_ov[np.nanargmin(np.absolute((fpr_ov-fpr)))]\n fnr = 1 - tpr_ov[np.nanargmin(np.absolute((fpr_ov-fpr)))]\n fpr_g0, fnr_g0 = compute_fpr_fnr(sim_g0, labels_g0, thresh)\n fpr_g1, fnr_g1 = compute_fpr_fnr(sim_g1, labels_g1, thresh)\n fdr = compute_fdr((fpr_g0, fpr_g1), (fnr_g0, fnr_g1), float(omega))\n fdrs.append(np.round(fdr*100,2))\n fnrs.append(np.round(fnr*100,2))\n auFDR = auc([x*100 for x in fprs], fdrs)\n auFDR_10 = auc([x*100 for x in fprs[0:10]], fdrs[0:10])\n df = pd.DataFrame(zip(fprs,fdrs, fnrs), columns=['fpr','fdr', 'fnr'])\n if emb_FLAG:\n print(\"Alpha = {} auFDR auFDR_10\".format(omega))\n print(\"Embeddings: {} {}\\n\".format(auFDR, auFDR_10))\n df.to_csv(os.path.join(score_dir, 'fdr_at_fpr_gender_omega_{}.csv'.format(omega)), index=None)\n else:\n print(\"Alpha = {} auFDR auFDR_10\".format(omega))\n print(\"xvectors: {} {}\\n\".format(auFDR, auFDR_10))\n df.to_csv(os.path.join(score_dir, 'fdr_at_fpr_gender_omega_{}.csv'.format(omega)), index=None)\n return auFDR, auFDR_10\n\ndef main(args):\n xvec_FLAG = args.eval_xvector\n\n # Creating necessary trials for gender-specific evaluations \n trial_dir = args.trials_root\n trials = os.path.join(trial_dir, 'Test-Combined.csv')\n df = pd.read_csv(trials)\n df['label'] = pd.to_numeric(df['label'])\n\n df_m = df.loc[df[\"gender_1\"]=='male']\n df_f = df.loc[df[\"gender_1\"]=='female']\n df_m_m = df_m.loc[df_m[\"gender_2\"]=='male']\n df_f_f = 
df_f.loc[df_f[\"gender_2\"]=='female']\n \n if not os.path.exists(os.path.join(trial_dir,'Test-male-all.csv')):\n df_m.to_csv(os.path.join(trial_dir,'Test-male-all.csv'), index=None)\n if not os.path.exists(os.path.join(trial_dir,'Test-female-all.csv')):\n df_f.to_csv(os.path.join(trial_dir,'Test-female-all.csv'), index=None)\n if not os.path.exists(os.path.join(trial_dir,'Test-male-male.csv')):\n df_m_m.to_csv(os.path.join(trial_dir,'Test-male-male.csv'), index=None)\n if not os.path.exists(os.path.join(trial_dir,'Test-female-female.csv')):\n df_f_f.to_csv(os.path.join(trial_dir,'Test-female-female.csv'), index=None)\n\n # Create directories to save ASV scores\n scores_dir_base = args.scores_root\n scores_dir_xvec = os.path.join(scores_dir_base,'baseline')\n scores_dir = os.path.join(scores_dir_base,'{}'.format(args.mode))\n os.makedirs(scores_dir_xvec, exist_ok=True)\n os.makedirs(scores_dir, exist_ok=True)\n\n # Load extracted embeddings and xvectors\n test_utts = np.load(os.path.join(args.data_root,'test_utts.npy'))\n \n pred_dir = args.pred_root\n e1 = np.load(os.path.join(pred_dir,'emb1.npy'))\n for idx, utt in enumerate(test_utts):\n emb_map[utt] = e1[idx,:]\n if xvec_FLAG:\n xvec = np.load(os.path.join(args.data_root,'test_data.npy'))\n for idx, utt in enumerate(test_utts):\n xvec_map[utt] = xvec[idx,:]\n\n\n # Gender-agnostic scoring\n print(\"Computing Gender-agnostic scores\")\n if os.path.exists(os.path.join(scores_dir_xvec, 'sim_xvec_overall.npy')) and os.path.exists(os.path.join(scores_dir, 'sim_e1_overall.npy')) and os.path.exists(os.path.join(scores_dir_xvec, 'labels_overall.npy')):\n sim_e1_ov = np.load(os.path.join(scores_dir, 'sim_e1_overall.npy'))\n labels_ov = np.load(os.path.join(scores_dir_xvec, 'labels_overall.npy'))\n fpr, tpr, threshold = roc_curve(labels_ov, sim_e1_ov)\n fnr = 1 - tpr\n eer_threshold_e1_ov = threshold[np.nanargmin(np.absolute((fnr - fpr)))]\n eer_e1_ov = fpr[np.nanargmin(np.absolute((fnr - fpr))) ]\n\n if xvec_FLAG:\n sim_xvec_ov = np.load(os.path.join(scores_dir_xvec, 'sim_xvec_overall.npy'))\n fpr, tpr, threshold = roc_curve(labels_ov, sim_xvec_ov)\n fnr = 1 - tpr\n eer_threshold_xvec_ov = threshold[np.nanargmin(np.absolute((fnr - fpr)))]\n eer_xvec_ov = fpr[np.nanargmin(np.absolute((fnr - fpr)))]\n print(\"Done scoring Gender-agnostic trials\")\n else:\n sim_e1_ov, labels_ov, eer_e1_ov, eer_threshold_e1_ov = compute_scores(df, agnostic_FLAG=True)\n np.save(os.path.join(scores_dir, 'sim_e1_overall'), sim_e1_ov)\n np.save(os.path.join(scores_dir_xvec, 'labels_overall'), labels_ov)\n if xvec_FLAG:\n sim_xvec_ov, labels_xvec_ov, eer_xvec_ov, eer_threshold_xvec_ov = compute_scores(df, agnostic_FLAG=True, emb_FLAG=False)\n np.save(os.path.join(scores_dir_xvec, 'sim_xvec_overall'), sim_xvec_ov)\n print(\"Done scoring Gender-agnostic trials\")\n\n #Gender-specific scoring\n print(\"Computing Gender-specific scores\")\n if (not os.path.exists(os.path.join(scores_dir, 'sim_e1_male_male.npy'))) or (not os.path.exists(os.path.join(scores_dir, 'sim_e1_female_female.npy'))):\n sim_e1_m, labels_e1_m, eer_e1_m, fpr_e1_m, fnr_e1_m = compute_scores(df_m_m, eer_threshold_e1_ov)\n sim_e1_f, labels_e1_f, eer_e1_f, fpr_e1_f, fnr_e1_f = compute_scores(df_f_f, eer_threshold_e1_ov)\n np.save(os.path.join(scores_dir, 'sim_e1_male_male'), sim_e1_m)\n np.save(os.path.join(scores_dir, 'sim_e1_female_female'), sim_e1_f)\n np.save(os.path.join(scores_dir_xvec, 'labels_male_male'), labels_e1_m)\n np.save(os.path.join(scores_dir_xvec, 'labels_female_female'), labels_e1_f)\n 
\n print(\"EER_all EER_Male EER_Female\")\n print(\"Embeddings: {} {} {}\\n\".format(np.round(eer_e1_ov*100,2), np.round(eer_e1_m*100,2), np.round(eer_e1_f*100,2)))\n \n sim_e1_g0 = sim_e1_m\n sim_e1_g1 = sim_e1_f\n labels_g0 = labels_e1_m\n labels_g1 = labels_e1_f\n print(\"Done scoring Gender-specific trials\")\n else:\n sim_e1 = []\n labels = []\n for group in groups:\n sim_e1.append(np.load(os.path.join(scores_dir, 'sim_e1_{}.npy'.format(group))))\n labels.append(np.load(os.path.join(scores_dir_xvec, 'labels_{}.npy'.format(group))))\n sim_e1_g0 = sim_e1[0]\n sim_e1_g1 = sim_e1[1]\n labels_g0 = labels[0]\n labels_g1 = labels[1]\n print(\"Done scoring Gender-specific trials\")\n if xvec_FLAG:\n if (not os.path.exists(os.path.join(scores_dir_xvec, 'sim_xvec_male_male.npy'))) or (not os.path.exists(os.path.join(scores_dir_xvec, 'sim_xvec_female_female.npy'))):\n print(\"Computing Gender-specific scores for x-vectors\")\n sim_xvec_m, labels_xvec_m, eer_xvec_m, fpr_xvec_m, fnr_xvec_m = compute_scores(df_m_m, eer_threshold_xvec_ov, emb_FLAG=False)\n sim_xvec_f, labels_xvec_f, eer_xvec_f, fpr_xvec_f, fnr_xvec_f = compute_scores(df_f_f, eer_threshold_xvec_ov, emb_FLAG=False)\n np.save(os.path.join(scores_dir_xvec, 'sim_xvec_male_male'), sim_xvec_m)\n np.save(os.path.join(scores_dir_xvec, 'sim_xvec_female_female'), sim_xvec_f)\n sim_xvec_g0 = sim_xvec_m\n sim_xvec_g1 = sim_xvec_f\n print(\"x-vector: {} {} {}\\n\".format(np.round(eer_xvec_ov*100,2), np.round(eer_xvec_m*100,2), np.round(eer_xvec_f*100,2)))\n print(\"Done scoring Gender-specific trials for x-vectors\")\n else:\n sim_xvec = []\n for group in groups:\n sim_xvec.append(np.load(os.path.join(scores_dir_xvec, 'sim_xvec_{}.npy'.format(group))))\n sim_xvec_g0 = sim_xvec[0]\n sim_xvec_g1 = sim_xvec[1]\n print(\"Done scoring Gender-specific trials for x-vectors\")\n\n # Compute area under FDR-FPR curve\n fpr_ov, tpr_ov, threshold_ov = roc_curve(labels_ov, sim_e1_ov)\n aus, au10s = [], []\n for omega in omegas:\n au, au10 = compute_auFDR(fpr_ov, tpr_ov, threshold_ov, sim_e1_g0, sim_e1_g1, labels_g0, labels_g1, scores_dir, emb_FLAG=True, omega=omega)\n aus.append(au)\n au10s.append(au10)\n df = pd.DataFrame(zip(omegas, aus, au10s), columns=['omega','au', 'au10'])\n df.to_csv(os.path.join(scores_dir, 'au_fdrs.csv'), index=None)\n\n if xvec_FLAG:\n fpr_ov, tpr_ov, threshold_ov = roc_curve(labels_ov, sim_xvec_ov)\n aus, au10s = [], []\n for omega in omegas:\n au, au10 = compute_auFDR(fpr_ov, tpr_ov, threshold_ov, sim_xvec_g0, sim_xvec_g1, labels_g0, labels_g1, scores_dir_xvec, emb_FLAG=False, omega=omega)\n aus.append(au)\n au10s.append(au10)\n df = pd.DataFrame(zip(omegas, aus, au10s), columns=['omega','au', 'au10'])\n df.to_csv(os.path.join(scores_dir_xvec, 'aufdrs.csv'), index=None)\n\nif __name__=='__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--mode', type=str, required=True)\n parser.add_argument('--trials_root', type=str, required=True,\n help=\"Directory containing Test-Combined.csv\")\n\n parser.add_argument('--data_root', type=str, required=True,\n help=\"Directory containing test_utts.npy\")\n\n parser.add_argument('--pred_root', type=str, required=True,\n help=\"Directory containing Extracted embeddings\")\n\n parser.add_argument('--scores_root', type=str, required=True,\n help=\"Directory to save ASV scores\")\n parser.add_argument('--eval_xvector', default=False, action='store_true')\n args = parser.parse_args()\n main(args)\n\n" ]
[ [ "pandas.to_numeric", "numpy.absolute", "pandas.read_csv", "scipy.spatial.distance.cosine", "sklearn.metrics.confusion_matrix", "sklearn.metrics.roc_curve", "numpy.round", "numpy.mean", "sklearn.metrics.auc", "numpy.array", "numpy.zeros" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [ "2.0", "1.4", "1.3", "1.1", "1.5", "1.2" ], "scipy": [ "0.13", "1.6", "0.14", "1.10", "0.15", "1.4", "0.16", "1.9", "0.19", "1.5", "0.18", "1.2", "1.7", "0.12", "1.0", "0.17", "1.3", "1.8" ], "tensorflow": [] } ]
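The fairness metric in the evaluate_FDR.py record above is the Fairness Discrepancy Rate. At a given decision threshold, FDR = 1 - (omega * |FPR_g0 - FPR_g1| + (1 - omega) * |FNR_g0 - FNR_g1|), so a value of 1.0 means the two demographic groups see identical false-accept and false-reject rates; the script additionally integrates FDR over operating points (auFDR). A worked toy example with made-up error rates:

def fdr(fpr_g0, fpr_g1, fnr_g0, fnr_g1, omega=0.5):
    # Mirrors compute_fdr above: weighted gap between the groups' error rates.
    return 1 - (omega * abs(fpr_g0 - fpr_g1) + (1 - omega) * abs(fnr_g0 - fnr_g1))

# Group FPRs of 1% vs 3% and FNRs of 10% vs 14% at one threshold:
print(round(fdr(0.01, 0.03, 0.10, 0.14), 4))             # 0.97 (equal weighting)
print(round(fdr(0.01, 0.03, 0.10, 0.14, omega=1.0), 4))  # 0.98 (FPR gap only)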
creare-com/podpac
[ "7feb5c957513c146ce73ba1c36c630284f513a6e", "7feb5c957513c146ce73ba1c36c630284f513a6e", "7feb5c957513c146ce73ba1c36c630284f513a6e", "7feb5c957513c146ce73ba1c36c630284f513a6e", "7feb5c957513c146ce73ba1c36c630284f513a6e" ]
[ "podpac/core/coordinates/test/test_uniform_coordinates1d.py", "podpac/core/test/test_node.py", "podpac/core/interpolation/test/test_interpolation.py", "podpac/core/algorithm/generic.py", "podpac/core/algorithm/test/test_stats.py" ]
[ "from datetime import datetime\nimport json\n\nimport pytest\nimport traitlets as tl\nimport numpy as np\nfrom numpy.testing import assert_equal\n\nimport podpac\nfrom podpac.core.coordinates.utils import make_coord_array\nfrom podpac.core.coordinates.coordinates1d import Coordinates1d\nfrom podpac.core.coordinates.array_coordinates1d import ArrayCoordinates1d\nfrom podpac.core.coordinates.uniform_coordinates1d import UniformCoordinates1d\nfrom podpac.core.coordinates.coordinates import Coordinates\n\n\nclass TestUniformCoordinatesCreation(object):\n def test_numerical(self):\n # ascending\n c = UniformCoordinates1d(0, 50, 10)\n a = np.array([0, 10, 20, 30, 40, 50], dtype=float)\n assert c.start == 0\n assert c.stop == 50\n assert c.step == 10\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, [0, 50])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 6\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending\n c = UniformCoordinates1d(50, 0, -10)\n a = np.array([50, 40, 30, 20, 10, 0], dtype=float)\n assert c.start == 50\n assert c.stop == 0\n assert c.step == -10\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, [0, 50])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 6\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n def test_numerical_inexact(self):\n # ascending\n c = UniformCoordinates1d(0, 49, 10)\n a = np.array([0, 10, 20, 30, 40], dtype=float)\n assert c.start == 0\n assert c.stop == 49\n assert c.step == 10\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, [0, 40])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 5\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending\n c = UniformCoordinates1d(50, 1, -10)\n a = np.array([50, 40, 30, 20, 10], dtype=float)\n assert c.start == 50\n assert c.stop == 1\n assert c.step == -10\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, [10, 50])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.dtype == float\n assert c.size == a.size\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n def test_datetime(self):\n # ascending\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-04\", \"1,D\")\n a = np.array([\"2018-01-01\", \"2018-01-02\", \"2018-01-03\", \"2018-01-04\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-04\")\n assert c.step == np.timedelta64(1, \"D\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending\n c = UniformCoordinates1d(\"2018-01-04\", \"2018-01-01\", \"-1,D\")\n a = np.array([\"2018-01-04\", \"2018-01-03\", \"2018-01-02\", \"2018-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-04\")\n assert c.stop == 
np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(-1, \"D\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n def test_datetime_inexact(self):\n # ascending\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-06\", \"2,D\")\n a = np.array([\"2018-01-01\", \"2018-01-03\", \"2018-01-05\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-06\")\n assert c.step == np.timedelta64(2, \"D\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending\n c = UniformCoordinates1d(\"2018-01-06\", \"2018-01-01\", \"-2,D\")\n a = np.array([\"2018-01-06\", \"2018-01-04\", \"2018-01-02\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-06\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(-2, \"D\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n def test_datetime_month_step(self):\n # ascending\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-04-01\", \"1,M\")\n a = np.array([\"2018-01-01\", \"2018-02-01\", \"2018-03-01\", \"2018-04-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-04-01\")\n assert c.step == np.timedelta64(1, \"M\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending\n c = UniformCoordinates1d(\"2018-04-01\", \"2018-01-01\", \"-1,M\")\n a = np.array([\"2018-04-01\", \"2018-03-01\", \"2018-02-01\", \"2018-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-04-01\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(-1, \"M\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n def test_datetime_year_step(self):\n # ascending, exact\n c = UniformCoordinates1d(\"2018-01-01\", \"2021-01-01\", \"1,Y\")\n a = np.array([\"2018-01-01\", \"2019-01-01\", \"2020-01-01\", \"2021-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2021-01-01\")\n assert c.step == np.timedelta64(1, \"Y\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n 
assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending, exact\n c = UniformCoordinates1d(\"2021-01-01\", \"2018-01-01\", \"-1,Y\")\n a = np.array([\"2021-01-01\", \"2020-01-01\", \"2019-01-01\", \"2018-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2021-01-01\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(-1, \"Y\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n # ascending, inexact (two cases)\n c = UniformCoordinates1d(\"2018-01-01\", \"2021-04-01\", \"1,Y\")\n a = np.array([\"2018-01-01\", \"2019-01-01\", \"2020-01-01\", \"2021-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2021-04-01\")\n assert c.step == np.timedelta64(1, \"Y\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n c = UniformCoordinates1d(\"2018-04-01\", \"2021-01-01\", \"1,Y\")\n a = np.array([\"2018-04-01\", \"2019-04-01\", \"2020-04-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-04-01\")\n assert c.stop == np.datetime64(\"2021-01-01\")\n assert c.step == np.timedelta64(1, \"Y\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending, inexact (two cases)\n c = UniformCoordinates1d(\"2021-01-01\", \"2018-04-01\", \"-1,Y\")\n a = np.array([\"2021-01-01\", \"2020-01-01\", \"2019-01-01\", \"2018-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2021-01-01\")\n assert c.stop == np.datetime64(\"2018-04-01\")\n assert c.step == np.timedelta64(-1, \"Y\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n c = UniformCoordinates1d(\"2021-04-01\", \"2018-01-01\", \"-1,Y\")\n a = np.array([\"2021-04-01\", \"2020-04-01\", \"2019-04-01\", \"2018-04-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2021-04-01\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(-1, \"Y\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending 
== True\n assert c.is_uniform == True\n\n def test_numerical_size(self):\n # ascending\n c = UniformCoordinates1d(0, 10, size=20)\n assert c.start == 0\n assert c.stop == 10\n assert c.step == 10 / 19.0\n assert_equal(c.coordinates, np.linspace(0, 10, 20))\n assert_equal(c.bounds, [0, 10])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 20\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == False\n assert c.is_uniform == True\n\n # descending\n c = UniformCoordinates1d(10, 0, size=20)\n assert c.start == 10\n assert c.stop == 0\n assert c.step == -10 / 19.0\n assert_equal(c.coordinates, np.linspace(10, 0, 20))\n assert_equal(c.bounds, [0, 10])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 20\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == True\n assert c.is_uniform == True\n\n def test_datetime_size(self):\n # ascending\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-10\", size=10)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-10\")\n assert_equal(c.bounds, [np.datetime64(\"2018-01-01\"), np.datetime64(\"2018-01-10\")])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 10\n assert c.dtype == np.datetime64\n assert c.is_descending == False\n\n # descending\n c = UniformCoordinates1d(\"2018-01-10\", \"2018-01-01\", size=10)\n assert c.start == np.datetime64(\"2018-01-10\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert_equal(c.bounds, [np.datetime64(\"2018-01-01\"), np.datetime64(\"2018-01-10\")])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 10\n assert c.dtype == np.datetime64\n assert c.is_descending == True\n\n # increase resolution\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-10\", size=21)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-10\")\n assert_equal(c.bounds, [np.datetime64(\"2018-01-01\"), np.datetime64(\"2018-01-10\")])\n assert c.coordinates[c.argbounds[0]] == c.bounds[0]\n assert c.coordinates[c.argbounds[1]] == c.bounds[1]\n assert c.size == 21\n assert c.dtype == np.datetime64\n assert c.is_descending == False\n\n def test_datetime_size_invalid(self):\n with pytest.raises(ValueError, match=\"Cannot divide timedelta\"):\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-10\", size=20)\n\n def test_numerical_size_floating_point_error(self):\n c = UniformCoordinates1d(50.619, 50.62795, size=30)\n assert c.size == 30\n\n def test_numerical_singleton(self):\n # positive step\n c = UniformCoordinates1d(1, 1, 10)\n a = np.array([1], dtype=float)\n assert c.start == 1\n assert c.stop == 1\n assert c.step == 10\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, [1, 1])\n assert c.size == 1\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == None\n assert c.is_uniform == True\n\n # negative step\n c = UniformCoordinates1d(1, 1, -10)\n a = np.array([1], dtype=float)\n assert c.start == 1\n assert c.stop == 1\n assert c.step == -10\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, [1, 1])\n assert c.size == 1\n assert c.dtype == float\n assert c.is_monotonic == True\n assert c.is_descending == None\n assert c.is_uniform == True\n\n def 
test_datetime_singleton(self):\n # positive step\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-01\", \"1,D\")\n a = np.array([\"2018-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(1, \"D\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[0, -1]])\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == None\n assert c.is_uniform == True\n\n # negative step\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-01\", \"-1,D\")\n a = np.array([\"2018-01-01\"]).astype(np.datetime64)\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-01\")\n assert c.step == np.timedelta64(-1, \"D\")\n assert_equal(c.coordinates, a)\n assert_equal(c.bounds, a[[-1, 0]])\n assert c.size == a.size\n assert c.dtype == np.datetime64\n assert c.is_monotonic == True\n assert c.is_descending == None\n assert c.is_uniform == True\n\n def test_from_tuple(self):\n # numerical, step\n c = UniformCoordinates1d.from_tuple((0, 10, 0.5))\n assert c.start == 0.0\n assert c.stop == 10.0\n assert c.step == 0.5\n\n # numerical, size\n c = UniformCoordinates1d.from_tuple((0, 10, 20))\n assert c.start == 0.0\n assert c.stop == 10.0\n assert c.size == 20\n\n # datetime, step\n c = UniformCoordinates1d.from_tuple((\"2018-01-01\", \"2018-01-04\", \"1,D\"))\n assert c.start == np.datetime64(\"2018-01-01\")\n assert c.stop == np.datetime64(\"2018-01-04\")\n assert c.step == np.timedelta64(1, \"D\")\n\n # invalid\n with pytest.raises(ValueError, match=\"UniformCoordinates1d.from_tuple expects a tuple\"):\n UniformCoordinates1d.from_tuple((0, 10))\n\n with pytest.raises(ValueError, match=\"UniformCoordinates1d.from_tuple expects a tuple\"):\n UniformCoordinates1d.from_tuple(np.array([0, 10, 0.5]))\n\n def test_copy(self):\n c = UniformCoordinates1d(0, 10, 50, name=\"lat\")\n c2 = c.copy()\n assert c is not c2\n assert c == c2\n\n def test_invalid_init(self):\n with pytest.raises(ValueError):\n UniformCoordinates1d(0, 0, 0)\n\n with pytest.raises(ValueError):\n UniformCoordinates1d(0, 50, 0)\n\n with pytest.raises(ValueError):\n UniformCoordinates1d(0, 50, -10)\n\n with pytest.raises(ValueError):\n UniformCoordinates1d(50, 0, 10)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0, \"2018-01-01\", 10)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(\"2018-01-01\", 50, 10)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(\"2018-01-01\", \"2018-01-02\", 10)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0.0, \"2018-01-01\", \"1,D\")\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(\"2018-01-01\", 50, \"1,D\")\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0, 50, \"1,D\")\n\n with pytest.raises(ValueError):\n UniformCoordinates1d(\"a\", 50, 10)\n\n with pytest.raises(ValueError):\n UniformCoordinates1d(0, \"b\", 10)\n\n with pytest.raises(ValueError):\n UniformCoordinates1d(0, 50, \"a\")\n\n with pytest.raises(TypeError):\n UniformCoordinates1d()\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0, 50)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0, 50, 10, size=6)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0, 10, size=20.0)\n\n with pytest.raises(TypeError):\n UniformCoordinates1d(0, 10, size=\"string\")\n\n with pytest.raises(TypeError):\n 
UniformCoordinates1d(\"2018-01-10\", \"2018-01-01\", size=\"1,D\")\n\n\nclass TestUniformCoordinatesEq(object):\n def test_equal(self):\n c1 = UniformCoordinates1d(0, 50, 10)\n c2 = UniformCoordinates1d(0, 50, 10)\n c3 = UniformCoordinates1d(0, 50, 10)\n c4 = UniformCoordinates1d(5, 50, 10)\n c5 = UniformCoordinates1d(0, 60, 10)\n c6 = UniformCoordinates1d(0, 50, 5)\n c7 = UniformCoordinates1d(50, 0, -10)\n\n assert c1 == c2\n assert c1 == c3\n assert c1 != c4\n assert c1 != c5\n assert c1 != c6\n assert c1 != c7\n\n def test_equal_array_coordinates(self):\n c1 = UniformCoordinates1d(0, 50, 10)\n c2 = ArrayCoordinates1d([0, 10, 20, 30, 40, 50])\n c3 = ArrayCoordinates1d([10, 20, 30, 40, 50, 60])\n\n assert c1 == c2\n assert c1 != c3\n\n\nclass TestUniformCoordinatesSerialization(object):\n def test_definition(self):\n # numerical\n c = UniformCoordinates1d(0, 50, 10, name=\"lat\")\n d = c.definition\n assert isinstance(d, dict)\n assert set(d.keys()) == set([\"start\", \"stop\", \"step\", \"name\"])\n json.dumps(d, cls=podpac.core.utils.JSONEncoder) # test serializable\n c2 = UniformCoordinates1d.from_definition(d) # test from_definition\n assert c2 == c\n\n # datetimes\n c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-03\", \"1,D\")\n d = c.definition\n assert isinstance(d, dict)\n assert set(d.keys()) == set([\"start\", \"stop\", \"step\"])\n json.dumps(d, cls=podpac.core.utils.JSONEncoder) # test serializable\n c2 = UniformCoordinates1d.from_definition(d) # test from_definition\n assert c2 == c\n\n def test_invalid_definition(self):\n # incorrect definition\n d = {\"stop\": 50}\n with pytest.raises(ValueError, match='UniformCoordinates1d definition requires \"start\"'):\n UniformCoordinates1d.from_definition(d)\n\n d = {\"start\": 0}\n with pytest.raises(ValueError, match='UniformCoordinates1d definition requires \"stop\"'):\n UniformCoordinates1d.from_definition(d)\n\n def test_from_definition_size(self):\n # numerical\n d = {\"start\": 0, \"stop\": 50, \"size\": 6}\n c = UniformCoordinates1d.from_definition(d)\n assert_equal(c.coordinates, [0, 10, 20, 30, 40, 50])\n\n # datetime, size\n d = {\"start\": \"2018-01-01\", \"stop\": \"2018-01-03\", \"size\": 3}\n c = UniformCoordinates1d.from_definition(d)\n assert_equal(c.coordinates, np.array([\"2018-01-01\", \"2018-01-02\", \"2018-01-03\"]).astype(np.datetime64))\n\n\nclass TestUniformCoordinatesIndexing(object):\n def test_len(self):\n c = UniformCoordinates1d(0, 50, 10)\n assert len(c) == 6\n\n def test_index(self):\n c = UniformCoordinates1d(0, 50, 10, name=\"lat\")\n\n # int\n c2 = c[2]\n assert isinstance(c2, Coordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [20])\n\n c2 = c[-2]\n assert isinstance(c2, Coordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [40])\n\n # slice\n c2 = c[:2]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 0\n assert c2.stop == 10\n assert c2.step == 10\n\n c2 = c[2:]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 20\n assert c2.stop == 50\n assert c2.step == 10\n\n c2 = c[::2]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 0\n assert c2.stop == 50\n assert c2.step == 20\n\n c2 = c[1:-1]\n assert isinstance(c2, 
UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 10\n assert c2.stop == 40\n assert c2.step == 10\n\n c2 = c[-3:5]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 30\n assert c2.stop == 40\n assert c2.step == 10\n\n c2 = c[::-1]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 50\n assert c2.stop == 0\n assert c2.step == -10\n\n # index array\n c2 = c[[0, 1, 3]]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [0, 10, 30])\n\n c2 = c[[3, 1, 0]]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [30, 10, 0])\n\n c2 = c[[0, 3, 1]]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [0, 30, 10])\n\n c2 = c[[]]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [])\n\n c2 = c[0:0]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [])\n\n c2 = c[[]]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [])\n\n # boolean array\n c2 = c[[True, True, True, False, True, False]]\n assert isinstance(c2, ArrayCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [0, 10, 20, 40])\n\n # invalid\n with pytest.raises(IndexError):\n c[0.3]\n\n with pytest.raises(IndexError):\n c[10]\n\n def test_index_descending(self):\n c = UniformCoordinates1d(50, 0, -10, name=\"lat\")\n\n # int\n c2 = c[2]\n assert isinstance(c2, Coordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [30])\n\n c2 = c[-2]\n assert isinstance(c2, Coordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert_equal(c2.coordinates, [10])\n\n # slice\n c2 = c[:2]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 50\n assert c2.stop == 40\n assert c2.step == -10\n\n c2 = c[2:]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 30\n assert c2.stop == 0\n assert c2.step == -10\n\n c2 = c[::2]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 50\n assert c2.stop == 0\n assert c2.step == -20\n\n c2 = c[1:-1]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 40\n assert c2.stop == 10\n assert c2.step == -10\n\n c2 = c[-3:5]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 20\n assert c2.stop == 10\n assert c2.step == -10\n\n c2 = c[::-1]\n assert isinstance(c2, UniformCoordinates1d)\n assert c2.name == c.name\n assert c2.properties == c.properties\n assert c2.start == 0\n assert c2.stop == 50\n assert c2.step == 10\n\n # index array\n c2 = c[[0, 1, 
3]]\n        assert isinstance(c2, ArrayCoordinates1d)\n        assert c2.name == c.name\n        assert c2.properties == c.properties\n        assert_equal(c2.coordinates, [50, 40, 20])\n\n        c2 = c[[3, 1, 0]]\n        assert isinstance(c2, ArrayCoordinates1d)\n        assert c2.name == c.name\n        assert c2.properties == c.properties\n        assert_equal(c2.coordinates, [20, 40, 50])\n\n        c2 = c[[0, 3, 1]]\n        assert isinstance(c2, ArrayCoordinates1d)\n        assert c2.name == c.name\n        assert c2.properties == c.properties\n        assert_equal(c2.coordinates, [50, 20, 40])\n\n        # boolean array\n        c2 = c[[True, True, True, False, True, False]]\n        assert isinstance(c2, ArrayCoordinates1d)\n        assert c2.name == c.name\n        assert c2.properties == c.properties\n        assert_equal(c2.coordinates, [50, 40, 30, 10])\n\n    def test_in(self):\n        c = UniformCoordinates1d(0, 50, 10, name=\"lat\")\n        assert 0 in c\n        assert 10 in c\n        assert 50 in c\n        assert -10 not in c\n        assert 60 not in c\n        assert 5 not in c\n        assert np.datetime64(\"2018\") not in c\n        assert \"a\" not in c\n\n        c = UniformCoordinates1d(50, 0, -10, name=\"lat\")\n        assert 0 in c\n        assert 10 in c\n        assert 50 in c\n        assert -10 not in c\n        assert 60 not in c\n        assert 5 not in c\n        assert np.datetime64(\"2018\") not in c\n        assert \"a\" not in c\n\n        c = UniformCoordinates1d(\"2020-01-01\", \"2020-01-09\", \"2,D\", name=\"time\")\n        assert np.datetime64(\"2020-01-01\") in c\n        assert np.datetime64(\"2020-01-03\") in c\n        assert np.datetime64(\"2020-01-09\") in c\n        assert np.datetime64(\"2020-01-11\") not in c\n        assert np.datetime64(\"2020-01-02\") not in c\n        assert 10 not in c\n        assert \"a\" not in c\n\n\nclass TestUniformCoordinatesAreaBounds(object):\n    def test_get_area_bounds_numerical(self):\n        c = UniformCoordinates1d(0, 50, 10)\n\n        # point\n        area_bounds = c.get_area_bounds(None)\n        assert_equal(area_bounds, [0.0, 50.0])\n\n        # uniform\n        area_bounds = c.get_area_bounds(0.5)\n        assert_equal(area_bounds, [-0.5, 50.5])\n\n        # segment\n        area_bounds = c.get_area_bounds([-0.2, 0.7])\n        assert_equal(area_bounds, [-0.2, 50.7])\n\n        # polygon (i.e. 
there would be corresponding offsets for another dimension)\n        area_bounds = c.get_area_bounds([-0.2, -0.5, 0.7, 0.5])\n        assert_equal(area_bounds, [-0.5, 50.7])\n\n    def test_get_area_bounds_datetime(self):\n        c = UniformCoordinates1d(\"2018-01-01\", \"2018-01-04\", \"1,D\")\n\n        # point\n        area_bounds = c.get_area_bounds(None)\n        assert_equal(area_bounds, make_coord_array([\"2018-01-01\", \"2018-01-04\"]))\n\n        # uniform\n        area_bounds = c.get_area_bounds(\"1,D\")\n        assert_equal(area_bounds, make_coord_array([\"2017-12-31\", \"2018-01-05\"]))\n\n        area_bounds = c.get_area_bounds(\"1,M\")\n        assert_equal(area_bounds, make_coord_array([\"2017-12-01\", \"2018-02-04\"]))\n\n        area_bounds = c.get_area_bounds(\"1,Y\")\n        assert_equal(area_bounds, make_coord_array([\"2017-01-01\", \"2019-01-04\"]))\n\n        # segment\n        area_bounds = c.get_area_bounds([\"0,h\", \"12,h\"])\n        assert_equal(area_bounds, make_coord_array([\"2018-01-01 00:00\", \"2018-01-04 12:00\"]))\n\n\nclass TestUniformCoordinatesSelection(object):\n    def test_select_all_shortcut(self):\n        c = UniformCoordinates1d(20.0, 70.0, 10.0)\n\n        s = c.select([0, 100])\n        assert s.start == 20.0\n        assert s.stop == 70.0\n        assert s.step == 10.0\n\n        s, I = c.select([0, 100], return_index=True)\n        assert s.start == 20.0\n        assert s.stop == 70.0\n        assert s.step == 10.0\n        assert_equal(c[I], s)\n\n    def test_select_none_shortcut(self):\n        c = UniformCoordinates1d(20.0, 70.0, 10.0)\n\n        # above\n        s = c.select([100, 200])\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n\n        s, I = c.select([100, 200], return_index=True)\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n        assert c[I] == s\n\n        # below\n        s = c.select([0, 5])\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n\n        s, I = c.select([0, 5], return_index=True)\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n        assert c[I] == s\n\n    def test_select_ascending(self):\n        c = UniformCoordinates1d(20.0, 70.0, 10.0)\n\n        # inner\n        s = c.select([35.0, 55.0])\n        assert s.start == 40.0\n        assert s.stop == 50.0\n        assert s.step == 10.0\n\n        s, I = c.select([35.0, 55.0], return_index=True)\n        assert s.start == 40.0\n        assert s.stop == 50.0\n        assert s.step == 10.0\n        assert c[I] == s\n\n        # inner with aligned bounds\n        s = c.select([30.0, 60.0])\n        assert s.start == 30.0\n        assert s.stop == 60.0\n        assert s.step == 10.0\n\n        s, I = c.select([30.0, 60.0], return_index=True)\n        assert s.start == 30.0\n        assert s.stop == 60.0\n        assert s.step == 10.0\n        assert c[I] == s\n\n        # above\n        s = c.select([45, 100])\n        assert s.start == 50.0\n        assert s.stop == 70.0\n        assert s.step == 10.0\n\n        s, I = c.select([45, 100], return_index=True)\n        assert s.start == 50.0\n        assert s.stop == 70.0\n        assert s.step == 10.0\n        assert c[I] == s\n\n        # below\n        s = c.select([5, 55])\n        assert s.start == 20.0\n        assert s.stop == 50.0\n        assert s.step == 10.0\n\n        s, I = c.select([5, 55], return_index=True)\n        assert s.start == 20.0\n        assert s.stop == 50.0\n        assert s.step == 10.0\n        assert c[I] == s\n\n        # between coordinates\n        s = c.select([52, 55])\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n\n        s, I = c.select([52, 55], return_index=True)\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n        assert_equal(c.coordinates[I], [])\n\n        # backwards bounds\n        s = c.select([70, 30])\n        assert isinstance(s, ArrayCoordinates1d)\n        assert_equal(s.coordinates, [])\n\n        s, I = c.select([70, 30], return_index=True)\n        assert isinstance(s, 
ArrayCoordinates1d)\n assert_equal(s.coordinates, [])\n assert_equal(c.coordinates[I], [])\n\n def test_select_descending(self):\n c = UniformCoordinates1d(70.0, 20.0, -10.0)\n\n # inner\n s = c.select([35.0, 55.0])\n assert s.start == 50.0\n assert s.stop == 40.0\n assert s.step == -10.0\n\n s, I = c.select([35.0, 55.0], return_index=True)\n assert s.start == 50.0\n assert s.stop == 40.0\n assert s.step == -10.0\n assert c[I] == s\n\n # inner with aligned bounds\n s = c.select([30.0, 60.0])\n assert s.start == 60.0\n assert s.stop == 30.0\n assert s.step == -10.0\n\n s, I = c.select([30.0, 60.0], return_index=True)\n assert s.start == 60.0\n assert s.stop == 30.0\n assert s.step == -10.0\n assert c[I] == s\n\n # above\n s = c.select([45, 100])\n assert s.start == 70.0\n assert s.stop == 50.0\n assert s.step == -10.0\n\n s, I = c.select([45, 100], return_index=True)\n assert s.start == 70.0\n assert s.stop == 50.0\n assert s.step == -10.0\n assert c[I] == s\n\n # below\n s = c.select([5, 55])\n assert s.start == 50.0\n assert s.stop == 20.0\n assert s.step == -10.0\n\n s, I = c.select([5, 55], return_index=True)\n assert s.start == 50.0\n assert s.stop == 20.0\n assert s.step == -10.0\n assert c[I] == s\n\n # between coordinates\n s = c.select([52, 55])\n assert isinstance(s, ArrayCoordinates1d)\n assert_equal(s.coordinates, [])\n\n s, I = c.select([52, 55], return_index=True)\n assert isinstance(s, ArrayCoordinates1d)\n assert_equal(s.coordinates, [])\n assert_equal(c.coordinates[I], [])\n\n # backwards bounds\n s = c.select([70, 30])\n assert isinstance(s, ArrayCoordinates1d)\n assert_equal(s.coordinates, [])\n\n s, I = c.select([70, 30], return_index=True)\n assert isinstance(s, ArrayCoordinates1d)\n assert_equal(s.coordinates, [])\n assert_equal(c.coordinates[I], [])\n\n def test_select_outer(self):\n c = UniformCoordinates1d(20.0, 70.0, 10.0)\n\n # inner\n s = c.select([35.0, 55.0], outer=True)\n assert s.start == 30.0\n assert s.stop == 60.0\n assert s.step == 10.0\n\n s, I = c.select([35.0, 55.0], outer=True, return_index=True)\n assert s.start == 30.0\n assert s.stop == 60.0\n assert s.step == 10.0\n assert c[I] == s\n\n # inner with aligned bounds\n s = c.select([30.0, 60.0], outer=True)\n assert s.start == 30.0\n assert s.stop == 60.0\n assert s.step == 10.0\n\n s, I = c.select([30.0, 60.0], outer=True, return_index=True)\n assert s.start == 30.0\n assert s.stop == 60.0\n assert s.step == 10.0\n assert c[I] == s\n\n # above\n s = c.select([45, 100], outer=True)\n assert s.start == 40.0\n assert s.stop == 70.0\n assert s.step == 10.0\n\n s, I = c.select([45, 100], outer=True, return_index=True)\n assert s.start == 40.0\n assert s.stop == 70.0\n assert s.step == 10.0\n assert c[I] == s\n\n # below\n s = c.select([5, 55], outer=True)\n assert s.start == 20.0\n assert s.stop == 60.0\n assert s.step == 10.0\n\n s, I = c.select([5, 55], outer=True, return_index=True)\n assert s.start == 20.0\n assert s.stop == 60.0\n assert s.step == 10.0\n assert c[I] == s\n\n # between coordinates\n s = c.select([52, 55], outer=True)\n assert s.start == 50.0\n assert s.stop == 60.0\n assert s.step == 10.0\n\n s, I = c.select([52, 55], outer=True, return_index=True)\n assert s.start == 50.0\n assert s.stop == 60.0\n assert s.step == 10.0\n assert c[I] == s\n\n # backwards bounds\n s = c.select([70, 30], outer=True)\n assert isinstance(s, ArrayCoordinates1d)\n assert_equal(s.coordinates, [])\n\n s, I = c.select([70, 30], outer=True, return_index=True)\n assert isinstance(s, ArrayCoordinates1d)\n 
assert_equal(s.coordinates, [])\n        assert_equal(c.coordinates[I], [])\n\n    def test_select_time_variable_precision(self):\n        c = UniformCoordinates1d(\"2012-05-19\", \"2012-05-20\", \"1,D\", name=\"time\")\n        c2 = UniformCoordinates1d(\"2012-05-20T12:00:00\", \"2012-05-21T12:00:00\", \"1,D\", name=\"time\")\n        s = c.select(c2.bounds, outer=True)\n        s1 = c.select(c2.bounds, outer=False)\n        s2 = c2.select(c.bounds)\n        assert s.size == 1\n        assert s1.size == 0\n        assert s2.size == 1\n\n\nclass TestUniformCoordinatesMethods(object):\n    def test_unique(self):\n        c = UniformCoordinates1d(1, 5, step=1)\n        c2 = c.unique()\n        assert c2 == c and c2 is not c\n\n        c2, I = c.unique(return_index=True)\n        assert c2 == c and c2 is not c\n        assert c2 == c[I]\n\n    def test_simplify(self):\n        c = UniformCoordinates1d(1, 5, step=1)\n        c2 = c.simplify()\n        assert c2 == c and c2 is not c\n\n        # reversed, step -2\n        c = UniformCoordinates1d(4, 0, step=-2)\n        c2 = c.simplify()\n        assert c2 == c and c2 is not c\n\n        # time\n        c = UniformCoordinates1d(\"2020-01-01\", \"2020-01-05\", step=\"1,D\")\n        c2 = c.simplify()\n        assert c2 == c and c2 is not c\n\n        # time, reverse -3,h\n        c = UniformCoordinates1d(\"2020-01-01T12:00\", \"2020-01-01T08:00\", step=\"-3,h\")\n        c2 = c.simplify()\n        assert c2 == c and c2 is not c\n\n    def test_flatten(self):\n        c = UniformCoordinates1d(1, 5, step=1)\n        c2 = c.flatten()\n        assert c2 == c and c2 is not c\n\n    def test_reshape(self):\n        c = UniformCoordinates1d(1, 6, step=1, name=\"lat\")\n        c2 = c.reshape((2, 3))\n        assert c2 == ArrayCoordinates1d(c.coordinates.reshape((2, 3)), name=\"lat\")\n\n    def test_issubset(self):\n        c1 = UniformCoordinates1d(2, 1, step=-1)\n        c2 = UniformCoordinates1d(1, 3, step=1)\n        c3 = UniformCoordinates1d(0, 2, step=1)\n        c4 = UniformCoordinates1d(1, 4, step=0.5)\n        c5 = UniformCoordinates1d(1.5, 2.5, step=0.5)\n        c6 = UniformCoordinates1d(1.4, 2.4, step=0.5)\n        c7 = UniformCoordinates1d(1.4, 2.4, step=10)\n\n        # self\n        assert c1.issubset(c1)\n\n        # subsets\n        assert c1.issubset(c2)\n        assert c1.issubset(c3)\n        assert c1.issubset(c4)\n        assert c5.issubset(c4)\n        assert c7.issubset(c6)\n\n        # not subsets\n        assert not c2.issubset(c1)\n        assert not c2.issubset(c3)\n        assert not c3.issubset(c1)\n        assert not c3.issubset(c2)\n        assert not c4.issubset(c1)\n        assert not c6.issubset(c4)\n\n    def test_issubset_datetime(self):\n        c1 = UniformCoordinates1d(\"2020-01-01\", \"2020-01-03\", \"1,D\")\n        c2 = UniformCoordinates1d(\"2020-01-01\", \"2020-01-03\", \"2,D\")\n        c3 = UniformCoordinates1d(\"2020-01-01\", \"2020-01-05\", \"1,D\")\n        c4 = UniformCoordinates1d(\"2020-01-05\", \"2020-01-01\", \"-2,D\")\n\n        # self\n        assert c1.issubset(c1)\n\n        # same resolution\n        assert c1.issubset(c3)\n        assert c2.issubset(c1)\n        assert c2.issubset(c4)\n        assert not c1.issubset(c2)\n        assert not c1.issubset(c4)\n        assert not c3.issubset(c1)\n\n        # different resolution\n        c5 = UniformCoordinates1d(\"2020-01-01T00:00\", \"2020-01-03T00:00\", \"1,D\")\n        c6 = UniformCoordinates1d(\"2020-01-01T00:00\", \"2020-01-03T00:00\", \"6,h\")\n        assert c1.issubset(c5)\n        assert c5.issubset(c1)\n        assert c1.issubset(c6)\n        assert not c6.issubset(c1)\n\n    def test_issubset_dtype(self):\n        c1 = UniformCoordinates1d(0, 10, step=1)\n        c2 = UniformCoordinates1d(\"2018\", \"2020\", step=\"1,Y\")\n        assert not c1.issubset(c2)\n        assert not c2.issubset(c1)\n\n    
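# a uniform coordinate can also be a subset of (unordered) array coordinates\n    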
def test_issubset_array_coordinates(self):\n        u = UniformCoordinates1d(start=1, stop=3, step=1)\n        a1 = ArrayCoordinates1d([1, 3, 2])\n        a2 = ArrayCoordinates1d([1, 2, 3])\n        a3 = ArrayCoordinates1d([1, 3, 4])\n        e = ArrayCoordinates1d([])\n\n        # subsets\n        assert u.issubset(a1)\n        assert u.issubset(a2)\n\n        # not subsets\n        assert not u.issubset(a3)\n        assert not u.issubset(e)\n\n    def test_issubset_coordinates(self):\n        u = UniformCoordinates1d(1, 3, 1, name=\"lat\")\n        c1 = Coordinates([[1, 2, 3], [10, 20, 30]], dims=[\"lat\", \"lon\"])\n        c2 = Coordinates([[1, 2, 4], [10, 20, 30]], dims=[\"lat\", \"lon\"])\n        c3 = Coordinates([[10, 20, 30]], dims=[\"alt\"])\n\n        assert u.issubset(c1)\n        assert not u.issubset(c2)\n        assert not u.issubset(c3)\n", "from __future__ import division, unicode_literals, print_function, absolute_import\n\nimport os\nimport json\nimport warnings\nimport tempfile\nfrom collections import OrderedDict\nfrom copy import deepcopy\n\ntry:\n    import urllib.parse as urllib\nexcept:  # Python 2.7\n    import urlparse as urllib\n\nimport six\nimport pytest\nimport numpy as np\nimport xarray as xr\nfrom pint.errors import DimensionalityError, UndefinedUnitError\nfrom pint import UnitRegistry\n\nureg = UnitRegistry()\nimport traitlets as tl\n\nimport podpac\nfrom podpac.core import common_test_utils as ctu\nfrom podpac.core.utils import ArrayTrait, NodeTrait\nfrom podpac.core.units import UnitsDataArray\nfrom podpac.core.style import Style\nfrom podpac.core.cache import CacheCtrl, RamCacheStore, DiskCacheStore\nfrom podpac.core.node import Node, NodeException, NodeDefinitionError\nfrom podpac.core.node import NoCacheMixin, DiskCacheMixin\n\n\nclass TestNode(object):\n    def test_style(self):\n        node = Node()\n        assert isinstance(node.style, Style)\n\n    def test_units(self):\n        node = Node(units=\"meters\")\n\n        with pytest.raises(UndefinedUnitError):\n            Node(units=\"abc\")\n\n    def test_outputs(self):\n        node = Node()\n        assert node.outputs is None\n\n        node = Node(outputs=[\"a\", \"b\"])\n        assert node.outputs == [\"a\", \"b\"]\n\n    def test_output(self):\n        node = Node()\n        assert node.output is None\n\n        node = Node(outputs=[\"a\", \"b\"])\n        assert node.output is None\n\n        node = Node(outputs=[\"a\", \"b\"], output=\"b\")\n        assert node.output == \"b\"\n\n        # must be one of the outputs\n        with pytest.raises(ValueError, match=\"Invalid output\"):\n            node = Node(outputs=[\"a\", \"b\"], output=\"other\")\n\n        # only valid for multiple-output nodes\n        with pytest.raises(TypeError, match=\"Invalid output\"):\n            node = Node(output=\"other\")\n\n    def test_cache_output(self):\n        with podpac.settings:\n            podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n            node = Node()\n            assert not node.cache_output\n\n            podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = True\n            node = Node()\n            assert node.cache_output\n\n    def test_cache_ctrl(self):\n        # settings\n        with podpac.settings:\n            podpac.settings[\"DEFAULT_CACHE\"] = [\"ram\"]\n            node = Node()\n            assert node.cache_ctrl is not None\n            assert len(node.cache_ctrl._cache_stores) == 1\n            assert isinstance(node.cache_ctrl._cache_stores[0], RamCacheStore)\n\n            podpac.settings[\"DEFAULT_CACHE\"] = [\"ram\", \"disk\"]\n            node = Node()\n            assert node.cache_ctrl is not None\n            assert len(node.cache_ctrl._cache_stores) == 2\n            assert isinstance(node.cache_ctrl._cache_stores[0], RamCacheStore)\n            assert isinstance(node.cache_ctrl._cache_stores[1], DiskCacheStore)\n\n        # specify\n        node = Node(cache_ctrl=[\"ram\"])\n        assert node.cache_ctrl is not None\n        assert len(node.cache_ctrl._cache_stores) == 1\n        assert isinstance(node.cache_ctrl._cache_stores[0], RamCacheStore)\n\n        node = Node(cache_ctrl=[\"ram\", \"disk\"])\n        assert node.cache_ctrl is not None\n        assert len(node.cache_ctrl._cache_stores) == 2\n        assert 
isinstance(node.cache_ctrl._cache_stores[0], RamCacheStore)\n assert isinstance(node.cache_ctrl._cache_stores[1], DiskCacheStore)\n\n def test_tagged_attr_readonly(self):\n class MyNode(Node):\n my_attr = tl.Any().tag(attr=True)\n\n with podpac.settings:\n podpac.settings[\"DEBUG\"] = False\n node = MyNode()\n assert node.traits()[\"my_attr\"].read_only\n\n podpac.settings[\"DEBUG\"] = True\n node = MyNode()\n assert not node.traits()[\"my_attr\"].read_only\n\n def test_trait_is_defined(self):\n node = Node()\n if tl.version_info[0] >= 5:\n assert not node.trait_is_defined(\"units\")\n else:\n assert node.trait_is_defined(\"units\")\n\n def test_init(self):\n class MyNode(Node):\n init_run = False\n\n def init(self):\n super(MyNode, self).init()\n self.init_run = True\n\n node = MyNode()\n assert node.init_run\n\n def test_attrs(self):\n class MyNode(Node):\n my_attr = tl.Any().tag(attr=True)\n my_trait = tl.Any()\n\n n = MyNode()\n assert \"my_attr\" in n.attrs\n assert \"my_trait\" not in n.attrs\n\n def test_repr(self):\n n = Node()\n repr(n)\n\n n = Node(outputs=[\"a\", \"b\"])\n repr(n)\n assert \"outputs=\" in repr(n)\n assert \"output=\" not in repr(n)\n\n n = Node(outputs=[\"a\", \"b\"], output=\"a\")\n repr(n)\n assert \"outputs=\" not in repr(n)\n assert \"output=\" in repr(n)\n\n def test_str(self):\n n = Node()\n str(n)\n\n n = Node(outputs=[\"a\", \"b\"])\n str(n)\n assert \"outputs=\" in str(n)\n assert \"output=\" not in str(n)\n\n n = Node(outputs=[\"a\", \"b\"], output=\"a\")\n str(n)\n assert \"outputs=\" not in str(n)\n assert \"output=\" in str(n)\n\n def test_eval_group(self):\n class MyNode(Node):\n def eval(self, coordinates, output=None, selector=None):\n return self.create_output_array(coordinates)\n\n c1 = podpac.Coordinates([[0, 1], [0, 1]], dims=[\"lat\", \"lon\"])\n c2 = podpac.Coordinates([[10, 11], [10, 11, 12]], dims=[\"lat\", \"lon\"])\n g = podpac.coordinates.GroupCoordinates([c1, c2])\n\n node = MyNode()\n outputs = node.eval_group(g)\n assert isinstance(outputs, list)\n assert len(outputs) == 2\n assert isinstance(outputs[0], UnitsDataArray)\n assert isinstance(outputs[1], UnitsDataArray)\n assert outputs[0].shape == (2, 2)\n assert outputs[1].shape == (2, 3)\n\n # invalid\n with pytest.raises(Exception):\n node.eval_group(c1)\n\n with pytest.raises(Exception):\n node.eval(g)\n\n def test_eval_not_implemented(self):\n node = Node()\n with pytest.raises(NotImplementedError):\n node.eval(podpac.Coordinates([]))\n\n with pytest.raises(NotImplementedError):\n node.eval(podpac.Coordinates([]), output=None)\n\n def test_find_coordinates_not_implemented(self):\n node = Node()\n with pytest.raises(NotImplementedError):\n node.find_coordinates()\n\n def test_get_bounds(self):\n class MyNode(Node):\n def find_coordinates(self):\n return [\n podpac.Coordinates([[0, 1, 2], [0, 10, 20]], dims=[\"lat\", \"lon\"], crs=\"EPSG:2193\"),\n podpac.Coordinates([[3, 4], [30, 40]], dims=[\"lat\", \"lon\"], crs=\"EPSG:2193\"),\n ]\n\n node = MyNode()\n\n with podpac.settings:\n podpac.settings[\"DEFAULT_CRS\"] = \"EPSG:4326\"\n\n # specify crs\n bounds, crs = node.get_bounds(crs=\"EPSG:2193\")\n assert bounds == {\"lat\": (0, 4), \"lon\": (0, 40)}\n assert crs == \"EPSG:2193\"\n\n # default crs\n bounds, crs = node.get_bounds()\n assert bounds == {\n \"lat\": (-75.81397534013118, -75.81362774074242),\n \"lon\": (82.92787904584206, 82.9280189659297),\n }\n assert crs == \"EPSG:4326\"\n\n\nclass TestCreateOutputArray(object):\n def test_create_output_array_default(self):\n c = 
podpac.Coordinates([podpac.clinspace((0, 0), (1, 1), 10), [0, 1, 2]], dims=[\"lat_lon\", \"time\"])\n node = Node()\n\n output = node.create_output_array(c)\n assert isinstance(output, UnitsDataArray)\n assert output.shape == c.shape\n assert output.dtype == node.dtype\n assert output.crs == c.crs\n assert np.all(np.isnan(output))\n\n def test_create_output_array_data(self):\n c = podpac.Coordinates([podpac.clinspace((0, 0), (1, 1), 10), [0, 1, 2]], dims=[\"lat_lon\", \"time\"])\n node = Node()\n\n output = node.create_output_array(c, data=0)\n assert isinstance(output, UnitsDataArray)\n assert output.shape == c.shape\n assert output.dtype == node.dtype\n assert output.crs == c.crs\n assert np.all(output == 0.0)\n\n @pytest.mark.xfail(reason=\"not yet supported.\")\n def test_create_output_array_dtype(self):\n c = podpac.Coordinates([podpac.clinspace((0, 0), (1, 1), 10), [0, 1, 2]], dims=[\"lat_lon\", \"time\"])\n node = Node(dtype=bool)\n\n output = node.create_output_array(c, data=0)\n assert isinstance(output, UnitsDataArray)\n assert output.shape == c.shape\n assert output.dtype == node.dtype\n assert output.crs == c.crs\n assert np.all(~output)\n\n def test_create_output_array_units(self):\n c = podpac.Coordinates([podpac.clinspace((0, 0), (1, 1), 10), [0, 1, 2]], dims=[\"lat_lon\", \"time\"])\n node = Node(units=\"meters\")\n\n output = node.create_output_array(c)\n assert isinstance(output, UnitsDataArray)\n\n from podpac.core.units import ureg as _ureg\n\n assert output.units == _ureg.meters\n\n def test_create_output_array_crs(self):\n crs = \"+proj=merc +lat_ts=56.5 +ellps=GRS80\"\n c = podpac.Coordinates([podpac.clinspace((0, 0), (1, 1), 10), [0, 1, 2]], dims=[\"lat_lon\", \"time\"], crs=crs)\n node = Node()\n\n output = node.create_output_array(c)\n assert output.crs == crs\n\n\nclass TestNodeEval(object):\n def test_extract_output(self):\n coords = podpac.Coordinates([[0, 1, 2, 3], [0, 1]], dims=[\"lat\", \"lon\"])\n\n class MyNode1(Node):\n outputs = [\"a\", \"b\", \"c\"]\n\n def _eval(self, coordinates, output=None, selector=None):\n return self.create_output_array(coordinates)\n\n # don't extract when no output field is requested\n node = MyNode1()\n out = node.eval(coords)\n assert out.shape == (4, 2, 3)\n\n # do extract when an output field is requested\n node = MyNode1(output=\"b\")\n out = node.eval(coords)\n assert out.shape == (4, 2)\n\n # should still work if the node has already extracted it\n class MyNode2(Node):\n outputs = [\"a\", \"b\", \"c\"]\n\n def _eval(self, coordinates, output=None, selector=None):\n out = self.create_output_array(coordinates)\n return out.sel(output=self.output)\n\n node = MyNode2(output=\"b\")\n out = node.eval(coords)\n assert out.shape == (4, 2)\n\n def test_evaluate_transpose(self):\n class MyNode(Node):\n def _eval(self, coordinates, output=None, selector=None):\n coords = coordinates.transpose(\"lat\", \"lon\")\n data = np.arange(coords.size).reshape(coords.shape)\n a = self.create_output_array(coords, data=data)\n if output is None:\n output = a\n else:\n output[:] = a.transpose(*output.dims)\n return output\n\n coords = podpac.Coordinates([[0, 1, 2, 3], [0, 1]], dims=[\"lat\", \"lon\"])\n\n node = MyNode()\n o1 = node.eval(coords)\n o2 = node.eval(coords.transpose(\"lon\", \"lat\"))\n\n # returned output should match the requested coordinates and data should be transposed\n assert o1.dims == (\"lat\", \"lon\")\n assert o2.dims == (\"lon\", \"lat\")\n np.testing.assert_array_equal(o2.transpose(\"lat\", \"lon\").data, o1.data)\n\n # 
with transposed output\n        o3 = node.create_output_array(coords.transpose(\"lon\", \"lat\"))\n        o4 = node.eval(coords, output=o3)\n\n        assert o3.dims == (\"lon\", \"lat\")  # stay the same\n        assert o4.dims == (\"lat\", \"lon\")  # match requested coordinates\n        np.testing.assert_equal(o3.transpose(\"lat\", \"lon\").data, o4.data)\n\n    def test_eval_get_cache(self):\n        with podpac.settings:\n            podpac.settings[\"RAM_CACHE_ENABLED\"] = True\n\n            class MyNode(Node):\n                def _eval(self, coordinates, output=None, selector=None):\n                    coords = coordinates.transpose(\"lat\", \"lon\")\n                    data = np.arange(coords.size).reshape(coords.shape)\n                    a = self.create_output_array(coords, data=data)\n                    if output is None:\n                        output = a\n                    else:\n                        output[:] = a.transpose(*output.dims)\n                    return output\n\n            coords = podpac.Coordinates([[0, 1, 2, 3], [0, 1]], dims=[\"lat\", \"lon\"])\n\n            node = MyNode(cache_output=True, cache_ctrl=CacheCtrl([RamCacheStore()]))\n\n            # first eval\n            o1 = node.eval(coords)\n            assert node._from_cache == False\n\n            # get from cache\n            o2 = node.eval(coords)\n            assert node._from_cache == True\n            np.testing.assert_array_equal(o2, o1)\n\n            # get from cache with output\n            o3 = node.eval(coords, output=o1)\n            assert node._from_cache == True\n            np.testing.assert_array_equal(o3, o1)\n\n            # get from cache with output transposed\n            o4 = node.eval(coords, output=o1.transpose(\"lon\", \"lat\"))\n            assert node._from_cache == True\n            np.testing.assert_array_equal(o4, o1)\n\n            # get from cache with coords transposed\n            o5 = node.eval(coords.transpose(\"lon\", \"lat\"))\n            assert node._from_cache == True\n            np.testing.assert_array_equal(o5, o1.transpose(\"lon\", \"lat\"))\n\n    def test_eval_output_crs(self):\n        coords = podpac.Coordinates([[0, 1, 2, 3], [0, 1]], dims=[\"lat\", \"lon\"])\n\n        node = Node()\n        with pytest.raises(ValueError, match=\"Output coordinate reference system .* does not match\"):\n            node.eval(coords, output=node.create_output_array(coords.transform(\"EPSG:2193\")))\n\n\nclass TestCaching(object):\n    @classmethod\n    def setup_class(cls):\n        cls._ram_cache_enabled = podpac.settings[\"RAM_CACHE_ENABLED\"]\n\n        podpac.settings[\"RAM_CACHE_ENABLED\"] = True\n\n        class MyNode(Node):\n            pass\n\n        cls.node = MyNode(cache_ctrl=CacheCtrl([RamCacheStore()]))\n        cls.node.rem_cache(key=\"*\", coordinates=\"*\")\n\n        cls.coords = podpac.Coordinates([0, 0], dims=[\"lat\", \"lon\"])\n        cls.coords2 = podpac.Coordinates([1, 1], dims=[\"lat\", \"lon\"])\n\n    @classmethod\n    def teardown_class(cls):\n        cls.node.rem_cache(key=\"*\", coordinates=\"*\")\n\n        podpac.settings[\"RAM_CACHE_ENABLED\"] = cls._ram_cache_enabled\n\n    def setup_method(self, method):\n        self.node.rem_cache(key=\"*\", coordinates=\"*\")\n\n    def teardown_method(self, method):\n        self.node.rem_cache(key=\"*\", coordinates=\"*\")\n\n    def test_has_cache(self):\n        assert not self.node.has_cache(\"test\")\n\n        self.node.put_cache(0, \"test\")\n        assert self.node.has_cache(\"test\")\n        assert not self.node.has_cache(\"test\", coordinates=self.coords)\n\n    def test_has_coordinates(self):\n        assert not self.node.has_cache(\"test\", coordinates=self.coords)\n\n        self.node.put_cache(0, \"test\", coordinates=self.coords)\n\n        assert not self.node.has_cache(\"test\")\n        assert self.node.has_cache(\"test\", coordinates=self.coords)\n        assert not self.node.has_cache(\"test\", coordinates=self.coords2)\n\n    def test_get_put_cache(self):\n        with pytest.raises(NodeException):\n            self.node.get_cache(\"test\")\n\n        self.node.put_cache(0, \"test\")\n        assert self.node.get_cache(\"test\") == 0\n\n    def test_get_put_coordinates(self):\n        with 
pytest.raises(NodeException):\n self.node.get_cache(\"test\")\n with pytest.raises(NodeException):\n self.node.get_cache(\"test\", coordinates=self.coords)\n with pytest.raises(NodeException):\n self.node.get_cache(\"test\", coordinates=self.coords2)\n\n self.node.put_cache(0, \"test\")\n self.node.put_cache(1, \"test\", coordinates=self.coords)\n self.node.put_cache(2, \"test\", coordinates=self.coords2)\n\n assert self.node.get_cache(\"test\") == 0\n assert self.node.get_cache(\"test\", coordinates=self.coords) == 1\n assert self.node.get_cache(\"test\", coordinates=self.coords2) == 2\n\n def test_put_overwrite(self):\n self.node.put_cache(0, \"test\")\n assert self.node.get_cache(\"test\") == 0\n\n with pytest.raises(NodeException):\n self.node.put_cache(1, \"test\", overwrite=False)\n assert self.node.get_cache(\"test\") == 0\n\n self.node.put_cache(1, \"test\")\n assert self.node.get_cache(\"test\") == 1\n\n def test_rem_all(self):\n self.node.put_cache(0, \"a\")\n self.node.put_cache(0, \"b\")\n self.node.put_cache(0, \"a\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords2)\n self.node.put_cache(0, \"d\", coordinates=self.coords)\n\n self.node.rem_cache(key=\"*\", coordinates=\"*\")\n assert not self.node.has_cache(\"a\")\n assert not self.node.has_cache(\"b\")\n assert not self.node.has_cache(\"a\", coordinates=self.coords)\n assert not self.node.has_cache(\"c\", coordinates=self.coords)\n assert not self.node.has_cache(\"c\", coordinates=self.coords2)\n assert not self.node.has_cache(\"d\", coordinates=self.coords)\n\n def test_rem_key(self):\n self.node.put_cache(0, \"a\")\n self.node.put_cache(0, \"b\")\n self.node.put_cache(0, \"a\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords2)\n self.node.put_cache(0, \"d\", coordinates=self.coords)\n\n self.node.rem_cache(key=\"a\", coordinates=\"*\")\n\n assert not self.node.has_cache(\"a\")\n assert not self.node.has_cache(\"a\", coordinates=self.coords)\n assert self.node.has_cache(\"b\")\n assert self.node.has_cache(\"c\", coordinates=self.coords)\n assert self.node.has_cache(\"c\", coordinates=self.coords2)\n assert self.node.has_cache(\"d\", coordinates=self.coords)\n\n def test_rem_coordinates(self):\n self.node.put_cache(0, \"a\")\n self.node.put_cache(0, \"b\")\n self.node.put_cache(0, \"a\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords2)\n self.node.put_cache(0, \"d\", coordinates=self.coords)\n\n self.node.rem_cache(key=\"*\", coordinates=self.coords)\n\n assert self.node.has_cache(\"a\")\n assert not self.node.has_cache(\"a\", coordinates=self.coords)\n assert self.node.has_cache(\"b\")\n assert not self.node.has_cache(\"c\", coordinates=self.coords)\n assert self.node.has_cache(\"c\", coordinates=self.coords2)\n assert not self.node.has_cache(\"d\", coordinates=self.coords)\n\n def test_rem_key_coordinates(self):\n self.node.put_cache(0, \"a\")\n self.node.put_cache(0, \"b\")\n self.node.put_cache(0, \"a\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords)\n self.node.put_cache(0, \"c\", coordinates=self.coords2)\n self.node.put_cache(0, \"d\", coordinates=self.coords)\n\n self.node.rem_cache(key=\"a\", coordinates=self.coords)\n\n assert self.node.has_cache(\"a\")\n assert not self.node.has_cache(\"a\", 
coordinates=self.coords)\n assert self.node.has_cache(\"b\")\n assert self.node.has_cache(\"c\", coordinates=self.coords)\n assert self.node.has_cache(\"c\", coordinates=self.coords2)\n assert self.node.has_cache(\"d\", coordinates=self.coords)\n\n def test_put_has_expires(self):\n self.node.put_cache(10, \"key1\", expires=\"1,D\")\n self.node.put_cache(10, \"key2\", expires=\"-1,D\")\n assert self.node.has_cache(\"key1\")\n assert not self.node.has_cache(\"key2\")\n\n def test_put_get_expires(self):\n self.node.put_cache(10, \"key1\", expires=\"1,D\")\n self.node.put_cache(10, \"key2\", expires=\"-1,D\")\n assert self.node.get_cache(\"key1\") == 10\n with pytest.raises(NodeException, match=\"cached data not found\"):\n self.node.get_cache(\"key2\")\n\n # node definition errors\n # this demonstrates both classes of error in the has_cache case, but only one for put/get/rem\n # we could test both classes for put/get/rem as well, but that is not really necessary\n def test_has_cache_unavailable_circular(self):\n class MyNode(Node):\n a = tl.Any().tag(attr=True)\n\n @tl.default(\"a\")\n def _default_a(self):\n return self.b\n\n @property\n def b(self):\n self.has_cache(\"b\")\n return 10\n\n node = MyNode(cache_ctrl=[\"ram\"])\n with pytest.raises(NodeException, match=\"Cache unavailable, node definition has a circular dependency\"):\n assert node.b == 10\n\n def test_has_cache_unavailable_uninitialized(self):\n class MyNode(Node):\n a = tl.Any().tag(attr=True)\n\n @tl.validate(\"a\")\n def _validate_a(self, d):\n self.b\n return d[\"value\"]\n\n @property\n def b(self):\n self.has_cache(\"key\")\n return 10\n\n with pytest.raises(NodeException, match=\"Cache unavailable, node is not yet fully initialized\"):\n node = MyNode(a=3, cache_ctrl=[\"ram\"])\n\n def test_put_cache_unavailable_uninitialized(self):\n class MyNode(Node):\n a = tl.Any().tag(attr=True)\n\n @tl.validate(\"a\")\n def _validate_a(self, d):\n self.b\n return d[\"value\"]\n\n @property\n def b(self):\n self.put_cache(10, \"key\")\n return 10\n\n with pytest.raises(NodeException, match=\"Cache unavailable\"):\n node = MyNode(a=3, cache_ctrl=[\"ram\"])\n\n def test_get_cache_unavailable_uninitialized(self):\n class MyNode(Node):\n a = tl.Any().tag(attr=True)\n\n @tl.validate(\"a\")\n def _validate_a(self, d):\n self.b\n return d[\"value\"]\n\n @property\n def b(self):\n self.get_cache(\"key\")\n return 10\n\n with pytest.raises(NodeException, match=\"Cache unavailable\"):\n node = MyNode(a=3, cache_ctrl=[\"ram\"])\n\n def test_rem_cache_unavailable_uninitialized(self):\n class MyNode(Node):\n a = tl.Any().tag(attr=True)\n\n @tl.validate(\"a\")\n def _validate_a(self, d):\n self.b\n return d[\"value\"]\n\n @property\n def b(self):\n self.rem_cache(\"key\")\n return 10\n\n with pytest.raises(NodeException, match=\"Cache unavailable\"):\n node = MyNode(a=3, cache_ctrl=[\"ram\"])\n\n\nclass TestSerialization(object):\n @classmethod\n def setup_class(cls):\n a = podpac.algorithm.Arange()\n b = podpac.data.Array(source=[10, 20, 30], coordinates=podpac.Coordinates([[0, 1, 2]], dims=[\"lat\"]))\n c = podpac.compositor.OrderedCompositor(sources=[a, b])\n\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", \"Insecure evaluation.*\")\n cls.node = podpac.algorithm.Arithmetic(A=a, B=b, C=c, eqn=\"A + B + C\")\n\n def test_base_ref(self):\n node = Node()\n assert isinstance(node.base_ref, six.string_types)\n\n def test_base_definition(self):\n node = Node()\n d = node._base_definition\n assert \"node\" in d\n assert 
isinstance(d[\"node\"], six.string_types)\n\n def test_base_definition_attrs(self):\n class MyNode(Node):\n my_attr = tl.Int().tag(attr=True)\n\n node = MyNode(my_attr=7)\n\n d = node._base_definition\n assert d[\"attrs\"][\"my_attr\"] == 7\n\n def test_base_definition_inputs(self):\n class MyNode(Node):\n my_attr = NodeTrait().tag(attr=True)\n\n a = Node()\n node = MyNode(my_attr=a)\n\n d = node._base_definition\n assert d[\"inputs\"][\"my_attr\"] == a\n\n def test_base_definition_inputs_array(self):\n class MyNode(Node):\n my_attr = ArrayTrait().tag(attr=True)\n\n a = Node()\n b = Node()\n node = MyNode(my_attr=[a, b])\n\n d = node._base_definition\n assert d[\"inputs\"][\"my_attr\"][0] == a\n assert d[\"inputs\"][\"my_attr\"][1] == b\n\n def test_base_definition_inputs_dict(self):\n class MyNode(Node):\n my_attr = tl.Dict().tag(attr=True)\n\n a = Node()\n b = Node()\n node = MyNode(my_attr={\"a\": a, \"b\": b})\n\n d = node._base_definition\n assert d[\"inputs\"][\"my_attr\"][\"a\"] == a\n assert d[\"inputs\"][\"my_attr\"][\"b\"] == b\n\n def test_base_definition_style(self):\n node = Node(style=Style(name=\"test\"))\n d = node._base_definition\n assert \"style\" in node._base_definition\n\n def test_base_definition_remove_unnecessary_attrs(self):\n node = Node(outputs=[\"a\", \"b\"], output=\"a\", units=\"m\")\n d = node._base_definition\n assert \"outputs\" in d[\"attrs\"]\n assert \"output\" in d[\"attrs\"]\n assert \"units\" in d[\"attrs\"]\n\n node = Node()\n d = node._base_definition\n if \"attrs\" in d:\n assert \"outputs\" not in d[\"attrs\"]\n assert \"output\" not in d[\"attrs\"]\n assert \"units\" not in d[\"attrs\"]\n\n def test_definition(self):\n # definition\n d = self.node.definition\n assert isinstance(d, OrderedDict)\n assert len(d) == 5\n\n # from_definition\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", \"Insecure evaluation.*\")\n node = Node.from_definition(d)\n\n assert node is not self.node\n assert node == self.node\n assert isinstance(node, podpac.algorithm.Arithmetic)\n assert isinstance(node.inputs[\"A\"], podpac.algorithm.Arange)\n assert isinstance(node.inputs[\"B\"], podpac.data.Array)\n assert isinstance(node.inputs[\"C\"], podpac.compositor.OrderedCompositor)\n\n def test_definition_duplicate_base_ref(self):\n n1 = Node(units=\"m\")\n n2 = Node(units=\"ft\")\n n3 = Node(units=\"in\")\n node = podpac.compositor.OrderedCompositor(sources=[n1, n2, n3])\n d = node.definition\n assert n1.base_ref == n2.base_ref == n3.base_ref\n assert len(d) == 5\n\n def test_definition_inputs_array(self):\n global MyNodeWithArrayInput\n\n class MyNodeWithArrayInput(Node):\n my_array = ArrayTrait().tag(attr=True)\n\n node1 = MyNodeWithArrayInput(my_array=[podpac.algorithm.Arange()])\n node2 = Node.from_definition(node1.definition)\n assert node2 is not node1 and node2 == node1\n\n def test_definition_inputs_dict(self):\n global MyNodeWithDictInput\n\n class MyNodeWithDictInput(Node):\n my_dict = tl.Dict().tag(attr=True)\n\n node1 = MyNodeWithDictInput(my_dict={\"a\": podpac.algorithm.Arange()})\n node2 = Node.from_definition(node1.definition)\n assert node2 is not node1 and node2 == node1\n\n def test_definition_version(self):\n d = self.node.definition\n assert \"podpac_version\" in d\n assert d[\"podpac_version\"] == podpac.__version__\n\n def test_json(self):\n # json\n s = self.node.json\n assert isinstance(s, six.string_types)\n assert json.loads(s)\n\n # test from_json\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", 
\"Insecure evaluation.*\")\n node = Node.from_json(s)\n assert node is not self.node\n assert node == self.node\n assert isinstance(node, podpac.algorithm.Arithmetic)\n assert isinstance(node.inputs[\"A\"], podpac.algorithm.Arange)\n assert isinstance(node.inputs[\"B\"], podpac.data.Array)\n assert isinstance(node.inputs[\"C\"], podpac.compositor.OrderedCompositor)\n\n def test_file(self):\n path = tempfile.mkdtemp(prefix=\"podpac-test-\")\n filename = os.path.join(path, \"node.json\")\n\n # save\n self.node.save(filename)\n assert os.path.exists(filename)\n\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", \"Insecure evaluation.*\")\n node = Node.load(filename)\n\n assert node is not self.node\n assert node == self.node\n assert isinstance(node, podpac.algorithm.Arithmetic)\n assert isinstance(node.inputs[\"A\"], podpac.algorithm.Arange)\n assert isinstance(node.inputs[\"B\"], podpac.data.Array)\n assert isinstance(node.inputs[\"C\"], podpac.compositor.OrderedCompositor)\n\n def test_json_pretty(self):\n node = Node()\n s = node.json_pretty\n assert isinstance(s, six.string_types)\n json.loads(s)\n\n def test_hash(self):\n class N(Node):\n my_attr = tl.Int().tag(attr=True)\n\n class M(Node):\n my_attr = tl.Int().tag(attr=True)\n\n n1 = N(my_attr=1)\n n2 = N(my_attr=1)\n n3 = N(my_attr=2)\n m1 = M(my_attr=1)\n\n assert n1.hash == n2.hash\n assert n1.hash != n3.hash\n assert n1.hash != m1.hash\n\n def test_hash_preserves_definition(self):\n n = Node()\n d_before = deepcopy(n.definition)\n h = n.hash\n d_after = deepcopy(n.definition)\n\n assert d_before == d_after\n\n def test_hash_omit_style(self):\n class N(Node):\n my_attr = tl.Int().tag(attr=True)\n\n n1 = N(my_attr=1, style=Style(name=\"a\"))\n n2 = N(my_attr=1, style=Style(name=\"b\"))\n\n # json has style in it\n assert n1.json != n2.json\n\n # but hash does not\n assert n1.hash == n2.hash\n\n def test_hash_omit_version(self):\n version = podpac.__version__\n\n try:\n # actual version\n n1 = Node()\n s1 = n1.json\n h1 = n1.hash\n\n # spoof different version\n podpac.__version__ = \"other\"\n n2 = Node()\n s2 = n2.json\n h2 = n2.hash\n\n # JSON should be different, but hash should be the same\n assert s1 != s2\n assert h1 == h2\n\n finally:\n # reset version\n podpac.__version__ = version\n\n def test_eq(self):\n class N(Node):\n my_attr = tl.Int().tag(attr=True)\n\n class M(Node):\n my_attr = tl.Int().tag(attr=True)\n\n n1 = N(my_attr=1)\n n2 = N(my_attr=1)\n n3 = N(my_attr=2)\n m1 = M(my_attr=1)\n\n # eq\n assert n1 == n2\n assert not n1 == n3\n assert not n1 == m1\n assert not n1 == \"other\"\n\n # ne\n assert not n1 != n2\n assert n1 != n3\n assert n1 != m1\n assert n1 != \"other\"\n\n def test_eq_ignore_style(self):\n class N(Node):\n my_attr = tl.Int().tag(attr=True)\n\n n1 = N(my_attr=1, style=Style(name=\"a\"))\n n2 = N(my_attr=1, style=Style(name=\"b\"))\n\n # json has style in it\n assert n1.json != n2.json\n\n # but == and != don't care\n assert n1 == n2\n assert not n1 != n2\n\n def test_from_url(self):\n url = (\n r\"http://testwms/?map=map&&service={service}&request=GetMap&{layername}={layer}&styles=&format=image%2Fpng\"\n r\"&transparent=true&version=1.1.1&transparency=true&width=256&height=256&srs=EPSG%3A4326\"\n r\"&bbox=40,-71,41,70&time=2018-05-19&PARAMS={params}\"\n )\n\n params = [\"{}\", '{\"a\":{\"node\":\"algorithm.Arange\"}}', \"{}\", \"{}\"]\n\n for service, layername in zip([\"WMS\", \"WCS\"], [\"LAYERS\", \"COVERAGE\"]):\n for layer, param in zip(\n [\n \"algorithm.SinCoords\",\n 
\"%PARAMS%\",\n # urllib.urlencode({'a':'https://raw.githubusercontent.com/creare-com/podpac/develop/podpac/core/pipeline/test/test.json'})[2:],\n # urllib.urlencode({'a':'s3://podpac-s3/test/test.json'})[2:] # Tested locally, works fine. Hard to test with CI\n ],\n params,\n ):\n pipe = Node.from_url(url.format(service=service, layername=layername, layer=layer, params=param))\n\n def test_from_url_with_plugin_style_params(self):\n url0 = (\n r\"https://mobility-devel.crearecomputing.com/geowatch?&SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&\"\n r\"LAYERS=Arange&STYLES=&FORMAT=image%2Fpng&TRANSPARENT=true&HEIGHT=256&WIDTH=256\"\n r\"&CRS=EPSG%3A3857&BBOX=-20037508.342789244,10018754.171394618,-10018754.171394622,20037508.34278071&\"\n r'PARAMS={\"plugin\": \"podpac.algorithm\"}'\n )\n url1 = (\n r\"https://mobility-devel.crearecomputing.com/geowatch?&SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&\"\n r\"LAYERS=datalib.terraintiles.TerrainTiles&STYLES=&FORMAT=image%2Fpng&TRANSPARENT=true&HEIGHT=256&WIDTH=256&\"\n r\"TIME=2021-03-01T12%3A00%3A00.000Z&CRS=EPSG%3A3857&BBOX=-10018754.171394622,5009377.08569731,-9392582.035682458,5635549.221409475\"\n r'&PARAMS={\"style\": {\"name\": \"Aspect (Composited 30-90 m)\",\"units\": \"radians\",\"colormap\": \"hsv\",\"clim\": [0,6.283185307179586]}}'\n )\n node = Node.from_url(url0)\n node = Node.from_url(url1)\n\n def test_from_name_params(self):\n # Normal\n name = \"algorithm.Arange\"\n node = Node.from_name_params(name)\n\n # Normal with params\n name = \"algorithm.CoordData\"\n params = {\"coord_name\": \"alt\"}\n node = Node.from_name_params(name, params)\n assert node.coord_name == \"alt\"\n\n # Plugin style\n name = \"CoordData\"\n params = {\"plugin\": \"podpac.algorithm\", \"attrs\": {\"coord_name\": \"alt\"}}\n node = Node.from_name_params(name, params)\n assert node.coord_name == \"alt\"\n\n def test_style(self):\n node = podpac.data.Array(\n source=[10, 20, 30],\n coordinates=podpac.Coordinates([[0, 1, 2]], dims=[\"lat\"]),\n style=Style(name=\"test\", units=\"m\"),\n )\n\n d = node.definition\n assert \"style\" in d[node.base_ref]\n\n node2 = Node.from_definition(d)\n assert node2 is not node\n assert isinstance(node2, podpac.data.Array)\n assert node2.style is not node.style\n assert node2.style == node.style\n assert node2.style.name == \"test\"\n assert node2.style.units == \"m\"\n\n # default style\n node = podpac.data.Array(source=[10, 20, 30], coordinates=podpac.Coordinates([[0, 1, 2]], dims=[\"lat\"]))\n d = node.definition\n assert \"style\" not in d[node.base_ref]\n\n def test_circular_definition(self):\n # this is admittedly a contrived example in order to demonstrate the most direct case\n class MyNode(Node):\n a = tl.Any().tag(attr=True)\n\n @tl.default(\"a\")\n def _default_a(self):\n self.definition\n return 10\n\n node = MyNode()\n with pytest.raises(NodeDefinitionError, match=\"node definition has a circular dependency\"):\n node.a\n\n\nclass TestUserDefinition(object):\n def test_empty(self):\n s = \"{ }\"\n with pytest.raises(ValueError, match=\"definition cannot be empty\"):\n Node.from_json(s)\n\n def test_no_node(self):\n s = '{\"test\": { } }'\n with pytest.raises(ValueError, match=\"'node' property required\"):\n Node.from_json(s)\n\n def test_invalid_node(self):\n # module does not exist\n s = '{\"a\": {\"node\": \"nonexistent.Arbitrary\"} }'\n with pytest.raises(ValueError, match=\"no module found\"):\n Node.from_json(s)\n\n # node does not exist in module\n s = '{\"a\": {\"node\": \"core.Nonexistent\"} }'\n with 
pytest.raises(ValueError, match=\"class 'Nonexistent' not found in module\"):\n Node.from_json(s)\n\n def test_inputs(self):\n # invalid type\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.Min\",\n \"inputs\": { \"source\": 10 }\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"Invalid definition for node\"):\n Node.from_json(s)\n\n # nonexistent node\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.Min\",\n \"inputs\": { \"source\": \"nonexistent\" }\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"Invalid definition for node\"):\n Node.from_json(s)\n\n def test_lookup_attrs(self):\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.CoordData\",\n \"attrs\": { \"coord_name\": \"lat\" }\n },\n \"b\": {\n \"node\": \"algorithm.CoordData\",\n \"lookup_attrs\": { \"coord_name\": \"a.coord_name\" }\n }\n }\n \"\"\"\n\n node = Node.from_json(s)\n assert isinstance(node, podpac.algorithm.CoordData)\n assert node.coord_name == \"lat\"\n\n # invalid type\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.CoordData\",\n \"attrs\": { \"coord_name\": \"lat\" }\n },\n \"b\": {\n \"node\": \"algorithm.CoordData\",\n \"lookup_attrs\": { \"coord_name\": 10 }\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"Invalid definition for node\"):\n Node.from_json(s)\n\n # nonexistent node\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.CoordData\",\n \"attrs\": { \"coord_name\": \"lat\" }\n },\n \"b\": {\n \"node\": \"algorithm.CoordData\",\n \"lookup_attrs\": { \"coord_name\": \"nonexistent.coord_name\" }\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"Invalid definition for node\"):\n Node.from_json(s)\n\n # nonexistent subattr\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.CoordData\",\n \"attrs\": { \"coord_name\": \"lat\" }\n },\n \"b\": {\n \"node\": \"algorithm.CoordData\",\n \"lookup_attrs\": { \"coord_name\": \"a.nonexistent\" }\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"Invalid definition for node\"):\n Node.from_json(s)\n\n def test_invalid_property(self):\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.Arange\",\n \"invalid_property\": \"value\"\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"unexpected property\"):\n Node.from_json(s)\n\n def test_plugin(self):\n global MyPluginNode\n\n class MyPluginNode(Node):\n pass\n\n s = \"\"\"\n {\n \"mynode\": {\n \"plugin\": \"test_node\",\n \"node\": \"MyPluginNode\"\n }\n }\n \"\"\"\n\n node = Node.from_json(s)\n assert isinstance(node, MyPluginNode)\n\n # missing plugin\n s = \"\"\"\n {\n \"mynode\": {\n \"plugin\": \"missing\",\n \"node\": \"MyPluginNode\"\n }\n }\n \"\"\"\n\n with pytest.raises(ValueError, match=\"no module found\"):\n Node.from_json(s)\n\n def test_debuggable(self):\n s = \"\"\"\n {\n \"a\": {\n \"node\": \"algorithm.Arange\"\n },\n \"mean\": {\n \"node\": \"algorithm.Convolution\",\n \"lookup_attrs\": {\"source\": \"a\"},\n \"attrs\": {\"kernel_type\": \"mean,3\", \"kernel_dims\": [\"lat\", \"lon\"]}\n },\n \"c\": {\n \"node\": \"algorithm.Arithmetic\",\n \"lookup_attrs\": {\"A\": \"a\", \"B\": \"mean\"},\n \"attrs\": {\"eqn\": \"a-b\"}\n }\n }\n \"\"\"\n\n with warnings.catch_warnings(), podpac.settings:\n warnings.filterwarnings(\"ignore\", \"Insecure evaluation.*\")\n\n # normally node objects can and should be re-used\n podpac.settings[\"DEBUG\"] = False\n node = Node.from_json(s)\n assert node.inputs[\"A\"] is node.inputs[\"B\"].source\n\n # when debugging is on, node objects should be unique\n podpac.settings[\"DEBUG\"] = True\n node = 
Node.from_json(s)\n            assert node.inputs[\"A\"] is not node.inputs[\"B\"].source\n\n    def test_from_definition_version_warning(self):\n        s = \"\"\"\n        {\n            \"a\": {\n                \"node\": \"algorithm.Arange\"\n            },\n            \"podpac_version\": \"other\"\n        }\n        \"\"\"\n\n        with pytest.warns(UserWarning, match=\"node definition version mismatch\"):\n            node = Node.from_json(s)\n\n\nclass TestNoCacheMixin(object):\n    class NoCacheNode(NoCacheMixin, Node):\n        pass\n\n    def test_default_no_cache(self):\n        with podpac.settings:\n            podpac.settings[\"DEFAULT_CACHE\"] = [\"ram\"]\n            node = self.NoCacheNode()\n            assert len(node.cache_ctrl._cache_stores) == 0\n\n    def test_customizable(self):\n        with podpac.settings:\n            podpac.settings[\"DEFAULT_CACHE\"] = [\"ram\"]\n            node = self.NoCacheNode(cache_ctrl=[\"ram\"])\n            assert len(node.cache_ctrl._cache_stores) == 1\n\n\nclass TestDiskCacheMixin(object):\n    class DiskCacheNode(DiskCacheMixin, Node):\n        pass\n\n    def test_default_disk_cache(self):\n        with podpac.settings:\n            # add disk cache\n            podpac.settings[\"DEFAULT_CACHE\"] = [\"ram\"]\n            node = self.DiskCacheNode()\n            assert len(node.cache_ctrl._cache_stores) == 2\n\n            # don't add if it is already there\n            podpac.settings[\"DEFAULT_CACHE\"] = [\"ram\", \"disk\"]\n            node = self.DiskCacheNode()\n            assert len(node.cache_ctrl._cache_stores) == 2\n\n    def test_customizable(self):\n        node = self.DiskCacheNode(cache_ctrl=[\"ram\"])\n        assert len(node.cache_ctrl._cache_stores) == 1\n\n\n# TODO: remove this - this is currently a placeholder test until we actually have integration tests (pytest will exit with code 5 if no tests found)\n@pytest.mark.integration\ndef tests_node_integration():\n    assert True\n", "\"\"\"\nTest interpolation methods\n\n\n\"\"\"\n# pylint: disable=C0111,W0212,R0903\n\n\nimport warnings\n\nimport pytest\nimport traitlets as tl\nimport numpy as np\nfrom numpy.testing import assert_array_equal\n\nimport podpac\nfrom podpac.core.units import UnitsDataArray\nfrom podpac.core.node import Node\nfrom podpac.core.coordinates import Coordinates\nfrom podpac.core.interpolation.interpolation_manager import InterpolationException\nfrom podpac.core.interpolation.interpolation import Interpolate, InterpolationMixin\nfrom podpac.core.data.array_source import Array, ArrayRaw\nfrom podpac.core.compositor.tile_compositor import TileCompositorRaw\nfrom podpac.core.interpolation.scipy_interpolator import ScipyGrid\n\n\nclass TestInterpolationMixin(object):\n    def test_interpolation_mixin(self):\n        class InterpArray(InterpolationMixin, ArrayRaw):\n            pass\n\n        data = np.random.rand(4, 5)\n        native_coords = Coordinates([np.linspace(0, 3, 4), np.linspace(0, 4, 5)], [\"lat\", \"lon\"])\n        coords = Coordinates([np.linspace(0, 3, 7), np.linspace(0, 4, 9)], [\"lat\", \"lon\"])\n\n        iarr_src = InterpArray(source=data, coordinates=native_coords, interpolation=\"bilinear\")\n        arr_src = Array(source=data, coordinates=native_coords, interpolation=\"bilinear\")\n        arrb_src = ArrayRaw(source=data, coordinates=native_coords)\n\n        iaso = iarr_src.eval(coords)\n        aso = arr_src.eval(coords)\n        abso = arrb_src.eval(coords)\n\n        np.testing.assert_array_equal(iaso.data, aso.data)\n        np.testing.assert_array_equal(abso.data, data)\n\n\nclass TestInterpolation(object):\n    s1 = ArrayRaw(\n        source=np.random.rand(9, 15),\n        coordinates=Coordinates([np.linspace(0, 8, 9), np.linspace(0, 14, 15)], [\"lat\", \"lon\"]),\n    )\n    s2 = ArrayRaw(\n        source=np.random.rand(9, 15),\n        coordinates=Coordinates([np.linspace(9, 17, 9), np.linspace(0, 14, 15)], [\"lat\", \"lon\"]),\n    )\n    interp = Interpolate(source=s1, interpolation=\"nearest\")\n    
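# request grids: coords oversamples s1 (double density), coords2 spans s1 and s2,\n    # and coords2c is a coarse grid offset from the source points\n    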
coords = Coordinates([np.linspace(0, 8, 17), np.linspace(0, 14, 29)], [\"lat\", \"lon\"])\n    coords2 = Coordinates([np.linspace(0, 17, 18), np.linspace(0, 14, 15)], [\"lat\", \"lon\"])\n    coords2c = Coordinates([np.linspace(0.1, 16.8, 5), np.linspace(0.1, 13.8, 3)], [\"lat\", \"lon\"])\n\n    def test_basic_interpolation(self):\n        # This JUST tests the interface; tests for the actual interpolated values are left\n        # to the test_interpolation_manager.py file\n\n        o = self.interp.eval(self.coords)\n\n        assert o.shape == (17, 29)\n\n    def test_interpolation_definition(self):\n        node = Node.from_json(self.interp.json)\n        o1 = node.eval(self.coords)\n        o2 = self.interp.eval(self.coords)\n        np.testing.assert_array_equal(o1.data, o2.data)\n        assert node.json == self.interp.json\n\n    def test_compositor_chain(self):\n        dc = TileCompositorRaw(sources=[self.s2, self.s1])\n        node = Interpolate(source=dc, interpolation=\"nearest\")\n        o = node.eval(self.coords2)\n\n        np.testing.assert_array_equal(o.data, np.concatenate([self.s1.source, self.s2.source], axis=0))\n\n    def test_get_bounds(self):\n        assert self.interp.get_bounds() == self.s1.get_bounds()\n\n\nclass TestInterpolationBehavior(object):\n    def test_linear_1D_issue411and413(self):\n        data = [0, 1, 2]\n        raw_coords = data.copy()\n        raw_e_coords = [0, 0.5, 1, 1.5, 2]\n\n        for dim in [\"lat\", \"lon\", \"alt\", \"time\"]:\n            ec = Coordinates([raw_e_coords], [dim])\n\n            arrb = ArrayRaw(source=data, coordinates=Coordinates([raw_coords], [dim]))\n            node = Interpolate(source=arrb, interpolation=\"linear\")\n            o = node.eval(ec)\n\n            np.testing.assert_array_equal(o.data, raw_e_coords, err_msg=\"dim {} failed to interpolate\".format(dim))\n\n        # Do time interpolation explicitly\n        raw_coords = [\"2020-11-01\", \"2020-11-03\", \"2020-11-05\"]\n        raw_et_coords = [\"2020-11-01\", \"2020-11-02\", \"2020-11-03\", \"2020-11-04\", \"2020-11-05\"]\n        ec = Coordinates([raw_et_coords], [\"time\"])\n\n        arrb = ArrayRaw(source=data, coordinates=Coordinates([raw_coords], [\"time\"]))\n        node = Interpolate(source=arrb, interpolation=\"linear\")\n        o = node.eval(ec)\n\n        np.testing.assert_array_equal(\n            o.data, raw_e_coords, err_msg=\"dim time failed to interpolate with datetime64 coords\"\n        )\n\n    def test_stacked_coords_with_partial_dims_issue123(self):\n        node = Array(\n            source=[0, 1, 2],\n            coordinates=Coordinates(\n                [[[0, 2, 1], [10, 12, 11], [\"2018-01-01\", \"2018-01-02\", \"2018-01-03\"]]], dims=[\"lat_lon_time\"]\n            ),\n            interpolation=\"nearest\",\n        )\n\n        # unstacked and stacked requests without time\n        o1 = node.eval(Coordinates([[0.5, 1.5], [10.5, 11.5]], dims=[\"lat\", \"lon\"]))\n        o2 = node.eval(Coordinates([[[0.5, 1.5], [10.5, 11.5]]], dims=[\"lat_lon\"]))\n\n        assert_array_equal(o1.data, [[0, 2], [2, 1]])\n        assert_array_equal(o2.data, [0, 1])\n\n        # request without lat or lon\n        o3 = node.eval(Coordinates([\"2018-01-01\"], dims=[\"time\"]))\n        assert o3.data[0] == 0\n\n    def test_ignored_interpolation_params_issue340(self, caplog):\n        node = Array(\n            source=[0, 1, 2],\n            coordinates=Coordinates([[0, 2, 1]], dims=[\"time\"]),\n            interpolation={\"method\": \"nearest\", \"params\": {\"fake_param\": 1.1, \"spatial_tolerance\": 1}},\n        )\n\n        with warnings.catch_warnings():\n            warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n            node.eval(Coordinates([[0.5, 1.5]], [\"time\"]))\n        assert \"interpolation parameter 'fake_param' was ignored\" in caplog.text\n        assert \"interpolation parameter 'spatial_tolerance' was ignored\" not in caplog.text\n\n    def 
test_silent_nearest_neighbor_interp_bug_issue412(self):\n        node = podpac.data.Array(\n            source=[0, 1, 2],\n            coordinates=podpac.Coordinates([[1, 5, 9]], dims=[\"lat\"]),\n            interpolation=[{\"method\": \"bilinear\", \"dims\": [\"lat\"], \"interpolators\": [ScipyGrid]}],\n        )\n        with pytest.raises(InterpolationException, match=\"can't be handled\"):\n            o = node.eval(podpac.Coordinates([podpac.crange(1, 9, 1)], dims=[\"lat\"]))\n\n        node = podpac.data.Array(\n            source=[0, 1, 2],\n            coordinates=podpac.Coordinates([[1, 5, 9]], dims=[\"lat\"]),\n            interpolation=[{\"method\": \"bilinear\", \"dims\": [\"lat\"]}],\n        )\n        o = node.eval(podpac.Coordinates([podpac.crange(1, 9, 1)], dims=[\"lat\"]))\n        assert_array_equal(o.data, np.linspace(0, 2, 9))\n\n    def test_selection_crs(self):\n        base = podpac.core.data.array_source.ArrayRaw(\n            source=[0, 1, 2],\n            coordinates=podpac.Coordinates(\n                [[1, 5, 9]], dims=[\"time\"], crs=\"+proj=longlat +datum=WGS84 +no_defs +vunits=m\"\n            ),\n        )\n        node = podpac.interpolators.Interpolate(source=base, interpolation=\"linear\")\n        tocrds = podpac.Coordinates([podpac.crange(1, 9, 1, \"time\")], crs=\"EPSG:4326\")\n        o = node.eval(tocrds)\n        assert o.crs == tocrds.crs\n        assert_array_equal(o.data, np.linspace(0, 2, 9))\n", "\"\"\"\nGeneral-purpose Algorithm Nodes.\n\"\"\"\n\nfrom __future__ import division, unicode_literals, print_function, absolute_import\n\nimport sys\nimport warnings\n\nimport numpy as np\nimport xarray as xr\nimport traitlets as tl\n\n# Optional dependencies\nfrom lazy_import import lazy_module\n\nne = lazy_module(\"numexpr\")\n\nfrom podpac import settings\nfrom podpac import Coordinates\nfrom podpac.core.node import Node\nfrom podpac.core.utils import NodeTrait\nfrom podpac.core.algorithm.algorithm import Algorithm\n\nif sys.version_info.major == 2:\n\n    class PermissionError(OSError):\n        pass\n\n\nclass GenericInputs(Algorithm):\n    \"\"\"Base class for Algorithms that accept generic named inputs.\"\"\"\n\n    inputs = tl.Dict(read_only=True)\n\n    _repr_keys = [\"inputs\"]\n\n    def _first_init(self, **kwargs):\n        trait_names = self.trait_names()\n        for key in kwargs:\n            if key in trait_names and isinstance(kwargs[key], Node):\n                raise RuntimeError(\"Trait '%s' is reserved and cannot be used as a Generic Algorithm input\" % key)\n        input_keys = [key for key in kwargs if key not in trait_names and isinstance(kwargs[key], Node)]\n        inputs = {key: kwargs.pop(key) for key in input_keys}\n        self.set_trait(\"inputs\", inputs)\n        return super(GenericInputs, self)._first_init(**kwargs)\n\n    @property\n    def _base_definition(self):\n        d = super(GenericInputs, self)._base_definition\n        d[\"inputs\"] = self.inputs\n        return d\n\n\n
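# A usage sketch of GenericInputs (names illustrative): any extra keyword argument\n# whose value is a Node is captured into `inputs` rather than set as a trait, e.g.\n#\n#     arith = Arithmetic(A=SinCoords(), B=Arange(), eqn=\"A + B\")\n#     arith.inputs  # -> {\"A\": <SinCoords node>, \"B\": <Arange node>}\n#\n# Reserved trait names (e.g. \"eqn\") raise a RuntimeError if used as input names.\n\n\n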
\"\n \"NOTE: Allowing unsafe evaluation enables arbitrary execution of Python code through PODPAC \"\n \"Node definitions.\"\n )\n\n if self.eqn == \"\":\n raise ValueError(\"Arithmetic eqn cannot be empty\")\n\n super(Arithmetic, self).init()\n\n def algorithm(self, inputs, coordinates):\n \"\"\"Compute the algorithms equation\n\n Attributes\n ----------\n inputs : dict\n Evaluated outputs of the input nodes. The keys are the attribute names.\n coordinates : podpac.Coordinates\n Requested coordinates.\n Note that the ``inputs`` may contain with different coordinates.\n\n Returns\n -------\n result : UnitsDataArray\n Algorithm result.\n \"\"\"\n\n if not settings.allow_unsafe_eval:\n raise PermissionError(\n \"Insecure evaluation of Python code using Arithmetic node has not been allowed. If \"\n \"this is an error, use: `podpac.settings.set_unsafe_eval(True)`. \"\n \"NOTE: Allowing unsafe evaluation enables arbitrary execution of Python code through PODPAC \"\n \"Node definitions.\"\n )\n\n eqn = self.eqn.format(**self.params)\n\n fields = self.inputs.keys()\n res = xr.broadcast(*[inputs[f] for f in fields])\n f_locals = dict(zip(fields, res))\n\n try:\n from numexpr import evaluate # Needed for some systems to get around lazy_module issues\n\n result = ne.evaluate(eqn, f_locals)\n except (NotImplementedError, ImportError):\n result = eval(eqn, f_locals)\n res = res[0].copy() # Make an xarray object with correct dimensions\n res[:] = result\n return res\n\n\nclass Generic(GenericInputs):\n \"\"\"\n Generic Algorithm Node that allows arbitrary Python code to be executed.\n\n Attributes\n ----------\n code : str\n The multi-line code that will be evaluated. This code should assign \"output\" to the desired result, and \"output\"\n needs to be a \"numpy array\" or \"xarray DataArray\"\n inputs : dict(str: podpac.Node)\n A dictionary of PODPAC nodes that will serve as the input data for the Python script\n\n Examples\n ----------\n a = SinCoords()\n b = Arange()\n code = '''import numpy as np\n output = np.minimum(a, b)\n '''\n generic = Generic(code=code, a=a, b=b)\n \"\"\"\n\n code = tl.Unicode().tag(attr=True, readonly=True)\n\n def init(self):\n if not settings.allow_unsafe_eval:\n warnings.warn(\n \"Insecure evaluation of Python code using Generic node has not been allowed. If this \"\n \"this is an error, use: `podpac.settings.set_unsafe_eval(True)`. \"\n \"NOTE: Allowing unsafe evaluation enables arbitrary execution of Python code through PODPAC \"\n \"Node definitions.\"\n )\n super(Generic, self).init()\n\n def algorithm(self, inputs, coordinates):\n \"\"\"\n Run the generic code.\n\n Attributes\n ----------\n inputs : dict\n Evaluated outputs of the input nodes. The keys are the attribute names.\n coordinates : podpac.Coordinates\n Requested coordinates.\n Note that the ``inputs`` may contain with different coordinates.\n\n Returns\n -------\n result : UnitsDataArray\n Algorithm result.\n \"\"\"\n\n if not settings.allow_unsafe_eval:\n raise PermissionError(\n \"Insecure evaluation of Python code using Generic node has not been allowed. If this \"\n \"this is an error, use: `podpac.settings.set_unsafe_eval(True)`. \"\n \"NOTE: Allowing unsafe evaluation enables arbitrary execution of Python code through PODPAC \"\n \"Node definitions.\"\n )\n exec(self.code, inputs)\n return inputs[\"output\"]\n\n\nclass Mask(Algorithm):\n \"\"\"\n Masks the `source` based on a boolean expression involving the `mask`\n (i.e. 
class Mask(Algorithm):\n    \"\"\"\n    Masks the `source` based on a boolean expression involving the `mask`\n    (i.e. source[mask <bool_op> <bool_val>] = <masked_val>).\n    For a normal boolean mask input, the default values for `bool_op`, `bool_val` and `masked_val` can be used.\n\n    Attributes\n    ----------\n    source : podpac.Node\n        The source that will be masked\n    mask : podpac.Node\n        The data that will be used to compute the mask\n    masked_val : float, optional\n        Default value is np.nan. The value that will replace the masked items.\n    bool_val : float, optional\n        Default value is 1. The value used to compare the mask when creating the boolean expression\n    bool_op : enum, optional\n        Default value is '=='. One of ['==', '<', '<=', '>', '>=']\n    in_place : bool, optional\n        Default is False. If True, the source array will be changed in-place, which could affect the value of the\n        source in other parts of the pipeline.\n\n    Examples\n    ----------\n    # Mask data from a boolean data node using the default behavior.\n    # Create a boolean mask Node (as an example)\n    b = Arithmetic(A=SinCoords(), eqn='A>0')\n    # Create the source node\n    a = Arange()\n    masked = Mask(source=a, mask=b)\n\n    # Create a node that makes the following substitution \"a[b > 0] = np.nan\"\n    a = Arange()\n    b = SinCoords()\n    masked = Mask(source=a, mask=b,\n                  masked_val=np.nan,\n                  bool_val=0, bool_op='>',\n                  in_place=True)\n\n    \"\"\"\n\n    source = NodeTrait().tag(attr=True)\n    mask = NodeTrait().tag(attr=True)\n    masked_val = tl.Float(allow_none=True, default_value=None).tag(attr=True)\n    bool_val = tl.Float(1).tag(attr=True)\n    bool_op = tl.Enum([\"==\", \"<\", \"<=\", \">\", \">=\"], default_value=\"==\").tag(attr=True)\n    in_place = tl.Bool(False).tag(attr=True)\n\n    _repr_keys = [\"source\", \"mask\"]\n\n
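    # Equivalent array-level operation (illustrative comment, added): with the defaults\n    # (bool_op=\"==\", bool_val=1, masked_val=None), evaluating this node amounts to\n    #\n    #   out = source_data.copy()\n    #   out[mask_data == 1] = np.nan\n    #\n    # where `in_place=True` skips the copy.\n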
    def algorithm(self, inputs, coordinates):\n        \"\"\"\n        Sets the values in inputs['source'] to self.masked_val using (inputs['mask'] <self.bool_op> <self.bool_val>)\n\n        Parameters\n        ----------\n        inputs : dict\n            Evaluated outputs of the input nodes. The keys are the attribute names.\n        coordinates : podpac.Coordinates\n            Requested coordinates.\n            Note that the ``inputs`` may contain data with different coordinates.\n\n        Returns\n        -------\n        result : UnitsDataArray\n            Algorithm result.\n        \"\"\"\n\n        # shorter names\n        mask = inputs[\"mask\"]\n        source = inputs[\"source\"]\n        op = self.bool_op\n        bv = self.bool_val\n\n        # Make a copy if we don't want to change the source in-place\n        if not self.in_place:\n            source = source.copy()\n\n        # Make the mask boolean\n        if op == \"==\":\n            mask = mask == bv\n        elif op == \"<\":\n            mask = mask < bv\n        elif op == \"<=\":\n            mask = mask <= bv\n        elif op == \">\":\n            mask = mask > bv\n        elif op == \">=\":\n            mask = mask >= bv\n\n        # Mask the values and return\n        if self.masked_val is None:\n            source.set(np.nan, mask)\n        else:\n            source.set(self.masked_val, mask)\n\n        return source\n\n\nclass Combine(GenericInputs):\n    \"\"\"Combine multiple nodes into a single node with multiple outputs.\n\n    If no output names are specified, the keyword argument names will be used.\n    \"\"\"\n\n    @tl.default(\"outputs\")\n    def _default_outputs(self):\n        input_keys = list(self.inputs.keys())\n        return input_keys\n\n    def algorithm(self, inputs, coordinates):\n        cs = [Coordinates.from_xarray(x) for x in inputs.values()]\n        if any(c != cs[0] for c in cs):\n            raise NodeException(\"Cannot combine inputs with different coordinates\")\n\n        data = np.stack([inputs[key] for key in self.inputs], axis=-1)\n        return self.create_output_array(cs[0], data=data)\n",
    "from __future__ import division, unicode_literals, print_function, absolute_import\n\nimport pytest\nimport numpy as np\nimport xarray as xr\nimport scipy.stats\nimport traitlets as tl\n\nimport podpac\nfrom podpac.core.algorithm.utility import Arange\nfrom podpac.core.data.array_source import Array\nfrom podpac.core.algorithm.stats import Reduce\nfrom podpac.core.algorithm.stats import Min, Max, Sum, Count, Mean, Variance, Skew, Kurtosis, StandardDeviation\nfrom podpac.core.algorithm.generic import Arithmetic\nfrom podpac.core.algorithm.stats import Median, Percentile\nfrom podpac.core.algorithm.stats import GroupReduce, DayOfYear, DayOfYearWindow\n\n\ndef setup_module():\n    global coords, source, data, multisource, bdata\n    coords = podpac.Coordinates(\n        [podpac.clinspace(0, 1, 10), podpac.clinspace(0, 1, 10), podpac.crange(\"2018-01-01\", \"2018-01-10\", \"1,D\")],\n        dims=[\"lat\", \"lon\", \"time\"],\n    )\n\n    a = np.random.random(coords.shape)\n    a[3, 0, 0] = np.nan\n    a[0, 3, 0] = np.nan\n    a[0, 0, 3] = np.nan\n    source = Array(source=a, coordinates=coords)\n    data = source.eval(coords)\n\n    ab = np.stack([a, 2 * a], -1)\n    multisource = Array(source=ab, coordinates=coords, outputs=[\"a\", \"b\"])\n    bdata = 2 * data\n\n\nclass TestReduce(object):\n    \"\"\" Tests the Reduce class \"\"\"\n\n    def test_auto_chunk(self):\n        # any reduce node would do here\n        node = Min(source=source)\n\n        with podpac.settings:\n            podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n            podpac.settings[\"CHUNK_SIZE\"] = \"auto\"\n            node.eval(coords)\n\n    def test_chunked_fallback(self):\n        with podpac.settings:\n            podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n\n            class First(Reduce):\n                def reduce(self, x):\n                    return x.isel(**{dim: 0 for dim in self.dims})\n\n            node = First(source=source, dims=\"time\")\n\n            # use reduce function\n            podpac.settings[\"CHUNK_SIZE\"] = None\n            output = node.eval(coords)\n\n            # fall back on reduce function with warning\n            with pytest.warns(UserWarning):\n                podpac.settings[\"CHUNK_SIZE\"] = 500\n                output_chunked = 
node.eval(coords)\n\n # should be the same\n xr.testing.assert_allclose(output, output_chunked)\n\n\nclass BaseTests(object):\n \"\"\" Common tests for Reduce subclasses \"\"\"\n\n def test_full(self):\n with podpac.settings:\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = None\n\n node = self.NodeClass(source=source)\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_full)\n np.testing.assert_allclose(output.data, self.expected_full.data)\n\n node = self.NodeClass(source=source, dims=coords.dims)\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_full)\n np.testing.assert_allclose(output.data, self.expected_full.data)\n\n def test_full_chunked(self):\n with podpac.settings:\n node = self.NodeClass(source=source, dims=coords.dims)\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = 500\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_full)\n np.testing.assert_allclose(output.data, self.expected_full.data)\n\n def test_lat_lon(self):\n with podpac.settings:\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = None\n node = self.NodeClass(source=source, dims=[\"lat\", \"lon\"])\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_latlon)\n np.testing.assert_allclose(output.data, self.expected_latlon.data)\n\n def test_lat_lon_chunked(self):\n with podpac.settings:\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = 500\n node = self.NodeClass(source=source, dims=[\"lat\", \"lon\"])\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_latlon)\n np.testing.assert_allclose(output.data, self.expected_latlon.data)\n\n def test_time(self):\n with podpac.settings:\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = None\n node = self.NodeClass(source=source, dims=\"time\")\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_time)\n np.testing.assert_allclose(output.data, self.expected_time.data)\n\n def test_time_chunked(self):\n with podpac.settings:\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = 500\n node = self.NodeClass(source=source, dims=\"time\")\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_time)\n np.testing.assert_allclose(output.data, self.expected_time.data)\n\n def test_multiple_outputs(self):\n with podpac.settings:\n podpac.settings[\"CACHE_NODE_OUTPUT_DEFAULT\"] = False\n podpac.settings[\"CHUNK_SIZE\"] = None\n node = self.NodeClass(source=multisource, dims=[\"lat\", \"lon\"])\n output = node.eval(coords)\n assert output.dims == (\"time\", \"output\")\n np.testing.assert_array_equal(output[\"output\"], [\"a\", \"b\"])\n np.testing.assert_allclose(output.sel(output=\"a\"), self.expected_latlon)\n np.testing.assert_allclose(output.sel(output=\"b\"), self.expected_latlon_b)\n\n\nclass TestMin(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Min\n cls.expected_full = data.min()\n cls.expected_latlon = data.min(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.min(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.min(dim=\"time\")\n\n\nclass TestMax(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Max\n cls.expected_full = data.max()\n cls.expected_latlon = 
data.max(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.max(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.max(dim=\"time\")\n\n\nclass TestSum(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Sum\n cls.expected_full = data.sum()\n cls.expected_latlon = data.sum(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.sum(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.sum(dim=\"time\")\n\n\nclass TestCount(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Count\n cls.expected_full = np.isfinite(data).sum()\n cls.expected_latlon = np.isfinite(data).sum(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = np.isfinite(bdata).sum(dim=[\"lat\", \"lon\"])\n cls.expected_time = np.isfinite(data).sum(dim=\"time\")\n\n\nclass TestMean(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Mean\n cls.expected_full = data.mean()\n cls.expected_latlon = data.mean(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.mean(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.mean(dim=\"time\")\n\n def test_chunk_sizes(self):\n for n in [20, 21, 1000, 1001]:\n podpac.settings[\"CHUNK_SIZE\"] = n\n node = self.NodeClass(source=source, dims=coords.dims)\n output = node.eval(coords)\n # xr.testing.assert_allclose(output, self.expected_full)\n np.testing.assert_allclose(output.data, self.expected_full.data)\n\n\nclass TestVariance(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Variance\n cls.expected_full = data.var()\n cls.expected_latlon = data.var(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.var(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.var(dim=\"time\")\n\n\nclass TestStandardDeviation(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = StandardDeviation\n cls.expected_full = data.std()\n cls.expected_latlon = data.std(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.std(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.std(dim=\"time\")\n cls.expected_latlon_b = 2 * cls.expected_latlon\n\n\nclass TestSkew(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Skew\n n, m, l = data.shape\n cls.expected_full = xr.DataArray(scipy.stats.skew(data.data.reshape(n * m * l), nan_policy=\"omit\"))\n cls.expected_latlon = scipy.stats.skew(data.data.reshape((n * m, l)), axis=0, nan_policy=\"omit\")\n cls.expected_latlon_b = scipy.stats.skew(bdata.data.reshape((n * m, l)), axis=0, nan_policy=\"omit\")\n cls.expected_time = scipy.stats.skew(data, axis=2, nan_policy=\"omit\")\n\n\nclass TestKurtosis(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Kurtosis\n n, m, l = data.shape\n cls.expected_full = xr.DataArray(scipy.stats.kurtosis(data.data.reshape(n * m * l), nan_policy=\"omit\"))\n cls.expected_latlon = scipy.stats.kurtosis(data.data.reshape((n * m, l)), axis=0, nan_policy=\"omit\")\n cls.expected_latlon_b = scipy.stats.kurtosis(bdata.data.reshape((n * m, l)), axis=0, nan_policy=\"omit\")\n cls.expected_time = scipy.stats.kurtosis(data, axis=2, nan_policy=\"omit\")\n\n\nclass TestMedian(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.NodeClass = Median\n cls.expected_full = data.median()\n cls.expected_latlon = data.median(dim=[\"lat\", \"lon\"])\n cls.expected_latlon_b = bdata.median(dim=[\"lat\", \"lon\"])\n cls.expected_time = data.median(dim=\"time\")\n\n\[email protected](\"TODO\")\nclass TestPercentile(BaseTests):\n @classmethod\n def setup_class(cls):\n cls.node = Percentile(source=source)\n # TODO can we replace dims_axes with 
reshape (or vice versa)\n\n\nclass TestGroupReduce(object):\n pass\n\n\nclass TestResampleReduce(object):\n pass\n\n\nclass TestDayOfYear(object):\n pass\n\n\nclass F(DayOfYearWindow):\n cache_output = tl.Bool(False)\n force_eval = tl.Bool(True)\n\n def function(self, data, output):\n return len(data)\n\n\nclass FM(DayOfYearWindow):\n cache_output = tl.Bool(False)\n force_eval = tl.Bool(True)\n\n def function(self, data, output):\n return np.mean(data)\n\n\nclass TestDayOfYearWindow(object):\n def test_doy_window1(self):\n coords = podpac.coordinates.concat(\n [\n podpac.Coordinates([podpac.crange(\"1999-12-29\", \"2000-01-02\", \"1,D\", \"time\")]),\n podpac.Coordinates([podpac.crange(\"2001-12-30\", \"2002-01-03\", \"1,D\", \"time\")]),\n ]\n )\n\n node = Arange()\n nodedoywindow = F(source=node, window=1, cache_output=False, force_eval=True)\n o = nodedoywindow.eval(coords)\n\n np.testing.assert_array_equal(o, [2, 2, 1, 1, 2, 2])\n\n def test_doy_window2(self):\n coords = podpac.coordinates.concat(\n [\n podpac.Coordinates([podpac.crange(\"1999-12-29\", \"2000-01-03\", \"1,D\", \"time\")]),\n podpac.Coordinates([podpac.crange(\"2001-12-30\", \"2002-01-02\", \"1,D\", \"time\")]),\n ]\n )\n\n node = Arange()\n nodedoywindow = F(source=node, window=2, cache_output=False, force_eval=True)\n o = nodedoywindow.eval(coords)\n\n np.testing.assert_array_equal(o, [6, 5, 3, 3, 5, 6])\n\n def test_doy_window2_mean_rescale_float(self):\n coords = podpac.coordinates.concat(\n [\n podpac.Coordinates([podpac.crange(\"1999-12-29\", \"2000-01-03\", \"1,D\", \"time\")]),\n podpac.Coordinates([podpac.crange(\"2001-12-30\", \"2002-01-02\", \"1,D\", \"time\")]),\n ]\n )\n\n node = Arange()\n nodedoywindow = FM(source=node, window=2, cache_output=False, force_eval=True)\n o = nodedoywindow.eval(coords)\n\n nodedoywindow_s = FM(\n source=node, window=2, cache_output=False, force_eval=True, scale_float=[0, coords.size], rescale=True\n )\n o_s = nodedoywindow_s.eval(coords)\n\n np.testing.assert_array_almost_equal(o, o_s)\n\n def test_doy_window2_mean_rescale_max_min(self):\n with podpac.settings:\n podpac.settings.set_unsafe_eval(True)\n\n coords = podpac.coordinates.concat(\n [\n podpac.Coordinates([podpac.crange(\"1999-12-29\", \"2000-01-03\", \"1,D\", \"time\")]),\n podpac.Coordinates([podpac.crange(\"2001-12-30\", \"2002-01-02\", \"1,D\", \"time\")]),\n ]\n )\n\n node = Arange()\n node_max = Arithmetic(source=node, eqn=\"(source < 5) + source\")\n node_min = Arithmetic(source=node, eqn=\"-1*(source < 5) + source\")\n\n nodedoywindow_s = FM(\n source=node,\n window=2,\n cache_output=False,\n force_eval=True,\n scale_max=node_max,\n scale_min=node_min,\n rescale=False,\n )\n o_s = nodedoywindow_s.eval(coords)\n\n np.testing.assert_array_almost_equal([0.5] * o_s.size, o_s)\n" ]
[ [ "numpy.testing.assert_equal", "numpy.linspace", "numpy.datetime64", "numpy.timedelta64", "numpy.array" ], [ "numpy.all", "numpy.arange", "numpy.isnan", "numpy.testing.assert_array_equal" ], [ "numpy.testing.assert_array_equal", "numpy.concatenate", "numpy.random.rand", "numpy.linspace" ], [ "numpy.stack" ], [ "numpy.random.random", "numpy.isfinite", "numpy.stack", "numpy.testing.assert_array_equal", "numpy.mean", "numpy.testing.assert_allclose", "numpy.testing.assert_array_almost_equal" ] ]
[ { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] }, { "matplotlib": [], "numpy": [], "pandas": [], "scipy": [], "tensorflow": [] } ]